diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java index 86df3544ddfc6..ca2cbc09f7c2f 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java @@ -201,14 +201,20 @@ public void beforeStart() { try { mockServer.start(); node.setting("telemetry.metrics.enabled", "true"); + node.setting("tracing.apm.agent.enabled", "true"); + node.setting("tracing.apm.agent.transaction_sample_rate", "0.10"); + node.setting("tracing.apm.agent.metrics_interval", "10s"); node.setting("tracing.apm.agent.server_url", "http://127.0.0.1:" + mockServer.getPort()); } catch (IOException e) { logger.warn("Unable to start APM server", e); } - } else if (node.getSettingKeys().contains("telemetry.metrics.enabled") == false) { - // in serverless metrics are enabled by default - // if metrics were not enabled explicitly for gradlew run we should disable them + } + // in serverless metrics are enabled by default + // if metrics were not enabled explicitly for gradlew run we should disable them + else if (node.getSettingKeys().contains("telemetry.metrics.enabled") == false) { // metrics node.setting("telemetry.metrics.enabled", "false"); + } else if (node.getSettingKeys().contains("tracing.apm.agent.enabled") == false) { // tracing + node.setting("tracing.apm.agent.enabled", "false"); } } diff --git a/docs/build.gradle b/docs/build.gradle index da3d83378e894..ddd2a38b5160b 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -111,6 +111,7 @@ testClusters.matching { it.name == "yamlRestTest"}.configureEach { systemProperty 'es.transport.cname_in_publish_address', 'true' requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") + requiresFeature 'es.failure_store_feature_flag_enabled', Version.fromString("8.12.0") extraConfigFile 'op-jwks.json', project(':x-pack:test:idp-fixture').file("oidc/op-jwks.json") extraConfigFile 'idp-docs-metadata.xml', project(':x-pack:test:idp-fixture').file("idp/shibboleth-idp/metadata/idp-docs-metadata.xml") diff --git a/docs/changelog/102078.yaml b/docs/changelog/102078.yaml new file mode 100644 index 0000000000000..d031aa0dbf6f7 --- /dev/null +++ b/docs/changelog/102078.yaml @@ -0,0 +1,5 @@ +pr: 102078 +summary: Derive expected replica size from primary +area: Allocation +type: enhancement +issues: [] diff --git a/docs/changelog/102967.yaml b/docs/changelog/102967.yaml new file mode 100644 index 0000000000000..cdde735f6c077 --- /dev/null +++ b/docs/changelog/102967.yaml @@ -0,0 +1,6 @@ +pr: 102967 +summary: "ES|QL: Improve resolution error management in `mv_expand`" +area: ES|QL +type: bug +issues: + - 102964 diff --git a/docs/changelog/103035.yaml b/docs/changelog/103035.yaml new file mode 100644 index 0000000000000..5b1c9d6629767 --- /dev/null +++ b/docs/changelog/103035.yaml @@ -0,0 +1,5 @@ +pr: 103035 +summary: "x-pack/plugin/core: add `match_mapping_type` to `ecs@mappings` dynamic templates" +area: Data streams +type: bug +issues: [] diff --git a/docs/changelog/103084.yaml b/docs/changelog/103084.yaml new file mode 100644 index 0000000000000..fb5a718a086de --- /dev/null +++ b/docs/changelog/103084.yaml @@ -0,0 +1,6 @@ +pr: 103084 +summary: Return `matched_queries` in Percolator +area: Percolator +type: enhancement +issues: + - 10163 diff --git a/docs/changelog/103209.yaml b/docs/changelog/103209.yaml new file mode 100644 index
0000000000000..05ae8c13bcb5c --- /dev/null +++ b/docs/changelog/103209.yaml @@ -0,0 +1,6 @@ +pr: 103209 +summary: "ESQL: Fix `to_degrees()` returning infinity" +area: ES|QL +type: bug +issues: + - 102987 diff --git a/docs/changelog/103251.yaml b/docs/changelog/103251.yaml new file mode 100644 index 0000000000000..0c5c6d6e4d776 --- /dev/null +++ b/docs/changelog/103251.yaml @@ -0,0 +1,5 @@ +pr: 103251 +summary: Wait for reroute before acking put-shutdown +area: Infra/Node Lifecycle +type: bug +issues: [] diff --git a/docs/changelog/103310.yaml b/docs/changelog/103310.yaml new file mode 100644 index 0000000000000..a7a0746b6b8c4 --- /dev/null +++ b/docs/changelog/103310.yaml @@ -0,0 +1,5 @@ +pr: 103310 +summary: Revert "Validate settings in `ReloadSecureSettings` API" +area: Security +type: bug +issues: [] diff --git a/docs/reference/esql/esql-functions.asciidoc b/docs/reference/esql/esql-functions.asciidoc index b921719fc097b..c2e943f7555d6 100644 --- a/docs/reference/esql/esql-functions.asciidoc +++ b/docs/reference/esql/esql-functions.asciidoc @@ -127,9 +127,11 @@ include::functions/tan.asciidoc[] include::functions/tanh.asciidoc[] include::functions/tau.asciidoc[] include::functions/to_boolean.asciidoc[] +include::functions/to_cartesianpoint.asciidoc[] include::functions/to_datetime.asciidoc[] include::functions/to_degrees.asciidoc[] include::functions/to_double.asciidoc[] +include::functions/to_geopoint.asciidoc[] include::functions/to_integer.asciidoc[] include::functions/to_ip.asciidoc[] include::functions/to_long.asciidoc[] diff --git a/docs/reference/esql/esql-limitations.asciidoc b/docs/reference/esql/esql-limitations.asciidoc index f1971fd409754..00f5b056c7ebe 100644 --- a/docs/reference/esql/esql-limitations.asciidoc +++ b/docs/reference/esql/esql-limitations.asciidoc @@ -34,6 +34,9 @@ include::processing-commands/limit.asciidoc[tag=limitation] * `text` * `unsigned_long` * `version` +* Spatial types +** `geo_point` +** `point` [discrete] ==== Unsupported types @@ -44,10 +47,8 @@ include::processing-commands/limit.asciidoc[tag=limitation] ** `counter` ** `position` ** `aggregate_metric_double` -* Geo/spatial -** `geo_point` +* Spatial types ** `geo_shape` -** `point` ** `shape` * Date/time ** `date_nanos` @@ -119,7 +120,7 @@ consequences. An {esql} query on a `text` field is case-sensitive. Furthermore, a subfield may have been mapped with a <>, which can transform the original string. Or it may have been mapped with <>, which can truncate the string. None of these mapping operations are applied to -an {esql} query, which may lead to false positives or negatives. +an {esql} query, which may lead to false positives or negatives. To avoid these issues, a best practice is to be explicit about the field that you query, and query `keyword` sub-fields instead of `text` fields. @@ -197,4 +198,4 @@ the <>. 
[[esql-limitations-kibana]] === Kibana limitations -include::esql-kibana.asciidoc[tag=limitations] \ No newline at end of file +include::esql-kibana.asciidoc[tag=limitations] diff --git a/docs/reference/esql/functions/binary.asciidoc b/docs/reference/esql/functions/binary.asciidoc index 32e97b7316d84..2d4daa6ad2eca 100644 --- a/docs/reference/esql/functions/binary.asciidoc +++ b/docs/reference/esql/functions/binary.asciidoc @@ -2,19 +2,91 @@ [[esql-binary-operators]] === Binary operators -These binary comparison operators are supported: +[[esql-binary-operators-equality]] +==== Equality +[.text-center] +image::esql/functions/signature/equals.svg[Embedded,opts=inline] + +Supported types: + +include::types/equals.asciidoc[] + +==== Inequality `!=` +[.text-center] +image::esql/functions/signature/not_equals.svg[Embedded,opts=inline] -* equality: `==` -* inequality: `!=` -* less than: `<` -* less than or equal: `<=` -* larger than: `>` -* larger than or equal: `>=` +Supported types: -And these mathematical operators are supported: +include::types/not_equals.asciidoc[] +==== Less than `<` +[.text-center] +image::esql/functions/signature/less_than.svg[Embedded,opts=inline] + +Supported types: + +include::types/less_than.asciidoc[] + +==== Less than or equal to `<=` +[.text-center] +image::esql/functions/signature/less_than_or_equal.svg[Embedded,opts=inline] + +Supported types: + +include::types/less_than_or_equal.asciidoc[] + +==== Greater than `>` +[.text-center] +image::esql/functions/signature/greater_than.svg[Embedded,opts=inline] + +Supported types: + +include::types/greater_than.asciidoc[] + +==== Greater than or equal to `>=` +[.text-center] +image::esql/functions/signature/greater_than_or_equal.svg[Embedded,opts=inline] + +Supported types: + +include::types/greater_than_or_equal.asciidoc[] + +==== Add `+` [.text-center] image::esql/functions/signature/add.svg[Embedded,opts=inline] +Supported types: + +include::types/add.asciidoc[] + +==== Subtract `-` [.text-center] image::esql/functions/signature/sub.svg[Embedded,opts=inline] + +Supported types: + +include::types/sub.asciidoc[] + +==== Multiply `*` +[.text-center] +image::esql/functions/signature/mul.svg[Embedded,opts=inline] + +Supported types: + +include::types/mul.asciidoc[] + +==== Divide `/` +[.text-center] +image::esql/functions/signature/div.svg[Embedded,opts=inline] + +Supported types: + +include::types/div.asciidoc[] + +==== Modulus `%` +[.text-center] +image::esql/functions/signature/mod.svg[Embedded,opts=inline] + +Supported types: + +include::types/mod.asciidoc[] diff --git a/docs/reference/esql/functions/signature/greater_than_or_equal.svg b/docs/reference/esql/functions/signature/greater_than_or_equal.svg new file mode 100644 index 0000000000000..6afb36d4b4eff --- /dev/null +++ b/docs/reference/esql/functions/signature/greater_than_or_equal.svg @@ -0,0 +1 @@ +lhs>=rhs \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/less_than_or_equal.svg b/docs/reference/esql/functions/signature/less_than_or_equal.svg new file mode 100644 index 0000000000000..da93c172b7136 --- /dev/null +++ b/docs/reference/esql/functions/signature/less_than_or_equal.svg @@ -0,0 +1 @@ +lhs<=rhs \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_degrees.svg b/docs/reference/esql/functions/signature/to_degrees.svg deleted file mode 100644 index 01fe0a4770156..0000000000000 --- a/docs/reference/esql/functions/signature/to_degrees.svg +++ /dev/null @@ -1 +0,0 @@ -TO_DEGREES(v) \ No newline at 
end of file diff --git a/docs/reference/esql/functions/to_cartesianpoint.asciidoc b/docs/reference/esql/functions/to_cartesianpoint.asciidoc new file mode 100644 index 0000000000000..1fb64542681e2 --- /dev/null +++ b/docs/reference/esql/functions/to_cartesianpoint.asciidoc @@ -0,0 +1,19 @@ +[discrete] +[[esql-to_cartesianpoint]] +=== `TO_CARTESIANPOINT` +Converts an input value to a `point` value. + +The input can be a single- or multi-valued field or an expression. +The input type must be a string or a cartesian `point`. + +A string will only be successfully converted if it respects the +https://en.wikipedia.org/wiki/Well-known_text_representation_of_geometry[WKT Point] format: + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial.csv-spec[tag=to_cartesianpoint-str] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial.csv-spec[tag=to_cartesianpoint-str-result] +|=== diff --git a/docs/reference/esql/functions/to_geopoint.asciidoc b/docs/reference/esql/functions/to_geopoint.asciidoc new file mode 100644 index 0000000000000..83936af0c71b3 --- /dev/null +++ b/docs/reference/esql/functions/to_geopoint.asciidoc @@ -0,0 +1,19 @@ +[discrete] +[[esql-to_geopoint]] +=== `TO_GEOPOINT` +Converts an input value to a `geo_point` value. + +The input can be a single- or multi-valued field or an expression. +The input type must be a string or a `geo_point`. + +A string will only be successfully converted if it respects the +https://en.wikipedia.org/wiki/Well-known_text_representation_of_geometry[WKT Point] format: + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial.csv-spec[tag=to_geopoint-str] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial.csv-spec[tag=to_geopoint-str-result] +|=== diff --git a/docs/reference/esql/functions/type-conversion-functions.asciidoc b/docs/reference/esql/functions/type-conversion-functions.asciidoc index 640006c936526..48a9b175d3d65 100644 --- a/docs/reference/esql/functions/type-conversion-functions.asciidoc +++ b/docs/reference/esql/functions/type-conversion-functions.asciidoc @@ -9,9 +9,11 @@ // tag::type_list[] * <> +* <> * <> * <> * <> +* <> * <> * <> * <> @@ -22,9 +24,11 @@ // end::type_list[] include::to_boolean.asciidoc[] +include::to_cartesianpoint.asciidoc[] include::to_datetime.asciidoc[] include::to_degrees.asciidoc[] include::to_double.asciidoc[] +include::to_geopoint.asciidoc[] include::to_integer.asciidoc[] include::to_ip.asciidoc[] include::to_long.asciidoc[] diff --git a/docs/reference/esql/functions/types/add.asciidoc b/docs/reference/esql/functions/types/add.asciidoc new file mode 100644 index 0000000000000..7783d08bc3aaa --- /dev/null +++ b/docs/reference/esql/functions/types/add.asciidoc @@ -0,0 +1,12 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +lhs | rhs | result +date_period | date_period | date_period +date_period | datetime | datetime +datetime | date_period | datetime +datetime | time_duration | datetime +double | double | double +integer | integer | integer +long | long | long +time_duration | time_duration | time_duration +|=== diff --git a/docs/reference/esql/functions/types/div.asciidoc b/docs/reference/esql/functions/types/div.asciidoc new file mode 100644 index 0000000000000..eee2d68e4653f --- /dev/null +++ b/docs/reference/esql/functions/types/div.asciidoc @@ -0,0 +1,7 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +lhs | rhs | result +double | double | double +integer | integer | integer +long | 
long | long +|=== diff --git a/docs/reference/esql/functions/types/equals.asciidoc b/docs/reference/esql/functions/types/equals.asciidoc new file mode 100644 index 0000000000000..27fb19b6d38a2 --- /dev/null +++ b/docs/reference/esql/functions/types/equals.asciidoc @@ -0,0 +1,5 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +lhs | rhs | result +integer | integer | boolean +|=== diff --git a/docs/reference/esql/functions/types/greater_than.asciidoc b/docs/reference/esql/functions/types/greater_than.asciidoc new file mode 100644 index 0000000000000..27fb19b6d38a2 --- /dev/null +++ b/docs/reference/esql/functions/types/greater_than.asciidoc @@ -0,0 +1,5 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +lhs | rhs | result +integer | integer | boolean +|=== diff --git a/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc b/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc new file mode 100644 index 0000000000000..27fb19b6d38a2 --- /dev/null +++ b/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc @@ -0,0 +1,5 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +lhs | rhs | result +integer | integer | boolean +|=== diff --git a/docs/reference/esql/functions/types/less_than.asciidoc b/docs/reference/esql/functions/types/less_than.asciidoc new file mode 100644 index 0000000000000..27fb19b6d38a2 --- /dev/null +++ b/docs/reference/esql/functions/types/less_than.asciidoc @@ -0,0 +1,5 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +lhs | rhs | result +integer | integer | boolean +|=== diff --git a/docs/reference/esql/functions/types/less_than_or_equal.asciidoc b/docs/reference/esql/functions/types/less_than_or_equal.asciidoc new file mode 100644 index 0000000000000..27fb19b6d38a2 --- /dev/null +++ b/docs/reference/esql/functions/types/less_than_or_equal.asciidoc @@ -0,0 +1,5 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +lhs | rhs | result +integer | integer | boolean +|=== diff --git a/docs/reference/esql/functions/types/mod.asciidoc b/docs/reference/esql/functions/types/mod.asciidoc new file mode 100644 index 0000000000000..eee2d68e4653f --- /dev/null +++ b/docs/reference/esql/functions/types/mod.asciidoc @@ -0,0 +1,7 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +lhs | rhs | result +double | double | double +integer | integer | integer +long | long | long +|=== diff --git a/docs/reference/esql/functions/types/mul.asciidoc b/docs/reference/esql/functions/types/mul.asciidoc new file mode 100644 index 0000000000000..eee2d68e4653f --- /dev/null +++ b/docs/reference/esql/functions/types/mul.asciidoc @@ -0,0 +1,7 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +lhs | rhs | result +double | double | double +integer | integer | integer +long | long | long +|=== diff --git a/docs/reference/esql/functions/types/to_degrees.asciidoc b/docs/reference/esql/functions/types/neg.asciidoc similarity index 50% rename from docs/reference/esql/functions/types/to_degrees.asciidoc rename to docs/reference/esql/functions/types/neg.asciidoc index 7cb7ca46022c2..1b841483fb22e 100644 --- a/docs/reference/esql/functions/types/to_degrees.asciidoc +++ b/docs/reference/esql/functions/types/neg.asciidoc @@ -1,8 +1,9 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== v | result +date_period | date_period double | double -integer | double -long | double -unsigned_long | double +integer | integer +long | long +time_duration | time_duration |=== diff --git 
a/docs/reference/esql/functions/types/not_equals.asciidoc b/docs/reference/esql/functions/types/not_equals.asciidoc new file mode 100644 index 0000000000000..27fb19b6d38a2 --- /dev/null +++ b/docs/reference/esql/functions/types/not_equals.asciidoc @@ -0,0 +1,5 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +lhs | rhs | result +integer | integer | boolean +|=== diff --git a/docs/reference/esql/functions/types/sub.asciidoc b/docs/reference/esql/functions/types/sub.asciidoc new file mode 100644 index 0000000000000..ed26adf06ecde --- /dev/null +++ b/docs/reference/esql/functions/types/sub.asciidoc @@ -0,0 +1,11 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +lhs | rhs | result +date_period | date_period | date_period +datetime | date_period | datetime +datetime | time_duration | datetime +double | double | double +integer | integer | integer +long | long | long +time_duration | time_duration | time_duration +|=== diff --git a/docs/reference/esql/functions/unary.asciidoc b/docs/reference/esql/functions/unary.asciidoc index 2ee35b6c6256f..69ce754c1b4a0 100644 --- a/docs/reference/esql/functions/unary.asciidoc +++ b/docs/reference/esql/functions/unary.asciidoc @@ -2,7 +2,11 @@ [[esql-unary-operators]] === Unary operators -These unary mathematical operators are supported: +The only unary operator is negation (`-`): [.text-center] image::esql/functions/signature/neg.svg[Embedded,opts=inline] + +Supported types: + +include::types/neg.asciidoc[] diff --git a/docs/reference/query-dsl/percolate-query.asciidoc b/docs/reference/query-dsl/percolate-query.asciidoc index c5a3ebb782edd..25b995eefc219 100644 --- a/docs/reference/query-dsl/percolate-query.asciidoc +++ b/docs/reference/query-dsl/percolate-query.asciidoc @@ -326,6 +326,7 @@ case the search request would fail with a version conflict error. The search response returned is identical as in the previous example. + ==== Percolate query and highlighting The `percolate` query is handled in a special way when it comes to highlighting. The queries hits are used @@ -549,6 +550,136 @@ The slightly different response: <1> The highlight fields have been prefixed with the document slot they belong to, in order to know which highlight field belongs to what document. +==== Named queries within percolator queries + +If a stored percolator query is a complex query, and you want to track which of +its sub-queries matched a percolated document, then you can use the `\_name` +parameter for its sub-queries. In this case, in a response, each hit contains, alongside +its `_percolator_document_slot` field, +`_percolator_document_slot_<n>_matched_queries` fields that show +which sub-queries matched each percolated document.
+ +For example: + +[source,console] +-------------------------------------------------- +PUT /my-index-000001/_doc/5?refresh +{ + "query": { + "bool": { + "should": [ + { + "match": { + "message": { + "query": "Japanese art", + "_name": "query1" + } + } + }, + { + "match": { + "message": { + "query": "Holand culture", + "_name": "query2" + } + } + } + ] + } + } +} +-------------------------------------------------- +// TEST[continued] + +[source,console] +-------------------------------------------------- +GET /my-index-000001/_search +{ + "query": { + "percolate": { + "field": "query", + "documents": [ + { + "message": "Japanse art" + }, + { + "message": "Holand culture" + }, + { + "message": "Japanese art and Holand culture" + }, + { + "message": "no-match" + } + ] + } + } +} +-------------------------------------------------- +// TEST[continued] + +[source,console-result] +-------------------------------------------------- +{ + "took": 55, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped" : 0, + "failed": 0 + }, + "hits": { + "total" : { + "value": 1, + "relation": "eq" + }, + "max_score": 1.1181908, + "hits": [ + { + "_index": "my-index-000001", + "_id": "5", + "_score": 1.1181908, + "_source": { + "query": { + "bool": { + "should": [ + { + "match": { + "message": { + "query": "Japanese art", + "_name": "query1" + } + } + }, + { + "match": { + "message": { + "query": "Holand culture", + "_name": "query2" + } + } + } + ] + } + } + }, + "fields" : { + "_percolator_document_slot" : [0, 1, 2], + "_percolator_document_slot_0_matched_queries" : ["query1"], <1> + "_percolator_document_slot_1_matched_queries" : ["query2"], <2> + "_percolator_document_slot_2_matched_queries" : ["query1", "query2"] <3> + } + } + ] + } +} +-------------------------------------------------- +// TESTRESPONSE[s/"took": 55,/"took": "$body.took",/] +<1> The first document matched only the first sub-query. +<2> The second document matched only the second sub-query. +<3> The third document matched both sub-queries. + ==== Specifying multiple percolate queries It is possible to specify multiple `percolate` queries in a single search request: diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index 55f277218d210..ca8a191ad4b2c 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -611,9 +611,10 @@ The `similarity` parameter is the direct vector similarity calculation. * `l2_norm`: also known as Euclidean, will include documents where the vector is within the `dims` dimensional hypersphere with radius `similarity` with origin at `query_vector`. -* `cosine` & `dot_product`: Only return vectors where the cosine similarity or dot-product are at least the provided +* `cosine`, `dot_product`, and `max_inner_product`: Only return vectors where the cosine similarity or dot-product are at least the provided `similarity`. 
-- +Read more here: <> end::knn-similarity[] tag::lenient[] diff --git a/docs/reference/search/search-your-data/knn-search.asciidoc b/docs/reference/search/search-your-data/knn-search.asciidoc index ff64535c705d9..496e0cf1b9d4f 100644 --- a/docs/reference/search/search-your-data/knn-search.asciidoc +++ b/docs/reference/search/search-your-data/knn-search.asciidoc @@ -547,6 +547,7 @@ score = 0.9 * match_score + 0.1 * knn_score_image-vector + 0.5 * knn_score_title ``` [discrete] +[[knn-similarity-search]] ==== Search kNN with expected similarity While kNN is a powerful tool, it always tries to return `k` nearest neighbors. Consequently, when using `knn` with @@ -563,6 +564,18 @@ minimum similarity for a vector to be considered a match. The `knn` search flow * Do not return any vectors that are further away than the configured `similarity` -- +NOTE: `similarity` is the true <> before it has been transformed into `_score` and any boost has been applied. + +For each configured <>, here is the corresponding inverted `_score` function. If you want to filter from a `_score` perspective, you can apply this minor transformation to correctly reject irrelevant results. +-- + - `l2_norm`: `sqrt((1 / _score) - 1)` + - `cosine`: `(2 * _score) - 1` + - `dot_product`: `(2 * _score) - 1` + - `max_inner_product`: + - `_score < 1`: `1 - (1 / _score)` + - `_score >= 1`: `_score - 1` +-- + Here is an example. In this example we search for the given `query_vector` for `k` nearest neighbors. However, with `filter` applied and requiring that the found vectors have at least the provided `similarity` between them. [source,console] diff --git a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc new file mode 100644 index 0000000000000..7fbdecc0aebce --- /dev/null +++ b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc @@ -0,0 +1,276 @@ +[[semantic-search-inference]] +=== Tutorial: semantic search with the {infer} API +++++ +Semantic search with the {infer} API +++++ + +The instructions in this tutorial show you how to use the {infer} API with the +OpenAI service to perform semantic search on your data. The following example +uses OpenAI's `text-embedding-ada-002` second generation embedding model. You +can use any OpenAI model; they are all supported by the {infer} API. + + +[discrete] +[[infer-openai-requirements]] +==== Requirements + +An https://openai.com/[OpenAI account] is required to use the {infer} API with +the OpenAI service. + + +[discrete] +[[infer-text-embedding-task]] +==== Create the inference task + +Create the {infer} task by using the <>: + +[source,console] +------------------------------------------------------------ +PUT _inference/text_embedding/openai_embeddings <1> +{ + "service": "openai", + "service_settings": { + "api_key": "" <2> + }, + "task_settings": { + "model": "text-embedding-ada-002" <3> + } +} +------------------------------------------------------------ +// TEST[skip:TBD] +<1> The task type is `text_embedding` in the path. +<2> The API key of your OpenAI account. You can find your OpenAI API keys in +your OpenAI account under the +https://platform.openai.com/api-keys[API keys section]. You need to provide +your API key only once. The <> does not return your API +key. +<3> The name of the embedding model to use. You can find the list of OpenAI +embedding models +https://platform.openai.com/docs/guides/embeddings/embedding-models[here].
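+ +Optionally, you can check that the endpoint works by calling it directly with a +short sample input. This is a minimal sketch; the exact response format may vary +between versions, but the call should simply return the embedding generated for +the sample text: + +[source,console] +------------------------------------------------------------ +POST _inference/text_embedding/openai_embeddings +{ + "input": "Calculate fuel cost" +} +------------------------------------------------------------ +// TEST[skip:TBD]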
+ + +[discrete] +[[infer-openai-mappings]] +==== Create the index mapping + +The mapping of the destination index - the index that contains the embeddings +that the model will create based on your input text - must be created. The +destination index must have a field with the <> +field type to index the output of the OpenAI model. + +[source,console] +-------------------------------------------------- +PUT openai-embeddings +{ + "mappings": { + "properties": { + "content_embedding": { <1> + "type": "dense_vector", <2> + "dims": 1536, <3> + "element_type": "byte", + "similarity": "dot_product" <4> + }, + "content": { <5> + "type": "text" <6> + } + } + } +} +-------------------------------------------------- +<1> The name of the field to contain the generated embeddings. It must be referenced +in the {infer} pipeline configuration in the next step. +<2> The field to contain the embeddings is a `dense_vector` field. +<3> The output dimensions of the model. Find this value in the +https://platform.openai.com/docs/guides/embeddings/embedding-models[OpenAI documentation] +of the model you use. +<4> The faster `dot_product` function can be used to calculate similarity +because OpenAI embeddings are normalised to unit length. You can check the +https://platform.openai.com/docs/guides/embeddings/which-distance-function-should-i-use[OpenAI docs] +about which similarity function to use. +<5> The name of the field from which to create the dense vector representation. +In this example, the name of the field is `content`. It must be referenced in +the {infer} pipeline configuration in the next step. +<6> The field type, which is `text` in this example. + + +[discrete] +[[infer-openai-inference-ingest-pipeline]] +==== Create an ingest pipeline with an inference processor + +Create an <> with an +<> and use the OpenAI model you created +above to infer against the data that is being ingested in the +pipeline. + +[source,console] +-------------------------------------------------- +PUT _ingest/pipeline/openai_embeddings +{ + "processors": [ + { + "inference": { + "model_id": "openai_embeddings", <1> + "input_output": { <2> + "input_field": "content", + "output_field": "content_embedding" + } + } + } + ] +} +-------------------------------------------------- +<1> The name of the inference model you created by using the +<>. +<2> Configuration object that defines the `input_field` for the {infer} process +and the `output_field` that will contain the {infer} results. + +//// +[source,console] +---- +DELETE _ingest/pipeline/openai_embeddings +---- +// TEST[continued] +//// + + +[discrete] +[[infer-load-data]] +==== Load data + +In this step, you load the data that you later use in the {infer} ingest +pipeline to create embeddings from it. + +Use the `msmarco-passagetest2019-top1000` data set, which is a subset of the MS +MARCO Passage Ranking data set. It consists of 200 queries, each accompanied by +a list of relevant text passages. All unique passages, along with their IDs, +have been extracted from that data set and compiled into a +https://github.com/elastic/stack-docs/blob/main/docs/en/stack/ml/nlp/data/msmarco-passagetest2019-unique.tsv[tsv file]. + +Download the file and upload it to your cluster using the +{kibana-ref}/connect-to-elasticsearch.html#upload-data-kibana[Data Visualizer] +in the {ml-app} UI. Assign the name `id` to the first column and `content` to +the second column. The index name is `test-data`. Once the upload is complete, +you can see an index named `test-data` with 182469 documents.
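+ +If you prefer to verify the upload from an API call rather than the UI, a quick +optional check (assuming you kept the `test-data` index name used above) is the +count API, which returns the number of indexed documents: + +[source,console] +---- +GET test-data/_count +---- +// TEST[skip:TBD]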
+ + +[discrete] +[[reindexing-data-infer]] +==== Ingest the data through the {infer} ingest pipeline + +Create the embeddings from the text by reindexing the data through the {infer} +pipeline that uses the OpenAI model as the inference model. + +[source,console] +---- +POST _reindex?wait_for_completion=false +{ + "source": { + "index": "test-data", + "size": 50 <1> + }, + "dest": { + "index": "openai-embeddings", + "pipeline": "openai_embeddings" + } +} +---- +// TEST[skip:TBD] +<1> The default batch size for reindexing is 1000. Reducing `size` to a smaller +number makes the updates of the reindexing process quicker, which enables you to +follow the progress closely and detect errors early. + +NOTE: The +https://platform.openai.com/account/limits[rate limit of your OpenAI account] +may affect the throughput of the reindexing process. If this happens, change +`size` to `3` or a similarly small value. + +The call returns a task ID to monitor the progress: + +[source,console] +---- +GET _tasks/ +---- +// TEST[skip:TBD] + +You can also cancel the reindexing process if you don't want to wait until it +is fully complete, which might take hours: + +[source,console] +---- +POST _tasks//_cancel +---- +// TEST[skip:TBD] + + +[discrete] +[[infer-semantic-search]] +==== Semantic search + +After the dataset has been enriched with the embeddings, you can query the data +using {ref}/knn-search.html#knn-semantic-search[semantic search]. Pass a +`query_vector_builder` to the k-nearest neighbor (kNN) vector search API, and +provide the query text and the model you have used to create the embeddings. + +NOTE: If you cancelled the reindexing process, you run the query on only part of +the data, which affects the quality of your results. + +[source,console] +-------------------------------------------------- +GET openai-embeddings/_search +{ + "knn": { + "field": "content_embedding", + "query_vector_builder": { + "text_embedding": { + "model_id": "openai_embeddings", + "model_text": "Calculate fuel cost" + } + }, + "k": 10, + "num_candidates": 100 + }, + "_source": [ + "id", + "content" + ] +} +-------------------------------------------------- +// TEST[skip:TBD] + +As a result, you receive the top 10 documents that are closest in meaning to the +query from the `openai-embeddings` index, sorted by their proximity to the query: + +[source,console-result] +-------------------------------------------------- +"hits": [ + { + "_index": "openai-embeddings", + "_id": "DDd5OowBHxQKHyc3TDSC", + "_score": 0.83704096, + "_source": { + "id": 862114, + "body": "How to calculate fuel cost for a road trip. By Tara Baukus Mello • Bankrate.com. Dear Driving for Dollars, My family is considering taking a long road trip to finish off the end of the summer, but I'm a little worried about gas prices and our overall fuel cost.It doesn't seem easy to calculate since we'll be traveling through many states and we are considering several routes.y family is considering taking a long road trip to finish off the end of the summer, but I'm a little worried about gas prices and our overall fuel cost. It doesn't seem easy to calculate since we'll be traveling through many states and we are considering several routes." + } + }, + { + "_index": "openai-embeddings", + "_id": "ajd5OowBHxQKHyc3TDSC", + "_score": 0.8345704, + "_source": { + "id": 820622, + "body": "Home Heating Calculator. Typically, approximately 50% of the energy consumed in a home annually is for space heating.
When deciding on a heating system, many factors will come into play: cost of fuel, installation cost, convenience and life style are all important.This calculator can help you estimate the cost of fuel for different heating appliances.hen deciding on a heating system, many factors will come into play: cost of fuel, installation cost, convenience and life style are all important. This calculator can help you estimate the cost of fuel for different heating appliances." + } + }, + { + "_index": "openai-embeddings", + "_id": "Djd5OowBHxQKHyc3TDSC", + "_score": 0.8327426, + "_source": { + "id": 8202683, + "body": "Fuel is another important cost. This cost will depend on your boat, how far you travel, and how fast you travel. A 33-foot sailboat traveling at 7 knots should be able to travel 300 miles on 50 gallons of diesel fuel.If you are paying $4 per gallon, the trip would cost you $200.Most boats have much larger gas tanks than cars.uel is another important cost. This cost will depend on your boat, how far you travel, and how fast you travel. A 33-foot sailboat traveling at 7 knots should be able to travel 300 miles on 50 gallons of diesel fuel." + } + }, + (...) + ] +-------------------------------------------------- +// NOTCONSOLE diff --git a/docs/reference/search/search-your-data/semantic-search.asciidoc b/docs/reference/search/search-your-data/semantic-search.asciidoc index 96281d12102bb..f4768e5c3a23d 100644 --- a/docs/reference/search/search-your-data/semantic-search.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search.asciidoc @@ -135,3 +135,4 @@ include::{es-repo-dir}/tab-widgets/semantic-search/hybrid-search-widget.asciidoc ** The https://github.com/elastic/elasticsearch-labs[`elasticsearch-labs`] repo contains a number of interactive semantic search examples in the form of executable Python notebooks, using the {es} Python client include::semantic-search-elser.asciidoc[] +include::semantic-search-inference.asciidoc[] diff --git a/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java b/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java index 36e3a2cb5e2a9..6df51189e918e 100644 --- a/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java +++ b/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java @@ -48,6 +48,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.function.Predicate; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; @@ -99,7 +100,7 @@ protected ClientYamlTestClient initClientYamlTestClient( final RestClient restClient, final List hosts, final Version esVersion, - final Version masterVersion, + final Predicate clusterFeaturesPredicate, final String os ) { return new ClientYamlDocsTestClient( @@ -107,7 +108,7 @@ protected ClientYamlTestClient initClientYamlTestClient( restClient, hosts, esVersion, - masterVersion, + clusterFeaturesPredicate, os, this::getClientBuilderWithSniffedHosts ); diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java index 979815f497583..2c33b4f2dc992 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java @@ -73,7 +73,7 @@ public Collection createComponents(PluginServices services) { final APMMeterService apmMeter = new 
APMMeterService(settings); apmAgentSettings.addClusterSettingsListeners(services.clusterService(), telemetryProvider.get(), apmMeter); logger.info("Sending apm metrics is {}", APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.get(settings) ? "enabled" : "disabled"); - logger.info("Sending apm traces is {}", APMAgentSettings.APM_ENABLED_SETTING.get(settings) ? "enabled" : "disabled"); + logger.info("Sending apm tracing is {}", APMAgentSettings.APM_ENABLED_SETTING.get(settings) ? "enabled" : "disabled"); return List.of(apmTracer, apmMeter); } diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DisabledSecurityDataStreamTestCase.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DisabledSecurityDataStreamTestCase.java index c5ca8445b08eb..ae33c06b497db 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DisabledSecurityDataStreamTestCase.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DisabledSecurityDataStreamTestCase.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.ESRestTestCase; import org.junit.ClassRule; @@ -25,6 +26,7 @@ public abstract class DisabledSecurityDataStreamTestCase extends ESRestTestCase @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) + .feature(FeatureFlag.FAILURE_STORE_ENABLED) .setting("xpack.security.enabled", "false") .setting("xpack.watcher.enabled", "false") .build(); diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecyclePermissionsRestIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecyclePermissionsRestIT.java index d662427c99d13..1c6329dcf922f 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecyclePermissionsRestIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecyclePermissionsRestIT.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.test.rest.ESRestTestCase; @@ -38,6 +39,7 @@ public class DataStreamLifecyclePermissionsRestIT extends ESRestTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) + .feature(FeatureFlag.FAILURE_STORE_ENABLED) .setting("xpack.watcher.enabled", "false") .setting("xpack.ml.enabled", "false") .setting("xpack.security.enabled", "true") diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java index 4d2c2af2266b1..21b1316e5685b 100644 --- 
a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java @@ -15,8 +15,8 @@ import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ResultDeduplicator; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; @@ -719,7 +719,7 @@ private void deleteIndexOnce(String indexName, String reason) { transportActionsDeduplicator.executeOnce( deleteIndexRequest, new ErrorRecordingActionListener( - DeleteIndexAction.NAME, + TransportDeleteIndexAction.TYPE.name(), indexName, errorStore, Strings.format("Data stream lifecycle encountered an error trying to delete index [%s]", indexName), diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java index baa163c1ae75e..67bfae0740fb5 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java @@ -26,9 +26,8 @@ */ public class DeleteDataStreamLifecycleAction { - public static final ActionType INSTANCE = new ActionType<>( - "indices:admin/data_stream/lifecycle/delete", - AcknowledgedResponse::readFrom + public static final ActionType INSTANCE = ActionType.acknowledgedResponse( + "indices:admin/data_stream/lifecycle/delete" ); private DeleteDataStreamLifecycleAction() {/* no instances */} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamLifecycleAction.java index a4f4b88d17bca..f01d06fda8101 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamLifecycleAction.java @@ -40,9 +40,8 @@ */ public class PutDataStreamLifecycleAction { - public static final ActionType INSTANCE = new ActionType<>( - "indices:admin/data_stream/lifecycle/put", - AcknowledgedResponse::readFrom + public static final ActionType INSTANCE = ActionType.acknowledgedResponse( + "indices:admin/data_stream/lifecycle/put" ); private PutDataStreamLifecycleAction() {/* no instances */} diff --git a/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java b/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java index fa7b4ca1a80c0..37a83deeb3550 100644 --- a/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java +++ 
b/modules/data-streams/src/yamlRestTest/java/org/elasticsearch/datastreams/DataStreamsClientYamlTestSuiteIT.java @@ -19,6 +19,8 @@ import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; import org.junit.ClassRule; +import static org.elasticsearch.test.cluster.FeatureFlag.FAILURE_STORE_ENABLED; + public class DataStreamsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public DataStreamsClientYamlTestSuiteIT(final ClientYamlTestCandidate testCandidate) { @@ -43,6 +45,7 @@ protected Settings restClientSettings() { private static ElasticsearchCluster createCluster() { LocalClusterSpecBuilder clusterBuilder = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) + .feature(FAILURE_STORE_ENABLED) .setting("xpack.security.enabled", "true") .keystore("bootstrap.password", "x-pack-test-password") .user("x_pack_rest_user", "x-pack-test-password"); diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml index b1e0cf8ed7d90..6496930764ab8 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml @@ -210,10 +210,8 @@ setup: --- "Create data stream with failure store": - skip: - version: all - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/102873" -# version: " - 8.10.99" -# reason: "data stream failure stores only creatable in 8.11+" + version: " - 8.10.99" + reason: "data stream failure stores only creatable in 8.11+" - do: allowed_warnings: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml index a7d8476ee2dcf..303a584555f8f 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml @@ -50,10 +50,8 @@ --- "Put index template with failure store": - skip: - version: all - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/102873" -# version: " - 8.10.99" -# reason: "data stream failure stores only creatable in 8.11+" + version: " - 8.10.99" + reason: "data stream failure stores only creatable in 8.11+" features: allowed_warnings - do: diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldMapper.java index 7342a432dd5df..c4634f8d52729 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureFieldMapper.java @@ -9,8 +9,6 @@ package org.elasticsearch.index.mapper.extras; import org.apache.lucene.document.FeatureField; -import org.apache.lucene.document.FieldType; -import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -42,18 +40,6 @@ public class RankFeatureFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "rank_feature"; - public static class Defaults { - public 
static final FieldType FIELD_TYPE; - - static { - FieldType ft = new FieldType(); - ft.setTokenized(false); - ft.setIndexOptions(IndexOptions.NONE); - ft.setOmitNorms(true); - FIELD_TYPE = freezeAndDeduplicateFieldType(ft); - } - } - private static RankFeatureFieldType ft(FieldMapper in) { return ((RankFeatureFieldMapper) in).fieldType(); } diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java index 12829ca802425..4d04e83361252 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/TokenCountFieldMapper.java @@ -206,14 +206,6 @@ public String analyzer() { return analyzer.name(); } - /** - * Indicates if position increments are counted. - * @return true if position increments are counted - */ - public boolean enablePositionIncrements() { - return enablePositionIncrements; - } - @Override protected String contentType() { return CONTENT_TYPE; diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java index 11c726481d0b3..408b3f204de1a 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java @@ -143,7 +143,7 @@ boolean matchDocId(int docId) throws IOException { } @Override - public float score() throws IOException { + public float score() { return score; } }; diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index 886a67443e831..9a2653a61b60d 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -664,6 +664,11 @@ public > IFD getForField( CircuitBreakerService circuitBreaker = new NoneCircuitBreakerService(); return (IFD) builder.build(cache, circuitBreaker); } + + @Override + public void addNamedQuery(String name, Query query) { + delegate.addNamedQuery(name, query); + } }; } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index e212264287937..be8d342254afd 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -530,6 +530,8 @@ static SearchExecutionContext configureContext(SearchExecutionContext context, b // as an analyzed string. 
wrapped.setAllowUnmappedFields(false); wrapped.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString); + // We need to rewrite queries with name to Lucene NamedQuery to find matched sub-queries of percolator query + wrapped.setRewriteToNamedQueries(); return wrapped; } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java index 43f365a2a722b..83703dcf10971 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java @@ -11,6 +11,7 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.NamedMatches; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; @@ -96,7 +97,30 @@ public void process(HitContext hitContext) throws IOException { IntStream slots = convertTopDocsToSlots(topDocs, pc.rootDocsBySlot); // _percolator_document_slot fields are document fields and should be under "fields" section in a hit - hitContext.hit().setDocumentField(fieldName, new DocumentField(fieldName, slots.boxed().collect(Collectors.toList()))); + List docSlots = slots.boxed().collect(Collectors.toList()); + hitContext.hit().setDocumentField(fieldName, new DocumentField(fieldName, docSlots)); + + // Add info what sub-queries of percolator query matched this each percolated document + if (fetchContext.getSearchExecutionContext().hasNamedQueries()) { + List leafContexts = percolatorIndexSearcher.getLeafContexts(); + assert leafContexts.size() == 1 : "Expected single leaf, but got [" + leafContexts.size() + "]"; + LeafReaderContext memoryReaderContext = leafContexts.get(0); + Weight weight = percolatorIndexSearcher.createWeight(query, ScoreMode.COMPLETE_NO_SCORES, 1); + for (int i = 0; i < topDocs.scoreDocs.length; i++) { + List namedMatchesList = NamedMatches.findNamedMatches( + weight.matches(memoryReaderContext, topDocs.scoreDocs[i].doc) + ); + if (namedMatchesList.isEmpty()) { + continue; + } + List matchedQueries = new ArrayList<>(namedMatchesList.size()); + for (NamedMatches match : namedMatchesList) { + matchedQueries.add(match.getName()); + } + String matchedFieldName = fieldName + "_" + docSlots.get(i) + "_matched_queries"; + hitContext.hit().setDocumentField(matchedFieldName, new DocumentField(matchedFieldName, matchedQueries)); + } + } } } }; diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java index 5f3ff5264497a..a924c0e323f96 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java @@ -9,7 +9,9 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.join.ScoreMode; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; @@ -24,6 +26,7 
@@ import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; +import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.lookup.LeafDocLookup; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -36,6 +39,7 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.function.Function; @@ -335,4 +339,93 @@ public void testRangeQueriesWithNow() throws Exception { } } + public void testPercolateNamedQueries() { + String mapping = """ + { + "dynamic" : "strict", + "properties" : { + "my_query" : { "type" : "percolator" }, + "description" : { "type" : "text"}, + "num_of_bedrooms" : { "type" : "integer"}, + "type" : { "type" : "keyword"}, + "price": { "type": "float"} + } + } + """; + indicesAdmin().prepareCreate("houses").setMapping(mapping).get(); + String source = """ + { + "my_query" : { + "bool": { + "should": [ + { "match": { "description": { "query": "fireplace", "_name": "fireplace_query" } } }, + { "match": { "type": { "query": "detached", "_name": "detached_query" } } } + ], + "filter": { + "match": { + "num_of_bedrooms": {"query": 3, "_name": "3_bedrooms_query"} + } + } + } + } + } + """; + prepareIndex("houses").setId("query_3_bedroom_detached_house_with_fireplace").setSource(source, XContentType.JSON).get(); + indicesAdmin().prepareRefresh().get(); + + source = """ + { + "my_query" : { + "bool": { + "filter": [ + { "match": { "description": { "query": "swimming pool", "_name": "swimming_pool_query" } } }, + { "match": { "num_of_bedrooms": {"query": 3, "_name": "3_bedrooms_query"} } } + ] + } + } + } + """; + prepareIndex("houses").setId("query_3_bedroom_house_with_swimming_pool").setSource(source, XContentType.JSON).get(); + indicesAdmin().prepareRefresh().get(); + + BytesArray house1_doc = new BytesArray(""" + { + "description": "house with a beautiful fireplace and swimming pool", + "num_of_bedrooms": 3, + "type": "detached", + "price": 1000000 + } + """); + + BytesArray house2_doc = new BytesArray(""" + { + "description": "house has a wood burning fireplace", + "num_of_bedrooms": 3, + "type": "semi-detached", + "price": 500000 + } + """); + + QueryBuilder query = new PercolateQueryBuilder("my_query", List.of(house1_doc, house2_doc), XContentType.JSON); + SearchResponse response = client().prepareSearch("houses").setQuery(query).get(); + assertEquals(2, response.getHits().getTotalHits().value); + + SearchHit[] hits = response.getHits().getHits(); + assertThat(hits[0].getFields().get("_percolator_document_slot").getValues(), equalTo(Arrays.asList(0, 1))); + assertThat( + hits[0].getFields().get("_percolator_document_slot_0_matched_queries").getValues(), + equalTo(Arrays.asList("fireplace_query", "detached_query", "3_bedrooms_query")) + ); + assertThat( + hits[0].getFields().get("_percolator_document_slot_1_matched_queries").getValues(), + equalTo(Arrays.asList("fireplace_query", "3_bedrooms_query")) + ); + + assertThat(hits[1].getFields().get("_percolator_document_slot").getValues(), equalTo(Arrays.asList(0))); + assertThat( + hits[1].getFields().get("_percolator_document_slot_0_matched_queries").getValues(), + equalTo(Arrays.asList("swimming_pool_query", "3_bedrooms_query")) + ); + } + } diff --git a/modules/percolator/src/yamlRestTest/resources/rest-api-spec/test/20_matched_queries.yml 
b/modules/percolator/src/yamlRestTest/resources/rest-api-spec/test/20_matched_queries.yml new file mode 100644 index 0000000000000..1e692bc43faba --- /dev/null +++ b/modules/percolator/src/yamlRestTest/resources/rest-api-spec/test/20_matched_queries.yml @@ -0,0 +1,125 @@ +setup: + - skip: + version: " - 8.12.99" + reason: "Displaying matched named queries within percolator queries was added in 8.13" + - do: + indices.create: + index: houses + body: + mappings: + dynamic: strict + properties: + my_query: + type: percolator + description: + type: text + num_of_bedrooms: + type: integer + type: + type: keyword + price: + type: integer + + - do: + index: + refresh: true + index: houses + id: query_3_bedroom_detached_house_with_fireplace + body: + my_query: + { + "bool": { + "should": [ + { "match": { "description": { "query": "fireplace"} } }, + { "match": { "type": { "query": "detached", "_name": "detached_query" } } } + ], + "filter": { + "match": { + "num_of_bedrooms": {"query": 3, "_name": "3_bedrooms_query"} + } + } + } + } + + - do: + index: + refresh: true + index: houses + id: query_3_bedroom_house_with_swimming_pool + body: + my_query: + { + "bool": { + "filter": [ + { "match": { "description": { "query": "swimming pool", "_name": "swimming_pool_query" } } }, + { "match": { "num_of_bedrooms": {"query": 3, "_name": "3_bedrooms_query"} } } + ] + } + } + +--- +"Matched named queries within percolator queries: percolate existing document": + - do: + index: + refresh: true + index: houses + id: house1 + body: + description: "house with a beautiful fireplace and swimming pool" + num_of_bedrooms: 3 + type: detached + price: 1000000 + + - do: + search: + index: houses + body: + query: + percolate: + field: my_query + index: houses + id: house1 + + - match: { hits.total.value: 2 } + + - match: { hits.hits.0._id: query_3_bedroom_detached_house_with_fireplace } + - match: { hits.hits.0.fields._percolator_document_slot: [0] } + - match: { hits.hits.0.fields._percolator_document_slot_0_matched_queries: ["detached_query", "3_bedrooms_query"] } + + - match: { hits.hits.1._id: query_3_bedroom_house_with_swimming_pool } + - match: { hits.hits.1.fields._percolator_document_slot: [0] } + - match: { hits.hits.1.fields._percolator_document_slot_0_matched_queries: ["swimming_pool_query", "3_bedrooms_query"] } + + +--- +"Matched named queries within percolator queries: percolate multiple documents in request": + - do: + search: + index: houses + body: + query: + percolate: + field: my_query + documents: + - { + "description": "house with a beautiful fireplace and swimming pool", + "num_of_bedrooms": 3, + "type": "detached", + "price": 1000000 + } + - { + "description": "house has a wood burning fireplace", + "num_of_bedrooms": 3, + "type": "semi-detached", + "price": 500000 + } + + - match: { hits.total.value: 2 } + + - match: { hits.hits.0._id: query_3_bedroom_detached_house_with_fireplace } + - match: { hits.hits.0.fields._percolator_document_slot: [0, 1] } + - match: { hits.hits.0.fields._percolator_document_slot_0_matched_queries: ["detached_query", "3_bedrooms_query"] } + + - match: { hits.hits.1._id: query_3_bedroom_house_with_swimming_pool } + - match: { hits.hits.1.fields._percolator_document_slot: [0] } + - match: { hits.hits.1.fields._percolator_document_slot_0_matched_queries: ["swimming_pool_query", "3_bedrooms_query"] } diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java 
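The Java and YAML tests above pin down the response contract for this feature: every percolator hit carries a _percolator_document_slot field, and for each percolated document that matched at least one named sub-query the hit also carries a _percolator_document_slot_<slot>_matched_queries field. As a rough client-side sketch only (the helper name below is invented for this illustration; the field names and accessor calls are taken from the tests above), the matched query names could be grouped by slot like this:

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.search.SearchHit;

// Sketch, not part of this change: group the matched named queries by percolated document slot.
static Map<Integer, List<String>> matchedQueriesPerSlot(SearchHit hit) {
    Map<Integer, List<String>> result = new HashMap<>();
    DocumentField slots = hit.getFields().get("_percolator_document_slot");
    if (slots == null) {
        return result; // this hit was not produced by a percolate query
    }
    for (Object slot : slots.getValues()) {
        // slots hold Integer values; documents that matched no named sub-query have no *_matched_queries field
        DocumentField matched = hit.getFields().get("_percolator_document_slot_" + slot + "_matched_queries");
        result.put((Integer) slot, matched == null ? List.of() : matched.getValues().stream().map(Object::toString).toList());
    }
    return result;
}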
b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java index 50dea29683540..a4f939fbe3af8 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java @@ -9,12 +9,12 @@ package org.elasticsearch.index.reindex; import org.apache.lucene.search.TotalHits; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.AbstractMultiClustersTestCase; import java.util.Collection; @@ -60,11 +60,9 @@ public void testReindexFromRemoteGivenIndexExists() throws Exception { new ReindexRequestBuilder(client(LOCAL_CLUSTER)).source(sourceIndexInRemote).destination("desc-index-001").get(); assertTrue("Number of documents in source and desc indexes does not match", waitUntil(() -> { - SearchResponse resp = client(LOCAL_CLUSTER).prepareSearch("desc-index-001") - .setQuery(new MatchAllQueryBuilder()) - .setSize(1000) - .get(); - final TotalHits totalHits = resp.getHits().getTotalHits(); + final TotalHits totalHits = SearchResponseUtils.getTotalHits( + client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) + ); return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; })); } @@ -77,11 +75,9 @@ public void testReindexFromRemoteGivenSameIndexNames() throws Exception { new ReindexRequestBuilder(client(LOCAL_CLUSTER)).source(sourceIndexInRemote).destination("test-index-001").get(); assertTrue("Number of documents in source and desc indexes does not match", waitUntil(() -> { - SearchResponse resp = client(LOCAL_CLUSTER).prepareSearch("test-index-001") - .setQuery(new MatchAllQueryBuilder()) - .setSize(1000) - .get(); - final TotalHits totalHits = resp.getHits().getTotalHits(); + final TotalHits totalHits = SearchResponseUtils.getTotalHits( + client(LOCAL_CLUSTER).prepareSearch("test-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) + ); return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; })); } @@ -108,11 +104,9 @@ public void testReindexManyTimesFromRemoteGivenSameIndexNames() throws Exception } assertTrue("Number of documents in source and desc indexes does not match", waitUntil(() -> { - SearchResponse resp = client(LOCAL_CLUSTER).prepareSearch("test-index-001") - .setQuery(new MatchAllQueryBuilder()) - .setSize(1000) - .get(); - final TotalHits totalHits = resp.getHits().getTotalHits(); + final TotalHits totalHits = SearchResponseUtils.getTotalHits( + client(LOCAL_CLUSTER).prepareSearch("test-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) + ); return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; })); } @@ -142,11 +136,9 @@ public void testReindexFromRemoteGivenSimpleDateMathIndexName() throws Interrupt new ReindexRequestBuilder(client(LOCAL_CLUSTER)).source(sourceIndexInRemote).destination("desc-index-001").get(); assertTrue("Number of documents in source and desc indexes does not match", waitUntil(() -> { - SearchResponse resp = client(LOCAL_CLUSTER).prepareSearch("desc-index-001") - 
.setQuery(new MatchAllQueryBuilder()) - .setSize(1000) - .get(); - final TotalHits totalHits = resp.getHits().getTotalHits(); + final TotalHits totalHits = SearchResponseUtils.getTotalHits( + client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) + ); return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; })); } @@ -160,11 +152,9 @@ public void testReindexFromRemoteGivenComplexDateMathIndexName() throws Interrup new ReindexRequestBuilder(client(LOCAL_CLUSTER)).source(sourceIndexInRemote).destination("desc-index-001").get(); assertTrue("Number of documents in source and desc indexes does not match", waitUntil(() -> { - SearchResponse resp = client(LOCAL_CLUSTER).prepareSearch("desc-index-001") - .setQuery(new MatchAllQueryBuilder()) - .setSize(1000) - .get(); - final TotalHits totalHits = resp.getHits().getTotalHits(); + final TotalHits totalHits = SearchResponseUtils.getTotalHits( + client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) + ); return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; })); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/DeleteByQueryBasicTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/DeleteByQueryBasicTests.java index fac18c4f6f544..17dd1503e6c89 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/DeleteByQueryBasicTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/DeleteByQueryBasicTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryRequestBuilder; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.InternalSettingsPlugin; @@ -159,7 +160,7 @@ public void testDeleteByQueryWithRouting() throws Exception { String routing = String.valueOf(randomIntBetween(2, docs)); logger.info("--> counting documents with routing [{}]", routing); - long expected = prepareSearch().setSize(0).setRouting(routing).get().getHits().getTotalHits().value; + long expected = SearchResponseUtils.getTotalHitsValue(prepareSearch().setSize(0).setRouting(routing)); logger.info("--> delete all documents with routing [{}] with a delete-by-query", routing); DeleteByQueryRequestBuilder delete = deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()); diff --git a/modules/repository-url/src/internalClusterTest/java/org/elasticsearch/repositories/url/URLSnapshotRestoreIT.java b/modules/repository-url/src/internalClusterTest/java/org/elasticsearch/repositories/url/URLSnapshotRestoreIT.java index a47b9d8b622b5..e0d8eb86613ba 100644 --- a/modules/repository-url/src/internalClusterTest/java/org/elasticsearch/repositories/url/URLSnapshotRestoreIT.java +++ b/modules/repository-url/src/internalClusterTest/java/org/elasticsearch/repositories/url/URLSnapshotRestoreIT.java @@ -26,6 +26,7 @@ import java.util.Collections; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -63,7 +64,7 @@ public void testUrlRepository() throws Exception { indexDoc("test-idx", Integer.toString(i), "foo", "bar" + i); } refresh(); - 
assertThat(client.prepareSearch("test-idx").setSize(0).get().getHits().getTotalHits().value, equalTo(100L)); + assertHitCount(client.prepareSearch("test-idx").setSize(0), 100); logger.info("--> snapshot"); CreateSnapshotResponse createSnapshotResponse = client.admin() @@ -110,7 +111,7 @@ public void testUrlRepository() throws Exception { .get(); assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0)); - assertThat(client.prepareSearch("test-idx").setSize(0).get().getHits().getTotalHits().value, equalTo(100L)); + assertHitCount(client.prepareSearch("test-idx").setSize(0), 100); logger.info("--> list available shapshots"); GetSnapshotsResponse getSnapshotsResponse = client.admin().cluster().prepareGetSnapshots("url-repo").get(); diff --git a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java index 706fb057cc8ee..c1e2888c47c62 100644 --- a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java +++ b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java @@ -8,10 +8,8 @@ package org.elasticsearch.index.mapper.murmur3; -import org.apache.lucene.document.FieldType; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.StoredField; -import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.hash.MurmurHash3; @@ -36,15 +34,6 @@ public class Murmur3FieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "murmur3"; - public static class Defaults { - public static final FieldType FIELD_TYPE; - static { - FieldType ft = new FieldType(); - ft.setIndexOptions(IndexOptions.NONE); - FIELD_TYPE = freezeAndDeduplicateFieldType(ft); - } - } - private static Murmur3FieldMapper toType(FieldMapper in) { return (Murmur3FieldMapper) in; } diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java index 16e8d2610f3fb..313dcdd6623c4 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.repositories.blobstore.BlobStoreTestUtil; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -276,6 +277,6 @@ public void testReplicationFactorOverReplicationMax() { } private long count(Client client, String index) { - return client.prepareSearch(index).setSize(0).get().getHits().getTotalHits().value; + return SearchResponseUtils.getTotalHitsValue(client.prepareSearch(index).setSize(0)); } } diff --git a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java index 7c1514d2d1a6a..b818de468ea2c 100644 --- a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java +++ 
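The reindex, delete-by-query, URL-repository and HDFS tests above all swap the prepareSearch(...).get().getHits().getTotalHits() chain for SearchResponseUtils.getTotalHits / getTotalHitsValue (or assertHitCount). The utility's source is not shown in this diff; as an assumption for illustration, a helper with that signature presumably executes the request and releases the ref-counted SearchResponse before returning, roughly:

import org.apache.lucene.search.TotalHits;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;

// Assumed shape only; the real SearchResponseUtils implementation is not part of this diff.
static TotalHits getTotalHits(SearchRequestBuilder request) {
    SearchResponse response = request.get();
    try {
        return response.getHits().getTotalHits();
    } finally {
        response.decRef(); // release the response so tests do not leak ref-counted resources
    }
}

static long getTotalHitsValue(SearchRequestBuilder request) {
    return getTotalHits(request).value;
}

This keeps the response lifecycle handling in one place instead of repeating a try/finally block around every test search.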
b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java @@ -29,6 +29,7 @@ import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; +import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; @@ -50,6 +51,7 @@ import java.util.Map; import java.util.Set; import java.util.function.BiPredicate; +import java.util.function.Predicate; import static java.util.Collections.unmodifiableList; @@ -159,7 +161,6 @@ public void initSearchClient() throws IOException { Tuple versionVersionTuple = readVersionsFromCatNodes(adminSearchClient); final Version esVersion = versionVersionTuple.v1(); - final Version masterVersion = versionVersionTuple.v2(); final String os = readOsFromNodesInfo(adminSearchClient); searchYamlTestClient = new TestCandidateAwareClient( @@ -167,7 +168,7 @@ public void initSearchClient() throws IOException { searchClient, hosts, esVersion, - masterVersion, + ESRestTestCase::clusterHasFeature, os, this::getClientBuilderWithSniffedHosts ); @@ -328,11 +329,11 @@ static class TestCandidateAwareClient extends ClientYamlTestClient { RestClient restClient, List hosts, Version esVersion, - Version masterVersion, + Predicate clusterFeaturesPredicate, String os, CheckedSupplier clientBuilderWithSniffedNodes ) { - super(restSpec, restClient, hosts, esVersion, masterVersion, os, clientBuilderWithSniffedNodes); + super(restSpec, restClient, hosts, esVersion, clusterFeaturesPredicate, os, clientBuilderWithSniffedNodes); } public void setTestCandidate(ClientYamlTestCandidate testCandidate) { diff --git a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java index 4fc82bb77fbb6..51d499db61932 100644 --- a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java +++ b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT.TestCandidateAwareClient; import org.junit.AfterClass; @@ -222,7 +223,6 @@ public void initSearchClient() throws IOException { Tuple versionVersionTuple = readVersionsFromCatNodes(adminSearchClient); final Version esVersion = versionVersionTuple.v1(); - final Version masterVersion = versionVersionTuple.v2(); final String os = readOsFromNodesInfo(adminSearchClient); searchYamlTestClient = new TestCandidateAwareClient( @@ -230,7 +230,7 @@ public void initSearchClient() throws IOException { searchClient, hosts, esVersion, - masterVersion, + ESRestTestCase::clusterHasFeature, os, this::getClientBuilderWithSniffedHosts ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java index 
36f317474f5a9..f11144d698242 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java @@ -14,8 +14,8 @@ import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.flush.TransportShardFlushAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; @@ -502,14 +502,14 @@ public void testCloseIndex() { } public void testDeleteIndex() { - interceptTransportActions(DeleteIndexAction.NAME); + interceptTransportActions(TransportDeleteIndexAction.TYPE.name()); String[] randomIndicesOrAliases = randomUniqueIndices(); DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(randomIndicesOrAliases); assertAcked(internalCluster().coordOnlyNodeClient().admin().indices().delete(deleteIndexRequest).actionGet()); clearInterceptedActions(); - assertSameIndices(deleteIndexRequest, DeleteIndexAction.NAME); + assertSameIndices(deleteIndexRequest, TransportDeleteIndexAction.TYPE.name()); } public void testGetMappings() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java index 05e3b81c3683f..45906abd29ff8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java @@ -7,14 +7,18 @@ */ package org.elasticsearch.action.admin; +import org.apache.logging.log4j.Level; import org.apache.lucene.util.Constants; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodeHotThreads; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequestBuilder; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsResponse; +import org.elasticsearch.common.ReferenceDocs; +import org.elasticsearch.common.logging.ChunkedLoggingStreamTests; import org.elasticsearch.core.TimeValue; import org.elasticsearch.monitor.jvm.HotThreads; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.hamcrest.Matcher; import java.util.Map; @@ -29,6 +33,7 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.not; @@ -176,4 +181,25 @@ public void testTimestampAndParams() throws ExecutionException, InterruptedExcep } } } + + @TestLogging(reason = "testing logging at various levels", value = "org.elasticsearch.action.admin.HotThreadsIT:TRACE") + public void testLogLocalHotThreads() { + final var level = randomFrom(Level.TRACE, Level.DEBUG, Level.INFO, Level.WARN, Level.ERROR); + assertThat( + 
ChunkedLoggingStreamTests.getDecodedLoggedBody( + logger, + level, + getTestName(), + ReferenceDocs.LOGGING, + () -> HotThreads.logLocalHotThreads(logger, level, getTestName(), ReferenceDocs.LOGGING) + ).utf8ToString(), + allOf( + containsString("Hot threads at"), + containsString("interval=500ms"), + containsString("busiestThreads=500"), + containsString("ignoreIdleThreads=false"), + containsString("cpu usage by thread") + ) + ); + } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceActionIT.java index c9f3b0202b111..a4cf7843beb41 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceActionIT.java @@ -40,7 +40,7 @@ public void testDesiredBalanceOnMultiNodeCluster() throws Exception { var clusterHealthResponse = clusterAdmin().health(new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.GREEN)).get(); assertEquals(RestStatus.OK, clusterHealthResponse.status()); - DesiredBalanceResponse desiredBalanceResponse = client().execute(GetDesiredBalanceAction.INSTANCE, new DesiredBalanceRequest()) + DesiredBalanceResponse desiredBalanceResponse = client().execute(TransportGetDesiredBalanceAction.TYPE, new DesiredBalanceRequest()) .get(); assertEquals(1, desiredBalanceResponse.getRoutingTable().size()); @@ -75,7 +75,7 @@ public void testDesiredBalanceWithUnassignedShards() throws Exception { var clusterHealthResponse = clusterAdmin().health(new ClusterHealthRequest(index).waitForStatus(ClusterHealthStatus.YELLOW)).get(); assertEquals(RestStatus.OK, clusterHealthResponse.status()); - DesiredBalanceResponse desiredBalanceResponse = client().execute(GetDesiredBalanceAction.INSTANCE, new DesiredBalanceRequest()) + DesiredBalanceResponse desiredBalanceResponse = client().execute(TransportGetDesiredBalanceAction.TYPE, new DesiredBalanceRequest()) .get(); assertEquals(1, desiredBalanceResponse.getRoutingTable().size()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDesiredNodesActionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDesiredNodesActionsIT.java index 76d456bae1c06..a3c1304cfbae9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDesiredNodesActionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDesiredNodesActionsIT.java @@ -271,7 +271,9 @@ public void testDeleteDesiredNodesTasksAreBatchedCorrectly() throws Exception { final List> deleteDesiredNodesFutures = new ArrayList<>(15); for (int i = 0; i < 15; i++) { - deleteDesiredNodesFutures.add(client().execute(DeleteDesiredNodesAction.INSTANCE, new DeleteDesiredNodesAction.Request())); + deleteDesiredNodesFutures.add( + client().execute(TransportDeleteDesiredNodesAction.TYPE, new TransportDeleteDesiredNodesAction.Request()) + ); } for (ActionFuture future : deleteDesiredNodesFutures) { @@ -347,8 +349,8 @@ private UpdateDesiredNodesRequest randomDryRunUpdateDesiredNodesRequest(Version } private void deleteDesiredNodes() { - final DeleteDesiredNodesAction.Request request = new 
DeleteDesiredNodesAction.Request(); - client().execute(DeleteDesiredNodesAction.INSTANCE, request).actionGet(); + final TransportDeleteDesiredNodesAction.Request request = new TransportDeleteDesiredNodesAction.Request(); + client().execute(TransportDeleteDesiredNodesAction.TYPE, request).actionGet(); } private DesiredNodes getLatestDesiredNodes() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/ListTasksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/ListTasksIT.java index d99ebe6a3e2e7..cb508334f835e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/ListTasksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/ListTasksIT.java @@ -142,10 +142,7 @@ protected Collection> getPlugins() { return List.of(TestPlugin.class); } - private static final ActionType TEST_ACTION = new ActionType<>( - TestTransportAction.NAME, - in -> ActionResponse.Empty.INSTANCE - ); + private static final ActionType TEST_ACTION = ActionType.emptyResponse(TestTransportAction.NAME); public static class TestPlugin extends Plugin implements ActionPlugin { volatile CyclicBarrier barrier; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java index 09c14df3566af..d3cbab2760747 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java @@ -8,10 +8,10 @@ package org.elasticsearch.cluster; -import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; -import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsRequest; +import org.elasticsearch.action.admin.cluster.configuration.TransportAddVotingConfigExclusionsAction; +import org.elasticsearch.action.admin.cluster.configuration.TransportClearVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.coordination.FailedToCommitClusterStateException; @@ -111,7 +111,7 @@ public void testTwoNodesNoMasterBlock() throws Exception { String masterNode = internalCluster().getMasterName(); String otherNode = node1Name.equals(masterNode) ? 
node2Name : node1Name; logger.info("--> add voting config exclusion for non-master node, to be sure it's not elected"); - client().execute(AddVotingConfigExclusionsAction.INSTANCE, new AddVotingConfigExclusionsRequest(otherNode)).get(); + client().execute(TransportAddVotingConfigExclusionsAction.TYPE, new AddVotingConfigExclusionsRequest(otherNode)).get(); logger.info("--> stop master node, no master block should appear"); Settings masterDataPathSettings = internalCluster().dataPathSettings(masterNode); internalCluster().stopNode(masterNode); @@ -156,12 +156,12 @@ public void testTwoNodesNoMasterBlock() throws Exception { logger.info("--> clearing voting config exclusions"); ClearVotingConfigExclusionsRequest clearRequest = new ClearVotingConfigExclusionsRequest(); clearRequest.setWaitForRemoval(false); - client().execute(ClearVotingConfigExclusionsAction.INSTANCE, clearRequest).get(); + client().execute(TransportClearVotingConfigExclusionsAction.TYPE, clearRequest).get(); masterNode = internalCluster().getMasterName(); otherNode = node1Name.equals(masterNode) ? node2Name : node1Name; logger.info("--> add voting config exclusion for master node, to be sure it's not elected"); - client().execute(AddVotingConfigExclusionsAction.INSTANCE, new AddVotingConfigExclusionsRequest(masterNode)).get(); + client().execute(TransportAddVotingConfigExclusionsAction.TYPE, new AddVotingConfigExclusionsRequest(masterNode)).get(); logger.info("--> stop non-master node, no master block should appear"); Settings otherNodeDataPathSettings = internalCluster().dataPathSettings(otherNode); internalCluster().stopNode(otherNode); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/NoMasterNodeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/NoMasterNodeIT.java index 23c13a3dbf579..aa54e46389676 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/NoMasterNodeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/NoMasterNodeIT.java @@ -9,8 +9,8 @@ package org.elasticsearch.cluster; import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; +import org.elasticsearch.action.admin.cluster.configuration.TransportAddVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.get.GetResponse; @@ -320,7 +320,7 @@ public void testNoMasterActionsMetadataWriteMasterBlock() throws Exception { .toList(); client().execute( - AddVotingConfigExclusionsAction.INSTANCE, + TransportAddVotingConfigExclusionsAction.TYPE, new AddVotingConfigExclusionsRequest(nodesWithShards.toArray(new String[0])) ).get(); ensureGreen("test1"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java index 93d714c79c391..43506647f89ba 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java @@ -9,8 +9,8 @@ package org.elasticsearch.cluster; import org.apache.lucene.search.join.ScoreMode; -import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsAction; 
import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; +import org.elasticsearch.action.admin.cluster.configuration.TransportAddVotingConfigExclusionsAction; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.index.query.QueryBuilders; @@ -113,7 +113,7 @@ public void testElectOnlyBetweenMasterNodes() throws Exception { ); logger.info("--> closing master node (1)"); - client().execute(AddVotingConfigExclusionsAction.INSTANCE, new AddVotingConfigExclusionsRequest(masterNodeName)).get(); + client().execute(TransportAddVotingConfigExclusionsAction.TYPE, new AddVotingConfigExclusionsRequest(masterNodeName)).get(); // removing the master from the voting configuration immediately triggers the master to step down assertBusy(() -> { assertThat( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/VotingConfigurationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/VotingConfigurationIT.java index dee6ac3859b15..b0cc81bf34811 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/VotingConfigurationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/VotingConfigurationIT.java @@ -8,8 +8,8 @@ package org.elasticsearch.cluster.coordination; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; +import org.elasticsearch.action.admin.cluster.configuration.TransportAddVotingConfigExclusionsAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Priority; @@ -43,7 +43,7 @@ public void testAbdicateAfterVotingConfigExclusionAdded() throws ExecutionExcept final String originalMaster = internalCluster().getMasterName(); logger.info("--> excluding master node {}", originalMaster); - client().execute(AddVotingConfigExclusionsAction.INSTANCE, new AddVotingConfigExclusionsRequest(originalMaster)).get(); + client().execute(TransportAddVotingConfigExclusionsAction.TYPE, new AddVotingConfigExclusionsRequest(originalMaster)).get(); clusterAdmin().prepareHealth().setWaitForEvents(Priority.LANGUID).get(); assertNotEquals(originalMaster, internalCluster().getMasterName()); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/server/src/internalClusterTest/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index f05a83e861e52..3baabe4cc888e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -8,10 +8,10 @@ package org.elasticsearch.gateway; -import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; -import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsRequest; +import org.elasticsearch.action.admin.cluster.configuration.TransportAddVotingConfigExclusionsAction; +import 
org.elasticsearch.action.admin.cluster.configuration.TransportClearVotingConfigExclusionsAction; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; @@ -316,7 +316,7 @@ public void testTwoNodeFirstNodeCleared() throws Exception { Map primaryTerms = assertAndCapturePrimaryTerms(null); - client().execute(AddVotingConfigExclusionsAction.INSTANCE, new AddVotingConfigExclusionsRequest(firstNode)).get(); + client().execute(TransportAddVotingConfigExclusionsAction.TYPE, new AddVotingConfigExclusionsRequest(firstNode)).get(); internalCluster().fullRestart(new RestartCallback() { @Override @@ -342,7 +342,7 @@ public boolean clearData(String nodeName) { assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 2); } - client().execute(ClearVotingConfigExclusionsAction.INSTANCE, new ClearVotingConfigExclusionsRequest()).get(); + client().execute(TransportClearVotingConfigExclusionsAction.TYPE, new ClearVotingConfigExclusionsRequest()).get(); } public void testLatestVersionLoaded() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java index 1a8f928d9c10f..199a397f52ad2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; @@ -48,14 +49,7 @@ public class SearchIdleIT extends ESSingleNodeTestCase { public void testAutomaticRefreshSearch() throws InterruptedException { - runTestAutomaticRefresh(numDocs -> { - var resp = client().prepareSearch("test").get(); - try { - return resp.getHits().getTotalHits().value; - } finally { - resp.decRef(); - } - }); + runTestAutomaticRefresh(numDocs -> SearchResponseUtils.getTotalHitsValue(client().prepareSearch("test"))); } public void testAutomaticRefreshGet() throws InterruptedException { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 2cbc3477cb49d..d40d2e02415b1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -782,6 +782,7 @@ public Settings onNodeStopped(String nodeName) { * Tests shard recovery throttling on the target node. Node statistics should show throttling time on the target node, while no * throttling should be shown on the source node because the target will accept data more slowly than the source's throttling threshold. 
*/ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103204") public void testTargetThrottling() throws Exception { logger.info("--> starting node A with default settings"); final String nodeA = internalCluster().startNode(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/RepositoriesFileSettingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/RepositoriesFileSettingsIT.java index 91cd5e0e6e971..9d6a53d8bc818 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/RepositoriesFileSettingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/RepositoriesFileSettingsIT.java @@ -10,8 +10,8 @@ import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; -import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; +import org.elasticsearch.action.admin.cluster.repositories.put.TransportPutRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.reservedstate.ReservedRepositoryAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterChangedEvent; @@ -151,7 +151,7 @@ private void assertClusterStateSaveOK(CountDownLatch savedClusterState, AtomicLo + "with errors: [[repo] set as read-only by [file_settings]]", expectThrows( IllegalArgumentException.class, - () -> client().execute(PutRepositoryAction.INSTANCE, sampleRestRequest("repo")).actionGet() + () -> client().execute(TransportPutRepositoryAction.TYPE, sampleRestRequest("repo")).actionGet() ).getMessage() ); } @@ -211,7 +211,7 @@ private void assertClusterStateNotSaved(CountDownLatch savedClusterState, Atomic ); // This should succeed, nothing was reserved - client().execute(PutRepositoryAction.INSTANCE, sampleRestRequest("err-repo")).get(); + client().execute(TransportPutRepositoryAction.TYPE, sampleRestRequest("err-repo")).get(); } public void testErrorSaved() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index 59fc54347d1d5..089f6c09806cd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -965,8 +965,7 @@ public void testRetentionLeasesClearedOnRestore() throws Exception { final String leaseId = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); logger.debug("--> adding retention lease with id {} to {}", leaseId, shardId); - client().execute(RetentionLeaseActions.Add.INSTANCE, new RetentionLeaseActions.AddRequest(shardId, leaseId, RETAIN_ALL, "test")) - .actionGet(); + client().execute(RetentionLeaseActions.ADD, new RetentionLeaseActions.AddRequest(shardId, leaseId, RETAIN_ALL, "test")).actionGet(); final ShardStats shardStats = Arrays.stream(indicesAdmin().prepareStats(indexName).get().getShards()) .filter(s -> s.getShardRouting().shardId().equals(shardId)) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoriesIT.java 
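A pattern repeats across the server-side changes in this area: standalone marker classes such as DeleteIndexAction, AddVotingConfigExclusionsAction or DeleteSnapshotAction, which existed only to hold an INSTANCE and a NAME, are removed, and the ActionType constant now lives on the corresponding Transport*Action as a TYPE field, so call sites switch from FooAction.INSTANCE / FooAction.NAME to TransportFooAction.TYPE / TYPE.name(). The sketch below uses placeholder Foo names purely to show the shape of the pattern; it is not code from this change:

// Inside TransportFooAction (placeholder names):
public static final ActionType<FooResponse> TYPE = new ActionType<>("cluster:admin/foo/example", FooResponse::new);
// The transport action's constructor passes TYPE.name() to its superclass, registration becomes
//     actions.register(TransportFooAction.TYPE, TransportFooAction.class);
// and client call sites become
//     client().execute(TransportFooAction.TYPE, new FooRequest()).actionGet();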
b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoriesIT.java index 2005d63ab6413..a6c8e0b08c9ed 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoriesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoriesIT.java @@ -11,7 +11,7 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesResponse; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; -import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotAction; +import org.elasticsearch.action.admin.cluster.snapshots.delete.TransportDeleteSnapshotAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -277,7 +277,7 @@ public void testRepositoryConflict() throws Exception { assertTrue( clusterAdmin().prepareListTasks() - .setActions(DeleteSnapshotAction.NAME) + .setActions(TransportDeleteSnapshotAction.TYPE.name()) .setDetailed(true) .get() .getTasks() diff --git a/server/src/main/java/org/elasticsearch/action/ActionListener.java b/server/src/main/java/org/elasticsearch/action/ActionListener.java index ae5835686425d..aebe4922e416a 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionListener.java +++ b/server/src/main/java/org/elasticsearch/action/ActionListener.java @@ -184,29 +184,6 @@ public String toString() { }; } - /** - * Adds a wrapper around a listener which catches exceptions thrown by its {@link #onResponse} method and feeds them to its - * {@link #onFailure} method. - */ - static ActionListener wrap(ActionListener delegate) { - return new ActionListener<>() { - @Override - public void onResponse(Response response) { - ActionListener.run(delegate, l -> l.onResponse(response)); - } - - @Override - public void onFailure(Exception e) { - safeOnFailure(delegate, e); - } - - @Override - public String toString() { - return "wrapped{" + delegate + "}"; - } - }; - } - /** * Notifies every given listener with the response passed to {@link #onResponse(Object)}. If a listener itself throws an exception * the exception is forwarded to {@link #onFailure(Exception)}. 
If in turn {@link #onFailure(Exception)} fails all remaining diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 01e51d47722f6..2039acda89b8a 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -10,20 +10,14 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainAction; -import org.elasticsearch.action.admin.cluster.allocation.DeleteDesiredBalanceAction; -import org.elasticsearch.action.admin.cluster.allocation.GetDesiredBalanceAction; import org.elasticsearch.action.admin.cluster.allocation.TransportClusterAllocationExplainAction; import org.elasticsearch.action.admin.cluster.allocation.TransportDeleteDesiredBalanceAction; import org.elasticsearch.action.admin.cluster.allocation.TransportGetDesiredBalanceAction; -import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsAction; -import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.configuration.TransportAddVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.configuration.TransportClearVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.coordination.ClusterFormationInfoAction; import org.elasticsearch.action.admin.cluster.coordination.CoordinationDiagnosticsAction; import org.elasticsearch.action.admin.cluster.coordination.MasterHistoryAction; -import org.elasticsearch.action.admin.cluster.desirednodes.DeleteDesiredNodesAction; import org.elasticsearch.action.admin.cluster.desirednodes.GetDesiredNodesAction; import org.elasticsearch.action.admin.cluster.desirednodes.TransportDeleteDesiredNodesAction; import org.elasticsearch.action.admin.cluster.desirednodes.TransportGetDesiredNodesAction; @@ -52,11 +46,9 @@ import org.elasticsearch.action.admin.cluster.remote.TransportRemoteInfoAction; import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.cleanup.TransportCleanupRepositoryAction; -import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.delete.TransportDeleteRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction; import org.elasticsearch.action.admin.cluster.repositories.get.TransportGetRepositoriesAction; -import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.put.TransportPutRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.verify.TransportVerifyRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryAction; @@ -68,11 +60,9 @@ import org.elasticsearch.action.admin.cluster.settings.TransportClusterUpdateSettingsAction; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction; import org.elasticsearch.action.admin.cluster.shards.TransportClusterSearchShardsAction; -import org.elasticsearch.action.admin.cluster.snapshots.clone.CloneSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.clone.TransportCloneSnapshotAction; import 
org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.create.TransportCreateSnapshotAction; -import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.delete.TransportDeleteSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.features.ResetFeatureStateAction; import org.elasticsearch.action.admin.cluster.snapshots.features.SnapshottableFeaturesAction; @@ -91,11 +81,9 @@ import org.elasticsearch.action.admin.cluster.state.TransportClusterStateAction; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; import org.elasticsearch.action.admin.cluster.stats.TransportClusterStatsAction; -import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptAction; import org.elasticsearch.action.admin.cluster.storedscripts.GetScriptContextAction; import org.elasticsearch.action.admin.cluster.storedscripts.GetScriptLanguageAction; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptAction; -import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptAction; import org.elasticsearch.action.admin.cluster.storedscripts.TransportDeleteStoredScriptAction; import org.elasticsearch.action.admin.cluster.storedscripts.TransportGetScriptContextAction; import org.elasticsearch.action.admin.cluster.storedscripts.TransportGetScriptLanguageAction; @@ -116,15 +104,12 @@ import org.elasticsearch.action.admin.indices.create.AutoCreateAction; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; -import org.elasticsearch.action.admin.indices.dangling.delete.DeleteDanglingIndexAction; import org.elasticsearch.action.admin.indices.dangling.delete.TransportDeleteDanglingIndexAction; import org.elasticsearch.action.admin.indices.dangling.find.FindDanglingIndexAction; import org.elasticsearch.action.admin.indices.dangling.find.TransportFindDanglingIndexAction; -import org.elasticsearch.action.admin.indices.dangling.import_index.ImportDanglingIndexAction; import org.elasticsearch.action.admin.indices.dangling.import_index.TransportImportDanglingIndexAction; import org.elasticsearch.action.admin.indices.dangling.list.ListDanglingIndicesAction; import org.elasticsearch.action.admin.indices.dangling.list.TransportListDanglingIndicesAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.diskusage.AnalyzeIndexDiskUsageAction; import org.elasticsearch.action.admin.indices.diskusage.TransportAnalyzeIndexDiskUsageAction; @@ -664,11 +649,11 @@ public void reg actions.register(PrevalidateNodeRemovalAction.INSTANCE, TransportPrevalidateNodeRemovalAction.class); actions.register(HealthApiStatsAction.INSTANCE, HealthApiStatsTransportAction.class); - actions.register(AddVotingConfigExclusionsAction.INSTANCE, TransportAddVotingConfigExclusionsAction.class); - actions.register(ClearVotingConfigExclusionsAction.INSTANCE, TransportClearVotingConfigExclusionsAction.class); - actions.register(ClusterAllocationExplainAction.INSTANCE, TransportClusterAllocationExplainAction.class); - actions.register(GetDesiredBalanceAction.INSTANCE, TransportGetDesiredBalanceAction.class); - actions.register(DeleteDesiredBalanceAction.INSTANCE, 
TransportDeleteDesiredBalanceAction.class); + actions.register(TransportAddVotingConfigExclusionsAction.TYPE, TransportAddVotingConfigExclusionsAction.class); + actions.register(TransportClearVotingConfigExclusionsAction.TYPE, TransportClearVotingConfigExclusionsAction.class); + actions.register(TransportClusterAllocationExplainAction.TYPE, TransportClusterAllocationExplainAction.class); + actions.register(TransportGetDesiredBalanceAction.TYPE, TransportGetDesiredBalanceAction.class); + actions.register(TransportDeleteDesiredBalanceAction.TYPE, TransportDeleteDesiredBalanceAction.class); actions.register(ClusterStatsAction.INSTANCE, TransportClusterStatsAction.class); actions.register(ClusterStateAction.INSTANCE, TransportClusterStateAction.class); actions.register(TransportClusterHealthAction.TYPE, TransportClusterHealthAction.class); @@ -678,15 +663,15 @@ public void reg actions.register(ClusterSearchShardsAction.INSTANCE, TransportClusterSearchShardsAction.class); actions.register(ClusterFormationInfoAction.INSTANCE, ClusterFormationInfoAction.TransportAction.class); actions.register(TransportPendingClusterTasksAction.TYPE, TransportPendingClusterTasksAction.class); - actions.register(PutRepositoryAction.INSTANCE, TransportPutRepositoryAction.class); + actions.register(TransportPutRepositoryAction.TYPE, TransportPutRepositoryAction.class); actions.register(GetRepositoriesAction.INSTANCE, TransportGetRepositoriesAction.class); - actions.register(DeleteRepositoryAction.INSTANCE, TransportDeleteRepositoryAction.class); + actions.register(TransportDeleteRepositoryAction.TYPE, TransportDeleteRepositoryAction.class); actions.register(VerifyRepositoryAction.INSTANCE, TransportVerifyRepositoryAction.class); actions.register(CleanupRepositoryAction.INSTANCE, TransportCleanupRepositoryAction.class); actions.register(GetSnapshotsAction.INSTANCE, TransportGetSnapshotsAction.class); - actions.register(DeleteSnapshotAction.INSTANCE, TransportDeleteSnapshotAction.class); + actions.register(TransportDeleteSnapshotAction.TYPE, TransportDeleteSnapshotAction.class); actions.register(CreateSnapshotAction.INSTANCE, TransportCreateSnapshotAction.class); - actions.register(CloneSnapshotAction.INSTANCE, TransportCloneSnapshotAction.class); + actions.register(TransportCloneSnapshotAction.TYPE, TransportCloneSnapshotAction.class); actions.register(RestoreSnapshotAction.INSTANCE, TransportRestoreSnapshotAction.class); actions.register(SnapshotsStatusAction.INSTANCE, TransportSnapshotsStatusAction.class); actions.register(SnapshottableFeaturesAction.INSTANCE, TransportSnapshottableFeaturesAction.class); @@ -701,7 +686,7 @@ public void reg actions.register(CreateIndexAction.INSTANCE, TransportCreateIndexAction.class); actions.register(ResizeAction.INSTANCE, TransportResizeAction.class); actions.register(RolloverAction.INSTANCE, TransportRolloverAction.class); - actions.register(DeleteIndexAction.INSTANCE, TransportDeleteIndexAction.class); + actions.register(TransportDeleteIndexAction.TYPE, TransportDeleteIndexAction.class); actions.register(GetIndexAction.INSTANCE, TransportGetIndexAction.class); actions.register(OpenIndexAction.INSTANCE, TransportOpenIndexAction.class); actions.register(TransportCloseIndexAction.TYPE, TransportCloseIndexAction.class); @@ -764,9 +749,9 @@ public void reg actions.register(CoordinationDiagnosticsAction.INSTANCE, CoordinationDiagnosticsAction.TransportAction.class); // Indexed scripts - actions.register(PutStoredScriptAction.INSTANCE, TransportPutStoredScriptAction.class); + 
actions.register(TransportPutStoredScriptAction.TYPE, TransportPutStoredScriptAction.class); actions.register(GetStoredScriptAction.INSTANCE, TransportGetStoredScriptAction.class); - actions.register(DeleteStoredScriptAction.INSTANCE, TransportDeleteStoredScriptAction.class); + actions.register(TransportDeleteStoredScriptAction.TYPE, TransportDeleteStoredScriptAction.class); actions.register(GetScriptContextAction.INSTANCE, TransportGetScriptContextAction.class); actions.register(GetScriptLanguageAction.INSTANCE, TransportGetScriptLanguageAction.class); @@ -786,14 +771,14 @@ public void reg actions.register(RemovePersistentTaskAction.INSTANCE, RemovePersistentTaskAction.TransportAction.class); // retention leases - actions.register(RetentionLeaseActions.Add.INSTANCE, RetentionLeaseActions.Add.TransportAction.class); - actions.register(RetentionLeaseActions.Renew.INSTANCE, RetentionLeaseActions.Renew.TransportAction.class); - actions.register(RetentionLeaseActions.Remove.INSTANCE, RetentionLeaseActions.Remove.TransportAction.class); + actions.register(RetentionLeaseActions.ADD, RetentionLeaseActions.TransportAddAction.class); + actions.register(RetentionLeaseActions.RENEW, RetentionLeaseActions.TransportRenewAction.class); + actions.register(RetentionLeaseActions.REMOVE, RetentionLeaseActions.TransportRemoveAction.class); // Dangling indices actions.register(ListDanglingIndicesAction.INSTANCE, TransportListDanglingIndicesAction.class); - actions.register(ImportDanglingIndexAction.INSTANCE, TransportImportDanglingIndexAction.class); - actions.register(DeleteDanglingIndexAction.INSTANCE, TransportDeleteDanglingIndexAction.class); + actions.register(TransportImportDanglingIndexAction.TYPE, TransportImportDanglingIndexAction.class); + actions.register(TransportDeleteDanglingIndexAction.TYPE, TransportDeleteDanglingIndexAction.class); actions.register(FindDanglingIndexAction.INSTANCE, TransportFindDanglingIndexAction.class); // internal actions @@ -810,7 +795,7 @@ public void reg // desired nodes actions.register(GetDesiredNodesAction.INSTANCE, TransportGetDesiredNodesAction.class); actions.register(UpdateDesiredNodesAction.INSTANCE, TransportUpdateDesiredNodesAction.class); - actions.register(DeleteDesiredNodesAction.INSTANCE, TransportDeleteDesiredNodesAction.class); + actions.register(TransportDeleteDesiredNodesAction.TYPE, TransportDeleteDesiredNodesAction.class); actions.register(UpdateHealthInfoCacheAction.INSTANCE, UpdateHealthInfoCacheAction.TransportAction.class); actions.register(FetchHealthInfoCacheAction.INSTANCE, FetchHealthInfoCacheAction.TransportAction.class); diff --git a/server/src/main/java/org/elasticsearch/action/ActionType.java b/server/src/main/java/org/elasticsearch/action/ActionType.java index 478fab0f2cf36..b8e4c8b88aa5e 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionType.java +++ b/server/src/main/java/org/elasticsearch/action/ActionType.java @@ -8,6 +8,7 @@ package org.elasticsearch.action; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.io.stream.Writeable; /** @@ -22,6 +23,14 @@ public static ActionType localOnly(String name) { return new ActionType<>(name, Writeable.Reader.localOnly()); } + public static ActionType<ActionResponse.Empty> emptyResponse(String name) { + return new ActionType<>(name, in -> ActionResponse.Empty.INSTANCE); + } + + public static ActionType<AcknowledgedResponse> acknowledgedResponse(String name) { + return new ActionType<>(name, AcknowledgedResponse::readFrom); + } + /** * @param name The name of the action,
must be unique across actions. * @param responseReader A reader for the response type diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainAction.java deleted file mode 100644 index 34d5874cea3cb..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainAction.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.cluster.allocation; - -import org.elasticsearch.action.ActionType; - -/** - * ActionType for explaining shard allocation for a shard in the cluster - */ -public class ClusterAllocationExplainAction extends ActionType { - - public static final ClusterAllocationExplainAction INSTANCE = new ClusterAllocationExplainAction(); - public static final String NAME = "cluster:monitor/allocation/explain"; - - private ClusterAllocationExplainAction() { - super(NAME, ClusterAllocationExplainResponse::new); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequestBuilder.java index 6ceea15d8fd11..3053ebe1f3db9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequestBuilder.java @@ -19,8 +19,8 @@ public class ClusterAllocationExplainRequestBuilder extends MasterNodeOperationR ClusterAllocationExplainResponse, ClusterAllocationExplainRequestBuilder> { - public ClusterAllocationExplainRequestBuilder(ElasticsearchClient client, ClusterAllocationExplainAction action) { - super(client, action, new ClusterAllocationExplainRequest()); + public ClusterAllocationExplainRequestBuilder(ElasticsearchClient client) { + super(client, TransportClusterAllocationExplainAction.TYPE, new ClusterAllocationExplainRequest()); } /** The index name to use when finding the shard to explain */ diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/DeleteDesiredBalanceAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/DeleteDesiredBalanceAction.java deleted file mode 100644 index 23a2e75d5d401..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/DeleteDesiredBalanceAction.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
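The two factory helpers added to ActionType above are easier to follow with their generic parameters written out. A sketch, with the type parameters inferred from the call sites later in this diff rather than copied verbatim:

    // Sketch of the new helpers on ActionType (type parameters inferred, not verbatim).
    public static ActionType<ActionResponse.Empty> emptyResponse(String name) {
        // For actions whose response carries no payload: every response is the shared singleton.
        return new ActionType<>(name, in -> ActionResponse.Empty.INSTANCE);
    }

    public static ActionType<AcknowledgedResponse> acknowledgedResponse(String name) {
        // For actions that return the standard acknowledged/not-acknowledged body.
        return new ActionType<>(name, AcknowledgedResponse::readFrom);
    }

Transport actions further down (desired balance, voting config, repositories, snapshots, stored scripts, dangling and regular index deletion) build their TYPE constants from these helpers instead of repeating the reader lambda each time.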
- */ - -package org.elasticsearch.action.admin.cluster.allocation; - -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.ActionType; - -public class DeleteDesiredBalanceAction extends ActionType { - - public static final DeleteDesiredBalanceAction INSTANCE = new DeleteDesiredBalanceAction(); - public static final String NAME = "cluster:admin/desired_balance/reset"; - - DeleteDesiredBalanceAction() { - super(NAME, in -> ActionResponse.Empty.INSTANCE); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/GetDesiredBalanceAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/GetDesiredBalanceAction.java deleted file mode 100644 index f9f90791c223f..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/GetDesiredBalanceAction.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.action.admin.cluster.allocation; - -import org.elasticsearch.action.ActionType; - -public class GetDesiredBalanceAction extends ActionType { - public static final GetDesiredBalanceAction INSTANCE = new GetDesiredBalanceAction(); - public static final String NAME = "cluster:admin/desired_balance/get"; - - GetDesiredBalanceAction() { - super(NAME, DesiredBalanceResponse::from); - } - -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java index 68302df47d6f2..7599eb2faef96 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterInfo; @@ -43,6 +44,10 @@ public class TransportClusterAllocationExplainAction extends TransportMasterNode ClusterAllocationExplainRequest, ClusterAllocationExplainResponse> { + public static final ActionType TYPE = new ActionType<>( + "cluster:monitor/allocation/explain", + ClusterAllocationExplainResponse::new + ); private static final Logger logger = LogManager.getLogger(TransportClusterAllocationExplainAction.class); private final ClusterInfoService clusterInfoService; @@ -63,7 +68,7 @@ public TransportClusterAllocationExplainAction( AllocationService allocationService ) { super( - ClusterAllocationExplainAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportDeleteDesiredBalanceAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportDeleteDesiredBalanceAction.java index 4360d7c1925f6..76b563c3f540a 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportDeleteDesiredBalanceAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportDeleteDesiredBalanceAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; @@ -34,6 +35,7 @@ public class TransportDeleteDesiredBalanceAction extends TransportMasterNodeAction { + public static final ActionType TYPE = ActionType.emptyResponse("cluster:admin/desired_balance/reset"); @Nullable private final MasterServiceTaskQueue resetDesiredBalanceTaskQueue; @@ -48,7 +50,7 @@ public TransportDeleteDesiredBalanceAction( ShardsAllocator shardsAllocator ) { super( - DeleteDesiredBalanceAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java index fc11790079521..49611ffae8718 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java @@ -9,6 +9,7 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterInfoService; @@ -42,6 +43,10 @@ public class TransportGetDesiredBalanceAction extends TransportMasterNodeReadAction { + public static final ActionType TYPE = new ActionType<>( + "cluster:admin/desired_balance/get", + DesiredBalanceResponse::from + ); @Nullable private final DesiredBalanceShardsAllocator desiredBalanceShardsAllocator; private final ClusterInfoService clusterInfoService; @@ -59,7 +64,7 @@ public TransportGetDesiredBalanceAction( WriteLoadForecaster writeLoadForecaster ) { super( - GetDesiredBalanceAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsAction.java deleted file mode 100644 index 7445096722c28..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsAction.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
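The same shape of change repeats for every action in this diff: the standalone ActionType subclass with its INSTANCE and NAME constants is deleted, an equivalent static TYPE constant appears on the transport action, the transport action passes TYPE.name() to its super constructor, and ActionModule registers the pair via TYPE. A schematic before/after using a hypothetical ExampleAction (all names here are placeholders, not from the codebase):

    // Before: a dedicated ActionType subclass exposing a singleton.
    public class ExampleAction extends ActionType<ExampleResponse> {
        public static final ExampleAction INSTANCE = new ExampleAction();
        public static final String NAME = "cluster:admin/example";

        private ExampleAction() {
            super(NAME, ExampleResponse::new);
        }
    }

    // After: the ActionType lives as a constant on the transport action itself.
    public class TransportExampleAction extends TransportMasterNodeAction<ExampleRequest, ExampleResponse> {
        public static final ActionType<ExampleResponse> TYPE = new ActionType<>("cluster:admin/example", ExampleResponse::new);
        // The constructor passes TYPE.name() where it used to pass ExampleAction.NAME, and
        // ActionModule registers actions.register(TransportExampleAction.TYPE, TransportExampleAction.class).
    }

The action name strings themselves are unchanged throughout, so the wire protocol and any privileges keyed off those names should not be affected.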
- */ -package org.elasticsearch.action.admin.cluster.configuration; - -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.ActionType; - -public class AddVotingConfigExclusionsAction extends ActionType { - public static final AddVotingConfigExclusionsAction INSTANCE = new AddVotingConfigExclusionsAction(); - public static final String NAME = "cluster:admin/voting_config/add_exclusions"; - - private AddVotingConfigExclusionsAction() { - super(NAME, in -> ActionResponse.Empty.INSTANCE); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsAction.java deleted file mode 100644 index 98f4dd62763e5..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsAction.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.action.admin.cluster.configuration; - -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.ActionType; - -public class ClearVotingConfigExclusionsAction extends ActionType { - public static final ClearVotingConfigExclusionsAction INSTANCE = new ClearVotingConfigExclusionsAction(); - public static final String NAME = "cluster:admin/voting_config/clear_exclusions"; - - private ClearVotingConfigExclusionsAction() { - super(NAME, in -> ActionResponse.Empty.INSTANCE); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsAction.java index 57332429135b6..b9bcf0944cd83 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; @@ -48,6 +49,7 @@ public class TransportAddVotingConfigExclusionsAction extends TransportMasterNod AddVotingConfigExclusionsRequest, ActionResponse.Empty> { + public static final ActionType TYPE = ActionType.emptyResponse("cluster:admin/voting_config/add_exclusions"); private static final Logger logger = LogManager.getLogger(TransportAddVotingConfigExclusionsAction.class); public static final Setting MAXIMUM_VOTING_CONFIG_EXCLUSIONS_SETTING = Setting.intSetting( @@ -73,7 +75,7 @@ public TransportAddVotingConfigExclusionsAction( Reconfigurator reconfigurator ) { super( - AddVotingConfigExclusionsAction.NAME, + TYPE.name(), false, transportService, clusterService, diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsAction.java index 46069f01ecda3..113d085f51fdb 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; @@ -42,6 +43,7 @@ public class TransportClearVotingConfigExclusionsAction extends TransportMasterN ClearVotingConfigExclusionsRequest, ActionResponse.Empty> { + public static final ActionType TYPE = ActionType.emptyResponse("cluster:admin/voting_config/clear_exclusions"); private static final Logger logger = LogManager.getLogger(TransportClearVotingConfigExclusionsAction.class); private final Reconfigurator reconfigurator; @@ -55,7 +57,7 @@ public TransportClearVotingConfigExclusionsAction( Reconfigurator reconfigurator ) { super( - ClearVotingConfigExclusionsAction.NAME, + TYPE.name(), false, transportService, clusterService, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/DeleteDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/DeleteDesiredNodesAction.java deleted file mode 100644 index 720f38e16a86a..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/DeleteDesiredNodesAction.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
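Spelled out with its generic parameter, the new constant on the two voting-config transport actions above is a direct use of the emptyResponse helper (a sketch; each declaration lives in its own class):

    // In TransportAddVotingConfigExclusionsAction:
    public static final ActionType<ActionResponse.Empty> TYPE =
        ActionType.emptyResponse("cluster:admin/voting_config/add_exclusions");

    // In TransportClearVotingConfigExclusionsAction:
    public static final ActionType<ActionResponse.Empty> TYPE =
        ActionType.emptyResponse("cluster:admin/voting_config/clear_exclusions");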
- */ - -package org.elasticsearch.action.admin.cluster.desirednodes; - -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.AcknowledgedRequest; -import org.elasticsearch.common.io.stream.StreamInput; - -import java.io.IOException; - -public class DeleteDesiredNodesAction extends ActionType { - public static final DeleteDesiredNodesAction INSTANCE = new DeleteDesiredNodesAction(); - public static final String NAME = "cluster:admin/desired_nodes/delete"; - - DeleteDesiredNodesAction() { - super(NAME, in -> ActionResponse.Empty.INSTANCE); - } - - public static class Request extends AcknowledgedRequest { - public Request() {} - - public Request(StreamInput in) throws IOException { - super(in); - } - - @Override - public ActionRequestValidationException validate() { - return null; - } - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java index 48ea8beef2fd4..689e0579d1cbd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java @@ -9,8 +9,11 @@ package org.elasticsearch.action.admin.cluster.desirednodes; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskListener; @@ -23,14 +26,20 @@ import org.elasticsearch.cluster.service.MasterServiceTaskQueue; import org.elasticsearch.common.Priority; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Tuple; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -public class TransportDeleteDesiredNodesAction extends TransportMasterNodeAction { +import java.io.IOException; +public class TransportDeleteDesiredNodesAction extends TransportMasterNodeAction< + TransportDeleteDesiredNodesAction.Request, + ActionResponse.Empty> { + + public static final ActionType TYPE = ActionType.emptyResponse("cluster:admin/desired_nodes/delete"); private final MasterServiceTaskQueue taskQueue; @Inject @@ -42,12 +51,12 @@ public TransportDeleteDesiredNodesAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - DeleteDesiredNodesAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, actionFilters, - DeleteDesiredNodesAction.Request::new, + Request::new, indexNameExpressionResolver, in -> ActionResponse.Empty.INSTANCE, EsExecutors.DIRECT_EXECUTOR_SERVICE @@ -56,17 +65,13 @@ public TransportDeleteDesiredNodesAction( } @Override - protected void masterOperation( - Task task, - DeleteDesiredNodesAction.Request request, - ClusterState state, - 
ActionListener listener - ) throws Exception { + protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) + throws Exception { taskQueue.submitTask("delete-desired-nodes", new DeleteDesiredNodesTask(listener), request.masterNodeTimeout()); } @Override - protected ClusterBlockException checkBlock(DeleteDesiredNodesAction.Request request, ClusterState state) { + protected ClusterBlockException checkBlock(Request request, ClusterState state) { return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); } @@ -93,4 +98,17 @@ public ClusterState afterBatchExecution(ClusterState clusterState, boolean clust return clusterState.copyAndUpdateMetadata(metadata -> metadata.removeCustom(DesiredNodesMetadata.TYPE)); } } + + public static class Request extends AcknowledgedRequest { + public Request() {} + + public Request(StreamInput in) throws IOException { + super(in); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java index 3fabd53299674..ea56c85e36a3a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java @@ -25,6 +25,7 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.io.StringWriter; import java.util.List; public class TransportNodesHotThreadsAction extends TransportNodesAction< @@ -79,8 +80,9 @@ protected NodeHotThreads nodeOperation(NodeRequest request, Task task) { .interval(request.request.interval) .threadElementsSnapshotCount(request.request.snapshots) .ignoreIdleThreads(request.request.ignoreIdleThreads); - try { - return new NodeHotThreads(clusterService.localNode(), hotThreads.detect()); + try (var writer = new StringWriter()) { + hotThreads.detect(writer); + return new NodeHotThreads(clusterService.localNode(), writer.toString()); } catch (Exception e) { throw new ElasticsearchException("failed to detect hot threads", e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryAction.java deleted file mode 100644 index 590460e9025b6..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryAction.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
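Since DeleteDesiredNodesAction and its nested Request class are gone, callers go through the constants that now live on TransportDeleteDesiredNodesAction instead. A hypothetical caller, for illustration only (the client, logger, and listener wiring are assumptions, not taken from this diff):

    // Hypothetical usage; `client` is any org.elasticsearch.client.internal.Client.
    client.execute(
        TransportDeleteDesiredNodesAction.TYPE,
        new TransportDeleteDesiredNodesAction.Request(),
        ActionListener.wrap(
            response -> logger.info("desired nodes deleted"),
            e -> logger.warn("failed to delete desired nodes", e)
        )
    );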
- */ - -package org.elasticsearch.action.admin.cluster.repositories.delete; - -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.AcknowledgedResponse; - -/** - * Unregister repository action - */ -public class DeleteRepositoryAction extends ActionType { - - public static final DeleteRepositoryAction INSTANCE = new DeleteRepositoryAction(); - public static final String NAME = "cluster:admin/repository/delete"; - - private DeleteRepositoryAction() { - super(NAME, AcknowledgedResponse::readFrom); - } - -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryRequestBuilder.java index e2f614246b81c..6accb02418df8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryRequestBuilder.java @@ -24,7 +24,7 @@ public class DeleteRepositoryRequestBuilder extends AcknowledgedRequestBuilder< * Constructs unregister repository request builder with specified repository name */ public DeleteRepositoryRequestBuilder(ElasticsearchClient client, String name) { - super(client, DeleteRepositoryAction.INSTANCE, new DeleteRepositoryRequest(name)); + super(client, TransportDeleteRepositoryAction.TYPE, new DeleteRepositoryRequest(name)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/TransportDeleteRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/TransportDeleteRepositoryAction.java index b1f78408c7829..69568462731e8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/TransportDeleteRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/TransportDeleteRepositoryAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.admin.cluster.repositories.delete; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.cluster.repositories.reservedstate.ReservedRepositoryAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -33,6 +34,7 @@ */ public class TransportDeleteRepositoryAction extends AcknowledgedTransportMasterNodeAction { + public static final ActionType TYPE = ActionType.acknowledgedResponse("cluster:admin/repository/delete"); private final RepositoriesService repositoriesService; @Inject @@ -45,7 +47,7 @@ public TransportDeleteRepositoryAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - DeleteRepositoryAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryAction.java deleted file mode 100644 index 3ac2134afef83..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryAction.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
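For the acknowledged master-node actions in this diff (repositories, snapshots, stored scripts, dangling indices, index deletion), the constant is built with ActionType.acknowledgedResponse instead. Spelled out with generics, the repository-delete pieces above amount to the following sketch (not a verbatim copy):

    // Transport action: declares the ActionType and passes TYPE.name() to super(...).
    public class TransportDeleteRepositoryAction extends AcknowledgedTransportMasterNodeAction<DeleteRepositoryRequest> {
        public static final ActionType<AcknowledgedResponse> TYPE =
            ActionType.acknowledgedResponse("cluster:admin/repository/delete");
        // constructor otherwise unchanged
    }

    // Request builder: points at the new constant instead of the deleted DeleteRepositoryAction.INSTANCE.
    public DeleteRepositoryRequestBuilder(ElasticsearchClient client, String name) {
        super(client, TransportDeleteRepositoryAction.TYPE, new DeleteRepositoryRequest(name));
    }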
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.cluster.repositories.put; - -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.AcknowledgedResponse; - -/** - * Register repository action - */ -public class PutRepositoryAction extends ActionType { - - public static final PutRepositoryAction INSTANCE = new PutRepositoryAction(); - public static final String NAME = "cluster:admin/repository/put"; - - private PutRepositoryAction() { - super(NAME, AcknowledgedResponse::readFrom); - } - -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequestBuilder.java index 79195725ad962..86ed38c2ddad9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequestBuilder.java @@ -28,7 +28,7 @@ public class PutRepositoryRequestBuilder extends AcknowledgedRequestBuilder< * Constructs register repository request for the repository with a given name */ public PutRepositoryRequestBuilder(ElasticsearchClient client, String name) { - super(client, PutRepositoryAction.INSTANCE, new PutRepositoryRequest(name)); + super(client, TransportPutRepositoryAction.TYPE, new PutRepositoryRequest(name)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java index bb17b0d8ab8fe..c6b471ff25bdf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.admin.cluster.repositories.put; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.cluster.repositories.reservedstate.ReservedRepositoryAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -33,6 +34,7 @@ */ public class TransportPutRepositoryAction extends AcknowledgedTransportMasterNodeAction { + public static final ActionType TYPE = ActionType.acknowledgedResponse("cluster:admin/repository/put"); private final RepositoriesService repositoriesService; @Inject @@ -45,7 +47,7 @@ public TransportPutRepositoryAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - PutRepositoryAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/clone/CloneSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/clone/CloneSnapshotAction.java deleted file mode 100644 index b73e8e3668cd2..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/clone/CloneSnapshotAction.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.cluster.snapshots.clone; - -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.AcknowledgedResponse; - -public final class CloneSnapshotAction extends ActionType { - - public static final CloneSnapshotAction INSTANCE = new CloneSnapshotAction(); - public static final String NAME = "cluster:admin/snapshot/clone"; - - private CloneSnapshotAction() { - super(NAME, AcknowledgedResponse::readFrom); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/clone/CloneSnapshotRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/clone/CloneSnapshotRequestBuilder.java index efa4c4895a12e..818f0fadf92ef 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/clone/CloneSnapshotRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/clone/CloneSnapshotRequestBuilder.java @@ -20,7 +20,7 @@ public class CloneSnapshotRequestBuilder extends MasterNodeOperationRequestBuild CloneSnapshotRequestBuilder> { public CloneSnapshotRequestBuilder(ElasticsearchClient client, String repository, String source, String target) { - super(client, CloneSnapshotAction.INSTANCE, new CloneSnapshotRequest(repository, source, target, Strings.EMPTY_ARRAY)); + super(client, TransportCloneSnapshotAction.TYPE, new CloneSnapshotRequest(repository, source, target, Strings.EMPTY_ARRAY)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/clone/TransportCloneSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/clone/TransportCloneSnapshotAction.java index 1a37cd0204c30..7ab8b704a3ee8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/clone/TransportCloneSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/clone/TransportCloneSnapshotAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.clone; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; @@ -29,6 +30,7 @@ */ public final class TransportCloneSnapshotAction extends AcknowledgedTransportMasterNodeAction { + public static final ActionType TYPE = ActionType.acknowledgedResponse("cluster:admin/snapshot/clone"); private final SnapshotsService snapshotsService; @Inject @@ -41,7 +43,7 @@ public TransportCloneSnapshotAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - CloneSnapshotAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotAction.java deleted file mode 100644 index 9d5e30b604702..0000000000000 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotAction.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.cluster.snapshots.delete; - -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.AcknowledgedResponse; - -/** - * Delete snapshot action - */ -public class DeleteSnapshotAction extends ActionType { - - public static final DeleteSnapshotAction INSTANCE = new DeleteSnapshotAction(); - public static final String NAME = "cluster:admin/snapshot/delete"; - - private DeleteSnapshotAction() { - super(NAME, AcknowledgedResponse::readFrom); - } - -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequestBuilder.java index 8d2c8997b42e6..f18ed209ba11e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequestBuilder.java @@ -24,7 +24,7 @@ public class DeleteSnapshotRequestBuilder extends MasterNodeOperationRequestBuil * Constructs delete snapshot request builder with specified repository and snapshot names */ public DeleteSnapshotRequestBuilder(ElasticsearchClient client, String repository, String... 
snapshots) { - super(client, DeleteSnapshotAction.INSTANCE, new DeleteSnapshotRequest(repository, snapshots)); + super(client, TransportDeleteSnapshotAction.TYPE, new DeleteSnapshotRequest(repository, snapshots)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java index df7a5e5595055..39b03b479ffdf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.delete; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; @@ -28,6 +29,7 @@ * Transport action for delete snapshot operation */ public class TransportDeleteSnapshotAction extends AcknowledgedTransportMasterNodeAction { + public static final ActionType TYPE = ActionType.acknowledgedResponse("cluster:admin/snapshot/delete"); private final SnapshotsService snapshotsService; @Inject @@ -40,7 +42,7 @@ public TransportDeleteSnapshotAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - DeleteSnapshotAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptAction.java deleted file mode 100644 index 1ac899666a1eb..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptAction.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.cluster.storedscripts; - -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.AcknowledgedResponse; - -public class DeleteStoredScriptAction extends ActionType { - - public static final DeleteStoredScriptAction INSTANCE = new DeleteStoredScriptAction(); - public static final String NAME = "cluster:admin/script/delete"; - - private DeleteStoredScriptAction() { - super(NAME, AcknowledgedResponse::readFrom); - } - -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptRequestBuilder.java index d8f22216073a5..ce074e17ebb75 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptRequestBuilder.java @@ -18,7 +18,7 @@ public class DeleteStoredScriptRequestBuilder extends AcknowledgedRequestBuilder DeleteStoredScriptRequestBuilder> { public DeleteStoredScriptRequestBuilder(ElasticsearchClient client) { - super(client, DeleteStoredScriptAction.INSTANCE, new DeleteStoredScriptRequest()); + super(client, TransportDeleteStoredScriptAction.TYPE, new DeleteStoredScriptRequest()); } public DeleteStoredScriptRequestBuilder setId(String id) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptAction.java deleted file mode 100644 index f7506f379de8a..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptAction.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.cluster.storedscripts; - -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.AcknowledgedResponse; - -public class PutStoredScriptAction extends ActionType { - - public static final PutStoredScriptAction INSTANCE = new PutStoredScriptAction(); - public static final String NAME = "cluster:admin/script/put"; - - private PutStoredScriptAction() { - super(NAME, AcknowledgedResponse::readFrom); - } - -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestBuilder.java index 24f5900629cfb..9e353382f84a9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestBuilder.java @@ -20,7 +20,7 @@ public class PutStoredScriptRequestBuilder extends AcknowledgedRequestBuilder< PutStoredScriptRequestBuilder> { public PutStoredScriptRequestBuilder(ElasticsearchClient client) { - super(client, PutStoredScriptAction.INSTANCE, new PutStoredScriptRequest()); + super(client, TransportPutStoredScriptAction.TYPE, new PutStoredScriptRequest()); } public PutStoredScriptRequestBuilder setId(String id) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportDeleteStoredScriptAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportDeleteStoredScriptAction.java index dfb3745d4101a..829b00b7cc1c9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportDeleteStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportDeleteStoredScriptAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.admin.cluster.storedscripts; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; @@ -26,6 +27,8 @@ public class TransportDeleteStoredScriptAction extends AcknowledgedTransportMasterNodeAction { + public static final ActionType TYPE = ActionType.acknowledgedResponse("cluster:admin/script/delete"); + @Inject public TransportDeleteStoredScriptAction( TransportService transportService, @@ -35,7 +38,7 @@ public TransportDeleteStoredScriptAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - DeleteStoredScriptAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportPutStoredScriptAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportPutStoredScriptAction.java index 8025d983d2668..4fb0f68bce625 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportPutStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportPutStoredScriptAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.admin.cluster.storedscripts; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import 
org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; @@ -26,6 +27,7 @@ public class TransportPutStoredScriptAction extends AcknowledgedTransportMasterNodeAction { + public static final ActionType TYPE = ActionType.acknowledgedResponse("cluster:admin/script/put"); private final ScriptService scriptService; @Inject @@ -38,7 +40,7 @@ public TransportPutStoredScriptAction( ScriptService scriptService ) { super( - PutStoredScriptAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java index 66a489933c3ee..0001fec4e71e5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java @@ -59,7 +59,7 @@ public class TransportIndicesAliasesAction extends AcknowledgedTransportMasterNodeAction { public static final String NAME = "indices:admin/aliases"; - public static final ActionType TYPE = new ActionType<>(NAME, AcknowledgedResponse::readFrom); + public static final ActionType TYPE = ActionType.acknowledgedResponse(NAME); private static final Logger logger = LogManager.getLogger(TransportIndicesAliasesAction.class); private final MetadataIndexAliasesService indexAliasesService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/DeleteDanglingIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/DeleteDanglingIndexAction.java deleted file mode 100644 index 0435f603be8ac..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/DeleteDanglingIndexAction.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.indices.dangling.delete; - -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.AcknowledgedResponse; - -/** - * This action causes a dangling index to be considered as deleted by the cluster. 
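Where a transport action already carried an inline TYPE constant, the change is just a swap of the explicit reader for the new factory, as in TransportIndicesAliasesAction above. With its generic parameter written out, that one-liner is effectively:

    // Before
    public static final ActionType<AcknowledgedResponse> TYPE = new ActionType<>(NAME, AcknowledgedResponse::readFrom);
    // After
    public static final ActionType<AcknowledgedResponse> TYPE = ActionType.acknowledgedResponse(NAME);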
- */ -public class DeleteDanglingIndexAction extends ActionType { - - public static final DeleteDanglingIndexAction INSTANCE = new DeleteDanglingIndexAction(); - public static final String NAME = "cluster:admin/indices/dangling/delete"; - - private DeleteDanglingIndexAction() { - super(NAME, AcknowledgedResponse::readFrom); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/TransportDeleteDanglingIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/TransportDeleteDanglingIndexAction.java index 1207c2c1e60ff..93fae72810ad0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/TransportDeleteDanglingIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/TransportDeleteDanglingIndexAction.java @@ -12,6 +12,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.admin.indices.dangling.DanglingIndexInfo; import org.elasticsearch.action.admin.indices.dangling.list.ListDanglingIndicesAction; @@ -43,11 +44,12 @@ import java.util.stream.Collectors; /** - * Implements the deletion of a dangling index. When handling a {@link DeleteDanglingIndexAction}, + * Implements the deletion of a dangling index. When handling a {@link DeleteDanglingIndexRequest}, * this class first checks that such a dangling index exists. It then submits a cluster state update * to add the index to the index graveyard. */ public class TransportDeleteDanglingIndexAction extends AcknowledgedTransportMasterNodeAction { + public static final ActionType TYPE = ActionType.acknowledgedResponse("cluster:admin/indices/dangling/delete"); private static final Logger logger = LogManager.getLogger(TransportDeleteDanglingIndexAction.class); private final Settings settings; @@ -64,7 +66,7 @@ public TransportDeleteDanglingIndexAction( NodeClient nodeClient ) { super( - DeleteDanglingIndexAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/ImportDanglingIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/ImportDanglingIndexAction.java deleted file mode 100644 index c64a8b81fc2de..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/ImportDanglingIndexAction.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.indices.dangling.import_index; - -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.AcknowledgedResponse; - -/** - * Represents a request to import a particular dangling index. 
- */ -public class ImportDanglingIndexAction extends ActionType { - - public static final ImportDanglingIndexAction INSTANCE = new ImportDanglingIndexAction(); - public static final String NAME = "cluster:admin/indices/dangling/import"; - - private ImportDanglingIndexAction() { - super(NAME, AcknowledgedResponse::readFrom); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/TransportImportDanglingIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/TransportImportDanglingIndexAction.java index 0362128c6403a..0348b46bedcae 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/TransportImportDanglingIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/TransportImportDanglingIndexAction.java @@ -12,6 +12,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.admin.indices.dangling.find.FindDanglingIndexAction; import org.elasticsearch.action.admin.indices.dangling.find.FindDanglingIndexRequest; @@ -33,11 +34,12 @@ import java.util.stream.Collectors; /** - * Implements the import of a dangling index. When handling a {@link ImportDanglingIndexAction}, + * Implements the import of a dangling index. When handling a {@link ImportDanglingIndexRequest}, * this class first checks that such a dangling index exists. It then calls {@link LocalAllocateDangledIndices} * to perform the actual allocation. */ public class TransportImportDanglingIndexAction extends HandledTransportAction { + public static final ActionType TYPE = ActionType.acknowledgedResponse("cluster:admin/indices/dangling/import"); private static final Logger logger = LogManager.getLogger(TransportImportDanglingIndexAction.class); private final LocalAllocateDangledIndices danglingIndexAllocator; @@ -50,13 +52,7 @@ public TransportImportDanglingIndexAction( LocalAllocateDangledIndices danglingIndexAllocator, NodeClient nodeClient ) { - super( - ImportDanglingIndexAction.NAME, - transportService, - actionFilters, - ImportDanglingIndexRequest::new, - EsExecutors.DIRECT_EXECUTOR_SERVICE - ); + super(TYPE.name(), transportService, actionFilters, ImportDanglingIndexRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.danglingIndexAllocator = danglingIndexAllocator; this.nodeClient = nodeClient; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexAction.java deleted file mode 100644 index c652375be2de0..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexAction.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
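The pattern is not limited to master-node actions: TransportImportDanglingIndexAction is a HandledTransportAction and gets the same treatment, with the previously wrapped super(...) arguments collapsing onto one line. A sketch of the two relevant pieces, with the generic parameters written out:

    // Inside TransportImportDanglingIndexAction (a HandledTransportAction<ImportDanglingIndexRequest, AcknowledgedResponse>):
    public static final ActionType<AcknowledgedResponse> TYPE =
        ActionType.acknowledgedResponse("cluster:admin/indices/dangling/import");

    // The constructor's super call, previously spread across several lines, becomes:
    super(TYPE.name(), transportService, actionFilters, ImportDanglingIndexRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE);

The Javadoc references in the dangling-index transport actions also switch from the deleted action classes to their request classes, since the action classes no longer exist to link to.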
- */ - -package org.elasticsearch.action.admin.indices.delete; - -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.AcknowledgedResponse; - -public class DeleteIndexAction extends ActionType { - - public static final DeleteIndexAction INSTANCE = new DeleteIndexAction(); - public static final String NAME = "indices:admin/delete"; - - private DeleteIndexAction() { - super(NAME, AcknowledgedResponse::readFrom); - } - -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequestBuilder.java index a6ae02dddde20..5c0aec258176a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequestBuilder.java @@ -19,7 +19,7 @@ public class DeleteIndexRequestBuilder extends AcknowledgedRequestBuilder< DeleteIndexRequestBuilder> { public DeleteIndexRequestBuilder(ElasticsearchClient client, String... indices) { - super(client, DeleteIndexAction.INSTANCE, new DeleteIndexRequest(indices)); + super(client, TransportDeleteIndexAction.TYPE, new DeleteIndexRequest(indices)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java index 8fe6e0b67e827..eff4fe24c10ac 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -36,6 +37,7 @@ */ public class TransportDeleteIndexAction extends AcknowledgedTransportMasterNodeAction { + public static final ActionType TYPE = ActionType.acknowledgedResponse("indices:admin/delete"); private static final Logger logger = LogManager.getLogger(TransportDeleteIndexAction.class); private final MetadataDeleteIndexService deleteIndexService; @@ -52,7 +54,7 @@ public TransportDeleteIndexAction( DestructiveOperations destructiveOperations ) { super( - DeleteIndexAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java b/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java index cebb4ed6e06e6..fd10c509d8ef2 100644 --- a/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java +++ b/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java @@ -94,8 +94,7 @@ private SubscribableListener(Object initialState) { * listener immediately with the result with which this listener was completed. Otherwise, the subscribing listener is retained and * completed when this listener is completed. *
<p>
- * Subscribed listeners must not throw any exceptions. Use {@link ActionListener#wrap(ActionListener)} if you have a listener for which - * exceptions from its {@link ActionListener#onResponse} method should be handled by its own {@link ActionListener#onFailure} method. + * Subscribed listeners must not throw any exceptions. *
<p>
* Listeners added strictly before this listener is completed will themselves be completed in the order in which their subscriptions * were received. However, there are no guarantees about the ordering of the completions of listeners which are added concurrently with @@ -113,8 +112,7 @@ public final void addListener(ActionListener listener) { * listener immediately with the result with which this listener was completed. Otherwise, the subscribing listener is retained and * completed when this listener is completed. *
<p>
- * Subscribed listeners must not throw any exceptions. Use {@link ActionListener#wrap(ActionListener)} if you have a listener for which - * exceptions from its {@link ActionListener#onResponse} method should be handled by its own {@link ActionListener#onFailure} method. + * Subscribed listeners must not throw any exceptions. *

* Listeners added strictly before this listener is completed will themselves be completed in the order in which their subscriptions * were received. However, there are no guarantees about the ordering of the completions of listeners which are added concurrently with diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index 0a4951c8c4125..e37f248246920 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.CancellableFanOut; -import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.action.support.ThreadedActionListener; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.cluster.ClusterState; @@ -27,7 +26,6 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestHandler; @@ -83,32 +81,6 @@ protected TransportNodesAction( transportService.registerRequestHandler(transportNodeAction, finalExecutor, nodeRequest, new NodeTransportHandler()); } - /** - * @deprecated Use the local-only constructor instead. - */ - @Deprecated(forRemoval = true) - @SuppressWarnings("this-escape") - protected TransportNodesAction( - String actionName, - ThreadPool threadPool, - ClusterService clusterService, - TransportService transportService, - ActionFilters actionFilters, - Writeable.Reader requestReader, - Writeable.Reader nodeRequest, - Executor executor - ) { - this(actionName, clusterService, transportService, actionFilters, nodeRequest, executor); - transportService.registerRequestHandler( - actionName, - executor, - false, - true, - requestReader, - (request, channel, task) -> execute(task, request, new ChannelActionListener<>(channel)) - ); - } - @Override protected void doExecute(Task task, NodesRequest request, ActionListener listener) { // coordination can run on SAME because it's only O(#nodes) work diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java index 9f6d4ed27cf6c..67793fb525644 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java @@ -118,21 +118,13 @@ private void refreshUnpromotables( return; } - engineOrNull.addFlushListener(location, ActionListener.wrap(new ActionListener<>() { - @Override - public void onResponse(Long generation) { - try ( - ThreadContext.StoredContext ignore = transportService.getThreadPool() - .getThreadContext() - .stashWithOrigin(POST_WRITE_REFRESH_ORIGIN) - ) { - sendUnpromotableRequests(indexShard, generation, forced, listener, postWriteRefreshTimeout); - } - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); + engineOrNull.addFlushListener(location, listener.delegateFailureAndWrap((l, 
generation) -> { + try ( + ThreadContext.StoredContext ignore = transportService.getThreadPool() + .getThreadContext() + .stashWithOrigin(POST_WRITE_REFRESH_ORIGIN) + ) { + sendUnpromotableRequests(indexShard, generation, forced, l, postWriteRefreshTimeout); } })); } diff --git a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java index 182e9ee497c07..21c01abd52437 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java @@ -16,10 +16,10 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainAction; import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainRequest; import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainRequestBuilder; import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainResponse; +import org.elasticsearch.action.admin.cluster.allocation.TransportClusterAllocationExplainAction; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; @@ -56,16 +56,16 @@ import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryRequestBuilder; import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryResponse; -import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequestBuilder; +import org.elasticsearch.action.admin.cluster.repositories.delete.TransportDeleteRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequestBuilder; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesResponse; -import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequestBuilder; +import org.elasticsearch.action.admin.cluster.repositories.put.TransportPutRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequestBuilder; @@ -82,16 +82,16 @@ import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequestBuilder; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse; -import 
org.elasticsearch.action.admin.cluster.snapshots.clone.CloneSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.clone.CloneSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.clone.CloneSnapshotRequestBuilder; +import org.elasticsearch.action.admin.cluster.snapshots.clone.TransportCloneSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequestBuilder; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; -import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequestBuilder; +import org.elasticsearch.action.admin.cluster.snapshots.delete.TransportDeleteSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsAction; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequestBuilder; @@ -112,16 +112,16 @@ import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequest; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequestBuilder; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; -import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptAction; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequestBuilder; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptAction; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequestBuilder; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; -import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptAction; import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequestBuilder; +import org.elasticsearch.action.admin.cluster.storedscripts.TransportDeleteStoredScriptAction; +import org.elasticsearch.action.admin.cluster.storedscripts.TransportPutStoredScriptAction; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequest; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequestBuilder; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; @@ -147,16 +147,16 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.admin.indices.dangling.delete.DeleteDanglingIndexAction; import org.elasticsearch.action.admin.indices.dangling.delete.DeleteDanglingIndexRequest; -import org.elasticsearch.action.admin.indices.dangling.import_index.ImportDanglingIndexAction; +import org.elasticsearch.action.admin.indices.dangling.delete.TransportDeleteDanglingIndexAction; import 
org.elasticsearch.action.admin.indices.dangling.import_index.ImportDanglingIndexRequest; +import org.elasticsearch.action.admin.indices.dangling.import_index.TransportImportDanglingIndexAction; import org.elasticsearch.action.admin.indices.dangling.list.ListDanglingIndicesAction; import org.elasticsearch.action.admin.indices.dangling.list.ListDanglingIndicesRequest; import org.elasticsearch.action.admin.indices.dangling.list.ListDanglingIndicesResponse; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.flush.FlushAction; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.flush.FlushRequestBuilder; @@ -877,7 +877,7 @@ public void pendingClusterTasks(PendingClusterTasksRequest request, ActionListen @Override public void putRepository(PutRepositoryRequest request, ActionListener listener) { - execute(PutRepositoryAction.INSTANCE, request, listener); + execute(TransportPutRepositoryAction.TYPE, request, listener); } @Override @@ -907,7 +907,7 @@ public CloneSnapshotRequestBuilder prepareCloneSnapshot(String repository, Strin @Override public void cloneSnapshot(CloneSnapshotRequest request, ActionListener listener) { - execute(CloneSnapshotAction.INSTANCE, request, listener); + execute(TransportCloneSnapshotAction.TYPE, request, listener); } @Override @@ -922,7 +922,7 @@ public GetSnapshotsRequestBuilder prepareGetSnapshots(String... repositories) { @Override public void deleteSnapshot(DeleteSnapshotRequest request, ActionListener listener) { - execute(DeleteSnapshotAction.INSTANCE, request, listener); + execute(TransportDeleteSnapshotAction.TYPE, request, listener); } @Override @@ -932,7 +932,7 @@ public DeleteSnapshotRequestBuilder prepareDeleteSnapshot(String repository, Str @Override public void deleteRepository(DeleteRepositoryRequest request, ActionListener listener) { - execute(DeleteRepositoryAction.INSTANCE, request, listener); + execute(TransportDeleteRepositoryAction.TYPE, request, listener); } @Override @@ -1057,17 +1057,17 @@ public SimulatePipelineRequestBuilder prepareSimulatePipeline(BytesReference sou @Override public void allocationExplain(ClusterAllocationExplainRequest request, ActionListener listener) { - execute(ClusterAllocationExplainAction.INSTANCE, request, listener); + execute(TransportClusterAllocationExplainAction.TYPE, request, listener); } @Override public ActionFuture allocationExplain(ClusterAllocationExplainRequest request) { - return execute(ClusterAllocationExplainAction.INSTANCE, request); + return execute(TransportClusterAllocationExplainAction.TYPE, request); } @Override public ClusterAllocationExplainRequestBuilder prepareAllocationExplain() { - return new ClusterAllocationExplainRequestBuilder(this, ClusterAllocationExplainAction.INSTANCE); + return new ClusterAllocationExplainRequestBuilder(this); } @Override @@ -1087,22 +1087,22 @@ public void listDanglingIndices(ListDanglingIndicesRequest request, ActionListen @Override public ActionFuture importDanglingIndex(ImportDanglingIndexRequest request) { - return execute(ImportDanglingIndexAction.INSTANCE, request); + return execute(TransportImportDanglingIndexAction.TYPE, request); } @Override public void 
importDanglingIndex(ImportDanglingIndexRequest request, ActionListener listener) { - execute(ImportDanglingIndexAction.INSTANCE, request, listener); + execute(TransportImportDanglingIndexAction.TYPE, request, listener); } @Override public ActionFuture deleteDanglingIndex(DeleteDanglingIndexRequest request) { - return execute(DeleteDanglingIndexAction.INSTANCE, request); + return execute(TransportDeleteDanglingIndexAction.TYPE, request); } @Override public void deleteDanglingIndex(DeleteDanglingIndexRequest request, ActionListener listener) { - execute(DeleteDanglingIndexAction.INSTANCE, request, listener); + execute(TransportDeleteDanglingIndexAction.TYPE, request, listener); } @Override @@ -1117,13 +1117,13 @@ public PutStoredScriptRequestBuilder preparePutStoredScript() { @Override public void putStoredScript(final PutStoredScriptRequest request, ActionListener listener) { - execute(PutStoredScriptAction.INSTANCE, request, listener); + execute(TransportPutStoredScriptAction.TYPE, request, listener); } @Override public void deleteStoredScript(DeleteStoredScriptRequest request, ActionListener listener) { - execute(DeleteStoredScriptAction.INSTANCE, request, listener); + execute(TransportDeleteStoredScriptAction.TYPE, request, listener); } @Override @@ -1239,12 +1239,12 @@ public CreateIndexRequestBuilder prepareCreate(String index) { @Override public ActionFuture delete(final DeleteIndexRequest request) { - return execute(DeleteIndexAction.INSTANCE, request); + return execute(TransportDeleteIndexAction.TYPE, request); } @Override public void delete(final DeleteIndexRequest request, final ActionListener listener) { - execute(DeleteIndexAction.INSTANCE, request, listener); + execute(TransportDeleteIndexAction.TYPE, request, listener); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimator.java b/server/src/main/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimator.java index 05c0876669732..85e201d52f03b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimator.java @@ -47,11 +47,18 @@ public static long getExpectedShardSize( if (indexMetadata.getResizeSourceIndex() != null && shard.active() == false && shard.recoverySource().getType() == RecoverySource.Type.LOCAL_SHARDS) { + assert shard.primary() : "All replica shards are recovering from " + RecoverySource.Type.PEER; return getExpectedSizeOfResizedShard(shard, defaultValue, indexMetadata, clusterInfo, metadata, routingTable); - } else if (shard.unassigned() && shard.recoverySource().getType() == RecoverySource.Type.SNAPSHOT) { + } else if (shard.active() == false && shard.recoverySource().getType() == RecoverySource.Type.SNAPSHOT) { + assert shard.primary() : "All replica shards are recovering from " + RecoverySource.Type.PEER; return snapshotShardSizeInfo.getShardSize(shard, defaultValue); } else { - return clusterInfo.getShardSize(shard, defaultValue); + var shardSize = clusterInfo.getShardSize(shard.shardId(), shard.primary()); + if (shardSize == null && shard.primary() == false) { + // derive replica size from corresponding primary + shardSize = clusterInfo.getShardSize(shard.shardId(), true); + } + return shardSize == null ? 
defaultValue : shardSize; } } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index 0e0d15a02d042..e92a6106a6e33 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.snapshots.SnapshotShardSizeInfo; import java.util.Map; @@ -146,7 +147,7 @@ public static long sizeOfUnaccountedShards( routing, Math.max(routing.getExpectedShardSize(), 0L), clusterInfo, - null, + SnapshotShardSizeInfo.EMPTY, metadata, routingTable ); @@ -158,7 +159,7 @@ public static long sizeOfUnaccountedShards( if (subtractShardsMovingAway) { for (ShardRouting routing : node.relocating()) { if (dataPath.equals(clusterInfo.getDataPath(routing))) { - totalSize -= getExpectedShardSize(routing, 0L, clusterInfo, null, metadata, routingTable); + totalSize -= getExpectedShardSize(routing, 0L, clusterInfo, SnapshotShardSizeInfo.EMPTY, metadata, routingTable); } } } diff --git a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 0380bb80e0013..1d8a9ef1ce1c4 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.io.FileSystemUtils; -import org.elasticsearch.common.logging.ChunkedLoggingStream; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -61,9 +60,7 @@ import java.io.Closeable; import java.io.IOException; -import java.io.OutputStreamWriter; import java.io.UncheckedIOException; -import java.nio.charset.StandardCharsets; import java.nio.file.AtomicMoveNotSupportedException; import java.nio.file.DirectoryStream; import java.nio.file.FileStore; @@ -956,13 +953,7 @@ private void maybeLogThreadDump(ShardId shardId, String message) { return; } nextShardLockHotThreadsNanos = now + TimeUnit.SECONDS.toNanos(60); - final var hotThreads = new HotThreads().busiestThreads(500).ignoreIdleThreads(false).detect(); - try ( - var stream = ChunkedLoggingStream.create(logger, Level.DEBUG, prefix, ReferenceDocs.SHARD_LOCK_TROUBLESHOOTING); - var writer = new OutputStreamWriter(stream, StandardCharsets.UTF_8) - ) { - writer.write(hotThreads); - } + HotThreads.logLocalHotThreads(logger, Level.DEBUG, prefix, ReferenceDocs.SHARD_LOCK_TROUBLESHOOTING); } catch (Exception e) { logger.error(format("could not obtain %s", prefix), e); } finally { diff --git a/server/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java b/server/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java index ca018e02a3f80..15b5ea48f0730 100644 --- a/server/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java +++ b/server/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java @@ -307,7 +307,7 @@ private boolean hasAnyNodeFetching() { // visible for testing void asyncFetch(final DiscoveryNode[] nodes, long 
fetchingRound) { logger.trace("{} fetching [{}] from {}", shardId, type, nodes); - list(shardId, customDataPath, nodes, new ActionListener>() { + list(shardId, customDataPath, nodes, new ActionListener<>() { @Override public void onResponse(BaseNodesResponse response) { assert assertSameNodes(response); diff --git a/server/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java b/server/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java index 2dba5732a6981..f30a924eaa54e 100644 --- a/server/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java +++ b/server/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java @@ -261,7 +261,7 @@ protected void list( client.executeLocally( TransportNodesListGatewayStartedShards.TYPE, new TransportNodesListGatewayStartedShards.Request(shardId, customDataPath, nodes), - ActionListener.wrap(listener) + listener.safeMap(r -> r) // weaken type ); } } @@ -308,7 +308,7 @@ protected void list( client.executeLocally( TransportNodesListShardStoreMetadata.TYPE, new TransportNodesListShardStoreMetadata.Request(shardId, customDataPath, nodes), - ActionListener.wrap(listener) + listener.safeMap(r -> r) // weaken type ); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java index 946a0a1aa1718..af341e64661d1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java @@ -139,7 +139,7 @@ public static PostingsEnum leafLookup(LeafReader reader) throws IOException { return reader.postings(TERM); } - private class SyntheticFieldLoader implements SourceLoader.SyntheticFieldLoader { + private static class SyntheticFieldLoader implements SourceLoader.SyntheticFieldLoader { private PostingsEnum postings; private boolean hasValue; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/InferenceModelFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/InferenceModelFieldType.java new file mode 100644 index 0000000000000..490d7f36219cf --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/InferenceModelFieldType.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +/** + * Field type that uses an inference model. + */ +public interface InferenceModelFieldType { + /** + * Retrieve inference model used by the field type. 
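For illustration, a hypothetical consumer of the new InferenceModelFieldType interface (its single accessor is declared just below); the helper name and the check against MappedFieldType are assumptions, not part of this change.

    import org.elasticsearch.index.mapper.InferenceModelFieldType;
    import org.elasticsearch.index.mapper.MappedFieldType;

    class InferenceModelLookupSketch {
        // Returns the inference model id backing a field, or null if the field type is not inference-backed.
        static String inferenceModelFor(MappedFieldType fieldType) {
            if (fieldType instanceof InferenceModelFieldType inferenceFieldType) {
                return inferenceFieldType.getInferenceModel();
            }
            return null;
        }
    }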
+ * + * @return model id used by the field type + */ + String getInferenceModel(); +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index 958db80ae64c2..4f3c4814517e5 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -179,9 +179,6 @@ public SourceFieldMapper build() { return sourceFieldMapper; } - private IndexMode getIndexMode() { - return indexMode; - } } public static final TypeParser PARSER = new ConfigurableTypeParser( diff --git a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java index e9d2223029e14..f86142ffbe862 100644 --- a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java @@ -10,6 +10,7 @@ import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.NamedMatches; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.ParsingException; @@ -121,6 +122,9 @@ public final Query toQuery(SearchExecutionContext context) throws IOException { } } if (queryName != null) { + if (context.rewriteToNamedQuery()) { + query = NamedMatches.wrapQuery(queryName, query); + } context.addNamedQuery(queryName, query); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/FilteredSearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/FilteredSearchExecutionContext.java index a966d94e8b72f..26415a3d0e777 100644 --- a/server/src/main/java/org/elasticsearch/index/query/FilteredSearchExecutionContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/FilteredSearchExecutionContext.java @@ -378,4 +378,14 @@ public boolean indexMatches(String pattern) { public Set getMatchingFieldNames(String pattern) { return in.getMatchingFieldNames(pattern); } + + @Override + public void setRewriteToNamedQueries() { + in.setRewriteToNamedQueries(); + } + + @Override + public boolean rewriteToNamedQuery() { + return in.rewriteToNamedQuery(); + } } diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java index 143dfe7fe6e9d..c04182dfacd54 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java @@ -97,8 +97,8 @@ public class SearchExecutionContext extends QueryRewriteContext { private final Map namedQueries = new HashMap<>(); private NestedScope nestedScope; - private QueryBuilder aliasFilter; + private boolean rewriteToNamedQueries = false; /** * Build a {@linkplain SearchExecutionContext}. @@ -297,6 +297,10 @@ public Map copyNamedQueries() { return Map.copyOf(namedQueries); } + public boolean hasNamedQueries() { + return (namedQueries.isEmpty() == false); + } + /** * Parse a document with current mapping. 
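For illustration, a minimal standalone sketch of the named-query rewrite wired into AbstractQueryBuilder#toQuery above: when the context opts in, a query tagged with _name is wrapped in a Lucene NamedMatches query. The helper class below is an assumption, not part of this change.

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.NamedMatches;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermQuery;

    class NamedQueryRewriteSketch {
        // Mirrors the new branch: only wrap when a query name is present and the context opted in.
        static Query maybeWrap(String queryName, Query query, boolean rewriteToNamedQuery) {
            if (queryName != null && rewriteToNamedQuery) {
                return NamedMatches.wrapQuery(queryName, query);
            }
            return query;
        }

        static Query example() {
            return maybeWrap("my_named_query", new TermQuery(new Term("field", "value")), true);
        }
    }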
*/ @@ -619,4 +623,19 @@ public MappingLookup.CacheKey mappingCacheKey() { public NestedDocuments getNestedDocuments() { return new NestedDocuments(mappingLookup, bitsetFilterCache::getBitSetProducer, indexVersionCreated()); } + + /** + * Instructs to rewrite Elasticsearch queries with _name to Lucene NamedQuery + */ + public void setRewriteToNamedQueries() { + this.rewriteToNamedQueries = true; + } + + /** + * Returns true if Elasticsearch queries with _name must be rewritten to Lucene NamedQuery + * @return + */ + public boolean rewriteToNamedQuery() { + return rewriteToNamedQueries; + } } diff --git a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java index 9cc9e79ebb11a..162d7311a0594 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java @@ -45,6 +45,9 @@ public class RetentionLeaseActions { public static final long RETAIN_ALL = -1; + public static final ActionType ADD = ActionType.emptyResponse("indices:admin/seq_no/add_retention_lease"); + public static final ActionType RENEW = ActionType.emptyResponse("indices:admin/seq_no/renew_retention_lease"); + public static final ActionType REMOVE = ActionType.emptyResponse("indices:admin/seq_no/remove_retention_lease"); abstract static class TransportRetentionLeaseAction> extends TransportSingleShardAction { @@ -109,139 +112,109 @@ protected boolean resolveIndex(final T request) { } - public static class Add extends ActionType { + public static class TransportAddAction extends TransportRetentionLeaseAction { - public static final Add INSTANCE = new Add(); - public static final String ACTION_NAME = "indices:admin/seq_no/add_retention_lease"; - - private Add() { - super(ACTION_NAME, in -> ActionResponse.Empty.INSTANCE); + @Inject + public TransportAddAction( + final ThreadPool threadPool, + final ClusterService clusterService, + final TransportService transportService, + final ActionFilters actionFilters, + final IndexNameExpressionResolver indexNameExpressionResolver, + final IndicesService indicesService + ) { + super( + ADD.name(), + threadPool, + clusterService, + transportService, + actionFilters, + indexNameExpressionResolver, + indicesService, + AddRequest::new + ); } - public static class TransportAction extends TransportRetentionLeaseAction { - - @Inject - public TransportAction( - final ThreadPool threadPool, - final ClusterService clusterService, - final TransportService transportService, - final ActionFilters actionFilters, - final IndexNameExpressionResolver indexNameExpressionResolver, - final IndicesService indicesService - ) { - super( - ACTION_NAME, - threadPool, - clusterService, - transportService, - actionFilters, - indexNameExpressionResolver, - indicesService, - AddRequest::new - ); - } - - @Override - void doRetentionLeaseAction( - final IndexShard indexShard, - final AddRequest request, - final ActionListener listener - ) { - indexShard.addRetentionLease( - request.getId(), - request.getRetainingSequenceNumber(), - request.getSource(), - listener.map(r -> ActionResponse.Empty.INSTANCE) - ); - } + @Override + void doRetentionLeaseAction( + final IndexShard indexShard, + final AddRequest request, + final ActionListener listener + ) { + indexShard.addRetentionLease( + request.getId(), + request.getRetainingSequenceNumber(), + request.getSource(), + listener.map(r -> ActionResponse.Empty.INSTANCE) + ); 
} } - public static class Renew extends ActionType { - - public static final Renew INSTANCE = new Renew(); - public static final String ACTION_NAME = "indices:admin/seq_no/renew_retention_lease"; + public static class TransportRenewAction extends TransportRetentionLeaseAction { - private Renew() { - super(ACTION_NAME, in -> ActionResponse.Empty.INSTANCE); + @Inject + public TransportRenewAction( + final ThreadPool threadPool, + final ClusterService clusterService, + final TransportService transportService, + final ActionFilters actionFilters, + final IndexNameExpressionResolver indexNameExpressionResolver, + final IndicesService indicesService + ) { + super( + RENEW.name(), + threadPool, + clusterService, + transportService, + actionFilters, + indexNameExpressionResolver, + indicesService, + RenewRequest::new + ); } - public static class TransportAction extends TransportRetentionLeaseAction { - - @Inject - public TransportAction( - final ThreadPool threadPool, - final ClusterService clusterService, - final TransportService transportService, - final ActionFilters actionFilters, - final IndexNameExpressionResolver indexNameExpressionResolver, - final IndicesService indicesService - ) { - super( - ACTION_NAME, - threadPool, - clusterService, - transportService, - actionFilters, - indexNameExpressionResolver, - indicesService, - RenewRequest::new - ); - } - - @Override - void doRetentionLeaseAction( - final IndexShard indexShard, - final RenewRequest request, - final ActionListener listener - ) { - indexShard.renewRetentionLease(request.getId(), request.getRetainingSequenceNumber(), request.getSource()); - listener.onResponse(ActionResponse.Empty.INSTANCE); - } - + @Override + void doRetentionLeaseAction( + final IndexShard indexShard, + final RenewRequest request, + final ActionListener listener + ) { + indexShard.renewRetentionLease(request.getId(), request.getRetainingSequenceNumber(), request.getSource()); + listener.onResponse(ActionResponse.Empty.INSTANCE); } - } - public static class Remove extends ActionType { + } - public static final Remove INSTANCE = new Remove(); - public static final String ACTION_NAME = "indices:admin/seq_no/remove_retention_lease"; + public static class TransportRemoveAction extends TransportRetentionLeaseAction { - private Remove() { - super(ACTION_NAME, in -> ActionResponse.Empty.INSTANCE); + @Inject + public TransportRemoveAction( + final ThreadPool threadPool, + final ClusterService clusterService, + final TransportService transportService, + final ActionFilters actionFilters, + final IndexNameExpressionResolver indexNameExpressionResolver, + final IndicesService indicesService + ) { + super( + REMOVE.name(), + threadPool, + clusterService, + transportService, + actionFilters, + indexNameExpressionResolver, + indicesService, + RemoveRequest::new + ); } - public static class TransportAction extends TransportRetentionLeaseAction { - - @Inject - public TransportAction( - final ThreadPool threadPool, - final ClusterService clusterService, - final TransportService transportService, - final ActionFilters actionFilters, - final IndexNameExpressionResolver indexNameExpressionResolver, - final IndicesService indicesService - ) { - super( - ACTION_NAME, - threadPool, - clusterService, - transportService, - actionFilters, - indexNameExpressionResolver, - indicesService, - RemoveRequest::new - ); - } - - @Override - void doRetentionLeaseAction( - final IndexShard indexShard, - final RemoveRequest request, - final ActionListener listener - ) { - 
indexShard.removeRetentionLease(request.getId(), listener.map(r -> ActionResponse.Empty.INSTANCE)); - } + @Override + void doRetentionLeaseAction( + final IndexShard indexShard, + final RemoveRequest request, + final ActionListener listener + ) { + indexShard.removeRetentionLease(request.getId(), listener.map(r -> ActionResponse.Empty.INSTANCE)); } } diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java index 3ff760b753886..f23f28e4c1047 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java @@ -17,8 +17,8 @@ import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.snapshots.features.ResetFeatureStateResponse.ResetFeatureStateStatus; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; @@ -896,7 +896,7 @@ private static void cleanUpFeatureForIndices( ) { DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(); deleteIndexRequest.indices(indexNames); - client.execute(DeleteIndexAction.INSTANCE, deleteIndexRequest, new ActionListener<>() { + client.execute(TransportDeleteIndexAction.TYPE, deleteIndexRequest, new ActionListener<>() { @Override public void onResponse(AcknowledgedResponse acknowledgedResponse) { listener.onResponse(ResetFeatureStateStatus.success(name)); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java index e6ec6f25a71a9..61545ada107b6 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java @@ -320,7 +320,7 @@ private void doRecovery(final long recoveryId, final StartRecoveryRequest preExi assert indexShard.indexSettings().getIndexMetadata().isSearchableSnapshot() == false; try (onCompletion) { client.execute( - StatelessPrimaryRelocationAction.INSTANCE, + StatelessPrimaryRelocationAction.TYPE, new StatelessPrimaryRelocationAction.Request( recoveryId, indexShard.shardId(), diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/StatelessPrimaryRelocationAction.java b/server/src/main/java/org/elasticsearch/indices/recovery/StatelessPrimaryRelocationAction.java index eed6a1d02ae16..490f19fc9111c 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/StatelessPrimaryRelocationAction.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/StatelessPrimaryRelocationAction.java @@ -23,9 +23,8 @@ public class StatelessPrimaryRelocationAction { - public static final ActionType INSTANCE = new ActionType<>( - "internal:index/shard/recovery/stateless_primary_relocation", - in -> ActionResponse.Empty.INSTANCE + public static final ActionType TYPE = ActionType.emptyResponse( + "internal:index/shard/recovery/stateless_primary_relocation" ); public static class Request extends ActionRequest { diff --git 
a/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java b/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java index e02a3a40b77ef..ae454b6af1e6c 100644 --- a/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java +++ b/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java @@ -8,25 +8,32 @@ package org.elasticsearch.monitor.jvm; +import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.ReferenceDocs; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.logging.ChunkedLoggingStream; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.TimeValue; import org.elasticsearch.transport.Transports; +import java.io.OutputStreamWriter; +import java.io.Writer; import java.lang.management.ManagementFactory; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; +import java.nio.charset.StandardCharsets; import java.time.Clock; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.function.ToLongFunction; @@ -68,6 +75,31 @@ public class HotThreads { "DestroyJavaVM" ); + /** + * Capture and log the hot threads on the local node. Useful for capturing stack traces for unexpectedly-slow operations in production. + * The resulting log message may be large, and contains significant whitespace, so it is compressed and base64-encoded using {@link + * ChunkedLoggingStream}. + * + * @param logger The logger to use for the logging + * @param level The log level to use for the logging. + * @param prefix The prefix to emit on each chunk of the logging. + * @param referenceDocs A link to the docs describing how to decode the logging. 
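For illustration, a minimal caller of the new logLocalHotThreads helper documented here, mirroring the call made from NodeEnvironment#maybeLogThreadDump; the class name and prefix string are placeholders.

    import org.apache.logging.log4j.Level;
    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;
    import org.elasticsearch.common.ReferenceDocs;
    import org.elasticsearch.monitor.jvm.HotThreads;

    class HotThreadsLoggingSketch {
        private static final Logger logger = LogManager.getLogger(HotThreadsLoggingSketch.class);

        static void logShardLockDiagnostics() {
            // Emits the hot-threads report as compressed, base64-encoded chunks at DEBUG level,
            // and is a no-op when DEBUG logging is disabled.
            HotThreads.logLocalHotThreads(logger, Level.DEBUG, "hot threads while waiting for shard lock",
                ReferenceDocs.SHARD_LOCK_TROUBLESHOOTING);
        }
    }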
+ */ + public static void logLocalHotThreads(Logger logger, Level level, String prefix, ReferenceDocs referenceDocs) { + if (logger.isEnabled(level) == false) { + return; + } + + try ( + var stream = ChunkedLoggingStream.create(logger, level, prefix, referenceDocs); + var writer = new OutputStreamWriter(stream, StandardCharsets.UTF_8) + ) { + new HotThreads().busiestThreads(500).ignoreIdleThreads(false).detect(writer); + } catch (Exception e) { + logger.error(() -> org.elasticsearch.common.Strings.format("failed to write local hot threads with prefix [%s]", prefix), e); + } + } + public enum ReportType { CPU("cpu"), @@ -154,12 +186,12 @@ public HotThreads sortOrder(SortOrder order) { return this; } - public String detect() throws Exception { + public void detect(Writer writer) throws Exception { synchronized (mutex) { - return innerDetect(ManagementFactory.getThreadMXBean(), SunThreadInfo.INSTANCE, Thread.currentThread().getId(), (interval) -> { + innerDetect(ManagementFactory.getThreadMXBean(), SunThreadInfo.INSTANCE, Thread.currentThread().getId(), (interval) -> { Thread.sleep(interval); return null; - }); + }, writer); } } @@ -230,8 +262,13 @@ private double getTimeSharePercentage(long time) { return (((double) time) / interval.nanos()) * 100; } - String innerDetect(ThreadMXBean threadBean, SunThreadInfo sunThreadInfo, long currentThreadId, SleepFunction threadSleep) - throws Exception { + void innerDetect( + ThreadMXBean threadBean, + SunThreadInfo sunThreadInfo, + long currentThreadId, + SleepFunction threadSleep, + Writer writer + ) throws Exception { if (threadBean.isThreadCpuTimeSupported() == false) { throw new ElasticsearchException("thread CPU time is not supported on this JDK"); } @@ -246,14 +283,14 @@ String innerDetect(ThreadMXBean threadBean, SunThreadInfo sunThreadInfo, long cu throw new ElasticsearchException("thread wait/blocked time accounting is not supported on this JDK"); } - StringBuilder sb = new StringBuilder().append("Hot threads at ") + writer.append("Hot threads at ") .append(DATE_TIME_FORMATTER.format(LocalDateTime.now(Clock.systemUTC()))) .append(", interval=") - .append(interval) + .append(interval.toString()) .append(", busiestThreads=") - .append(busiestThreads) + .append(Integer.toString(busiestThreads)) .append(", ignoreIdleThreads=") - .append(ignoreIdleThreads) + .append(Boolean.toString(ignoreIdleThreads)) .append(":\n"); // Capture before and after thread state with timings @@ -303,9 +340,8 @@ String innerDetect(ThreadMXBean threadBean, SunThreadInfo sunThreadInfo, long cu ThreadTimeAccumulator topThread = topThreads.get(t); switch (type) { - case MEM -> sb.append( - String.format( - Locale.ROOT, + case MEM -> writer.append( + Strings.format( "%n%s memory allocated by thread '%s'%n", ByteSizeValue.ofBytes(topThread.getAllocatedBytes()), threadName @@ -318,9 +354,8 @@ String innerDetect(ThreadMXBean threadBean, SunThreadInfo sunThreadInfo, long cu : getTimeSharePercentage(topThread.getOtherTime()); double percentTotal = (Transports.isTransportThread(threadName)) ? percentCpu : percentOther + percentCpu; String otherLabel = (Transports.isTransportThread(threadName)) ? 
"idle" : "other"; - sb.append( - String.format( - Locale.ROOT, + writer.append( + Strings.format( "%n%4.1f%% [cpu=%1.1f%%, %s=%1.1f%%] (%s out of %s) %s usage by thread '%s'%n", percentTotal, percentCpu, @@ -336,9 +371,8 @@ String innerDetect(ThreadMXBean threadBean, SunThreadInfo sunThreadInfo, long cu default -> { long time = ThreadTimeAccumulator.valueGetterForReportType(type).applyAsLong(topThread); double percent = getTimeSharePercentage(time); - sb.append( - String.format( - Locale.ROOT, + writer.append( + Strings.format( "%n%4.1f%% (%s out of %s) %s usage by thread '%s'%n", percent, TimeValue.timeValueNanos(time), @@ -377,29 +411,21 @@ String innerDetect(ThreadMXBean threadBean, SunThreadInfo sunThreadInfo, long cu if (allInfos[i][t] != null) { final StackTraceElement[] show = allInfos[i][t].getStackTrace(); if (count == 1) { - sb.append(String.format(Locale.ROOT, " unique snapshot%n")); + writer.append(Strings.format(" unique snapshot%n")); for (StackTraceElement frame : show) { - sb.append(String.format(Locale.ROOT, " %s%n", frame)); + writer.append(Strings.format(" %s%n", frame)); } } else { - sb.append( - String.format( - Locale.ROOT, - " %d/%d snapshots sharing following %d elements%n", - count, - threadElementsSnapshotCount, - maxSim - ) + writer.append( + Strings.format(" %d/%d snapshots sharing following %d elements%n", count, threadElementsSnapshotCount, maxSim) ); for (int l = show.length - maxSim; l < show.length; l++) { - sb.append(String.format(Locale.ROOT, " %s%n", show[l])); + writer.append(Strings.format(" %s%n", show[l])); } } } } } - - return sb.toString(); } static int similarity(ThreadInfo threadInfo, ThreadInfo threadInfo0) { diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestAddVotingConfigExclusionAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestAddVotingConfigExclusionAction.java index 2cfc152fdcebd..8c8624f1766b1 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestAddVotingConfigExclusionAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestAddVotingConfigExclusionAction.java @@ -8,8 +8,8 @@ package org.elasticsearch.rest.action.admin.cluster; -import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; +import org.elasticsearch.action.admin.cluster.configuration.TransportAddVotingConfigExclusionsAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.core.RestApiVersion; @@ -54,7 +54,7 @@ public boolean canTripCircuitBreaker() { protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { AddVotingConfigExclusionsRequest votingConfigExclusionsRequest = resolveVotingConfigExclusionsRequest(request); return channel -> client.execute( - AddVotingConfigExclusionsAction.INSTANCE, + TransportAddVotingConfigExclusionsAction.TYPE, votingConfigExclusionsRequest, new RestToXContentListener<>(channel) ); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClearVotingConfigExclusionsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClearVotingConfigExclusionsAction.java index d0797b4b1b8d4..69b51afb8d257 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClearVotingConfigExclusionsAction.java +++ 
b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClearVotingConfigExclusionsAction.java @@ -8,8 +8,8 @@ package org.elasticsearch.rest.action.admin.cluster; -import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsRequest; +import org.elasticsearch.action.admin.cluster.configuration.TransportClearVotingConfigExclusionsAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -40,7 +40,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final var req = resolveVotingConfigExclusionsRequest(request); - return channel -> client.execute(ClearVotingConfigExclusionsAction.INSTANCE, req, new RestToXContentListener<>(channel)); + return channel -> client.execute(TransportClearVotingConfigExclusionsAction.TYPE, req, new RestToXContentListener<>(channel)); } static ClearVotingConfigExclusionsRequest resolveVotingConfigExclusionsRequest(final RestRequest request) { diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteDesiredBalanceAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteDesiredBalanceAction.java index d471d6bdaa3e3..66382c20cae82 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteDesiredBalanceAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteDesiredBalanceAction.java @@ -8,8 +8,8 @@ package org.elasticsearch.rest.action.admin.cluster; -import org.elasticsearch.action.admin.cluster.allocation.DeleteDesiredBalanceAction; import org.elasticsearch.action.admin.cluster.allocation.DesiredBalanceRequest; +import org.elasticsearch.action.admin.cluster.allocation.TransportDeleteDesiredBalanceAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -33,7 +33,7 @@ public List routes() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { return channel -> client.execute( - DeleteDesiredBalanceAction.INSTANCE, + TransportDeleteDesiredBalanceAction.TYPE, new DesiredBalanceRequest(), new RestToXContentListener<>(channel) ); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteDesiredNodesAction.java index fbd9dc2f30294..27bcd82075f04 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteDesiredNodesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteDesiredNodesAction.java @@ -8,7 +8,7 @@ package org.elasticsearch.rest.action.admin.cluster; -import org.elasticsearch.action.admin.cluster.desirednodes.DeleteDesiredNodesAction; +import org.elasticsearch.action.admin.cluster.desirednodes.TransportDeleteDesiredNodesAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -30,10 +30,10 @@ public List routes() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - final 
DeleteDesiredNodesAction.Request deleteDesiredNodesRequest = new DeleteDesiredNodesAction.Request(); + final TransportDeleteDesiredNodesAction.Request deleteDesiredNodesRequest = new TransportDeleteDesiredNodesAction.Request(); deleteDesiredNodesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", deleteDesiredNodesRequest.masterNodeTimeout())); return restChannel -> client.execute( - DeleteDesiredNodesAction.INSTANCE, + TransportDeleteDesiredNodesAction.TYPE, deleteDesiredNodesRequest, new RestToXContentListener<>(restChannel) ); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetDesiredBalanceAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetDesiredBalanceAction.java index 11732747b2b3a..312ce353b6d42 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetDesiredBalanceAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetDesiredBalanceAction.java @@ -9,7 +9,7 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.allocation.DesiredBalanceRequest; -import org.elasticsearch.action.admin.cluster.allocation.GetDesiredBalanceAction; +import org.elasticsearch.action.admin.cluster.allocation.TransportGetDesiredBalanceAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -33,7 +33,7 @@ public List routes() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { return restChannel -> client.execute( - GetDesiredBalanceAction.INSTANCE, + TransportGetDesiredBalanceAction.TYPE, new DesiredBalanceRequest(), new RestChunkedToXContentListener<>(restChannel) ); diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardSizeInfo.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardSizeInfo.java index 864303fe6b496..29ae2d1c5da4b 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardSizeInfo.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardSizeInfo.java @@ -9,6 +9,7 @@ package org.elasticsearch.snapshots; import org.elasticsearch.cluster.routing.RecoverySource; +import org.elasticsearch.cluster.routing.RecoverySource.SnapshotRecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import java.util.Map; @@ -24,11 +25,8 @@ public SnapshotShardSizeInfo(Map listeners = new ConcurrentLinkedQueue<>(); + private final SubscribableListener listeners = new SubscribableListener<>(); public CancellableTask(long id, String type, String action, String description, TaskId parentTaskId, Map headers) { super(id, type, action, description, parentTaskId, headers); @@ -39,7 +39,7 @@ final void cancel(String reason) { this.isCancelled = true; this.reason = reason; } - listeners.forEach(CancellationListener::onCancelled); + listeners.onResponse(null); onCancelled(); } @@ -74,14 +74,7 @@ public final String getReasonCancelled() { * This method adds a listener that needs to be notified if this task is cancelled. 
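For illustration, a minimal sketch of registering a CancellationListener via the addListener method documented here; after this change the listener is held by a SubscribableListener and released when the task is cancelled. The class name below is a placeholder, not part of this change.

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;
    import org.elasticsearch.tasks.CancellableTask;

    class CancellationWatcherSketch {
        private static final Logger logger = LogManager.getLogger(CancellationWatcherSketch.class);

        static void watchForCancellation(CancellableTask task) {
            // CancellationListener declares a single onCancelled() method, so a lambda is enough.
            task.addListener(() -> logger.info("task [{}] cancelled: [{}]", task.getId(), task.getReasonCancelled()));
        }
    }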
*/ public final void addListener(CancellationListener listener) { - synchronized (this) { - if (this.isCancelled == false) { - listeners.add(listener); - } - } - if (isCancelled) { - listener.onCancelled(); - } + listeners.addListener(new CancellationListenerAdapter(listener)); } /** @@ -127,4 +120,16 @@ private TaskCancelledException getTaskCancelledException() { public interface CancellationListener { void onCancelled(); } + + private record CancellationListenerAdapter(CancellationListener cancellationListener) implements ActionListener { + @Override + public void onResponse(Void unused) { + cancellationListener.onCancelled(); + } + + @Override + public void onFailure(Exception e) { + assert false : e; + } + } } diff --git a/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java b/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java index 2d5d7b7b522d1..6be145c6e9e33 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java @@ -107,49 +107,6 @@ public String toString() { ); } - public void testWrapListener() { - var succeeded = new AtomicBoolean(); - var failed = new AtomicBoolean(); - - var listener = ActionListener.wrap(new ActionListener<>() { - @Override - public void onResponse(Object o) { - assertTrue(succeeded.compareAndSet(false, true)); - if (o instanceof RuntimeException e) { - throw e; - } - } - - @Override - public void onFailure(Exception e) { - assertTrue(failed.compareAndSet(false, true)); - assertEquals("test exception", e.getMessage()); - if (e instanceof UnsupportedOperationException uoe) { - throw uoe; - } - } - - @Override - public String toString() { - return "test listener"; - } - }); - - assertEquals("wrapped{test listener}", listener.toString()); - - listener.onResponse(new Object()); - assertTrue(succeeded.getAndSet(false)); - assertFalse(failed.getAndSet(false)); - - listener.onFailure(new RuntimeException("test exception")); - assertFalse(succeeded.getAndSet(false)); - assertTrue(failed.getAndSet(false)); - - listener.onResponse(new RuntimeException("test exception")); - assertTrue(succeeded.getAndSet(false)); - assertTrue(failed.getAndSet(false)); - } - public void testOnResponse() { final int numListeners = randomIntBetween(1, 20); List> refList = new ArrayList<>(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceActionTests.java index a98d7662b8983..a598e58ada75f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceActionTests.java @@ -90,7 +90,7 @@ public void initialize() { private static DesiredBalanceResponse execute(TransportGetDesiredBalanceAction action, ClusterState clusterState) throws Exception { return PlainActionFuture.get( future -> action.masterOperation( - new Task(1, "test", GetDesiredBalanceAction.NAME, "", TaskId.EMPTY_TASK_ID, Map.of()), + new Task(1, "test", TransportGetDesiredBalanceAction.TYPE.name(), "", TaskId.EMPTY_TASK_ID, Map.of()), new DesiredBalanceRequest(), clusterState, future diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsActionTests.java 
b/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsActionTests.java index 9f26e0a19c990..5f6540d46c719 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportAddVotingConfigExclusionsActionTests.java @@ -179,7 +179,7 @@ public void testWithdrawsVoteFromANode() { clusterStateObserver.waitForNextChange(new AdjustConfigurationForExclusions()); transportService.sendRequest( localNode, - AddVotingConfigExclusionsAction.NAME, + TransportAddVotingConfigExclusionsAction.TYPE.name(), new AddVotingConfigExclusionsRequest("other1"), expectSuccess(r -> { assertNotNull(r); @@ -197,7 +197,7 @@ public void testWithdrawsVotesFromMultipleNodes() { clusterStateObserver.waitForNextChange(new AdjustConfigurationForExclusions()); transportService.sendRequest( localNode, - AddVotingConfigExclusionsAction.NAME, + TransportAddVotingConfigExclusionsAction.TYPE.name(), new AddVotingConfigExclusionsRequest("other1", "other2"), expectSuccess(r -> { assertNotNull(r); @@ -230,7 +230,7 @@ public void testReturnsImmediatelyIfVoteAlreadyWithdrawn() { // no observer to reconfigure transportService.sendRequest( localNode, - AddVotingConfigExclusionsAction.NAME, + TransportAddVotingConfigExclusionsAction.TYPE.name(), new AddVotingConfigExclusionsRequest("other1"), expectSuccess(r -> { assertNotNull(r); @@ -247,7 +247,7 @@ public void testExcludeAbsentNodesByNodeIds() { final var countDownLatch = new CountDownLatch(1); transportService.sendRequest( localNode, - AddVotingConfigExclusionsAction.NAME, + TransportAddVotingConfigExclusionsAction.TYPE.name(), new AddVotingConfigExclusionsRequest(new String[] { "absent_id" }, Strings.EMPTY_ARRAY, TimeValue.timeValueSeconds(30)), expectSuccess(r -> { final var state = clusterService.getClusterApplierService().state(); @@ -267,7 +267,7 @@ public void testExcludeExistingNodesByNodeIds() { clusterStateObserver.waitForNextChange(new AdjustConfigurationForExclusions()); transportService.sendRequest( localNode, - AddVotingConfigExclusionsAction.NAME, + TransportAddVotingConfigExclusionsAction.TYPE.name(), new AddVotingConfigExclusionsRequest(new String[] { "other1", "other2" }, Strings.EMPTY_ARRAY, TimeValue.timeValueSeconds(30)), expectSuccess(r -> { assertNotNull(r); @@ -284,7 +284,7 @@ public void testExcludeAbsentNodesByNodeNames() { final var countDownLatch = new CountDownLatch(1); transportService.sendRequest( localNode, - AddVotingConfigExclusionsAction.NAME, + TransportAddVotingConfigExclusionsAction.TYPE.name(), new AddVotingConfigExclusionsRequest("absent_node"), expectSuccess(r -> { final var state = clusterService.getClusterApplierService().state(); @@ -304,7 +304,7 @@ public void testExcludeExistingNodesByNodeNames() { clusterStateObserver.waitForNextChange(new AdjustConfigurationForExclusions()); transportService.sendRequest( localNode, - AddVotingConfigExclusionsAction.NAME, + TransportAddVotingConfigExclusionsAction.TYPE.name(), new AddVotingConfigExclusionsRequest("other1", "other2"), expectSuccess(r -> { assertNotNull(r); @@ -332,7 +332,7 @@ public void testTriggersReconfigurationEvenIfAllExclusionsAlreadyAddedButStillIn clusterStateObserver.waitForNextChange(new AdjustConfigurationForExclusions()); transportService.sendRequest( localNode, - AddVotingConfigExclusionsAction.NAME, + TransportAddVotingConfigExclusionsAction.TYPE.name(), randomFrom( 
new AddVotingConfigExclusionsRequest("other1"), new AddVotingConfigExclusionsRequest(new String[] { "other1" }, Strings.EMPTY_ARRAY, TimeValue.timeValueSeconds(30)) @@ -367,7 +367,7 @@ public void testExcludeByNodeIdSucceedsEvenIfAllExclusionsAlreadyAdded() { clusterStateObserver.waitForNextChange(new AdjustConfigurationForExclusions()); transportService.sendRequest( localNode, - AddVotingConfigExclusionsAction.NAME, + TransportAddVotingConfigExclusionsAction.TYPE.name(), new AddVotingConfigExclusionsRequest(new String[] { "other1" }, Strings.EMPTY_ARRAY, TimeValue.timeValueSeconds(30)), expectSuccess(r -> { assertNotNull(r); @@ -399,7 +399,7 @@ public void testExcludeByNodeNameSucceedsEvenIfAllExclusionsAlreadyAdded() { transportService.sendRequest( localNode, - AddVotingConfigExclusionsAction.NAME, + TransportAddVotingConfigExclusionsAction.TYPE.name(), new AddVotingConfigExclusionsRequest("other1"), expectSuccess(r -> { assertNotNull(r); @@ -456,7 +456,7 @@ public void testReturnsErrorIfMaximumExclusionCountExceeded() { transportService.sendRequest( localNode, - AddVotingConfigExclusionsAction.NAME, + TransportAddVotingConfigExclusionsAction.TYPE.name(), new AddVotingConfigExclusionsRequest("other1", "other2"), expectError(e -> { final Throwable rootCause = e.getRootCause(); @@ -484,7 +484,7 @@ public void testTimesOut() { transportService.sendRequest( localNode, - AddVotingConfigExclusionsAction.NAME, + TransportAddVotingConfigExclusionsAction.TYPE.name(), new AddVotingConfigExclusionsRequest(Strings.EMPTY_ARRAY, new String[] { "other1" }, TimeValue.timeValueMillis(100)), expectError(e -> { final Throwable rootCause = e.getRootCause(); @@ -503,7 +503,7 @@ public void testCannotAddVotingConfigExclusionsWhenItIsDisabled() { transportService.sendRequest( localNode, - AddVotingConfigExclusionsAction.NAME, + TransportAddVotingConfigExclusionsAction.TYPE.name(), new AddVotingConfigExclusionsRequest(Strings.EMPTY_ARRAY, new String[] { "other1" }, TimeValue.timeValueMillis(100)), expectError(e -> { final Throwable rootCause = e.getRootCause(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsActionTests.java index 2193e07f84f38..17a22ff8e82fd 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsActionTests.java @@ -122,7 +122,7 @@ public void testClearsVotingConfigExclusions() { clearVotingConfigExclusionsRequest.setWaitForRemoval(false); transportService.sendRequest( localNode, - ClearVotingConfigExclusionsAction.NAME, + TransportClearVotingConfigExclusionsAction.TYPE.name(), clearVotingConfigExclusionsRequest, expectSuccess(r -> { assertNotNull(r); @@ -140,7 +140,7 @@ public void testTimesOutIfWaitingForNodesThatAreNotRemoved() { clearVotingConfigExclusionsRequest.setTimeout(TimeValue.timeValueMillis(100)); transportService.sendRequest( localNode, - ClearVotingConfigExclusionsAction.NAME, + TransportClearVotingConfigExclusionsAction.TYPE.name(), clearVotingConfigExclusionsRequest, expectError(e -> { assertThat( @@ -164,7 +164,7 @@ public void testSucceedsIfNodesAreRemovedWhileWaiting() { transportService.sendRequest( localNode, - ClearVotingConfigExclusionsAction.NAME, + 
TransportClearVotingConfigExclusionsAction.TYPE.name(), new ClearVotingConfigExclusionsRequest(), expectSuccess(r -> { assertThat(clusterService.getClusterApplierService().state().getVotingConfigExclusions(), empty()); @@ -186,7 +186,7 @@ public void testCannotClearVotingConfigurationWhenItIsDisabled() { transportService.sendRequest( localNode, - ClearVotingConfigExclusionsAction.NAME, + TransportClearVotingConfigExclusionsAction.TYPE.name(), new ClearVotingConfigExclusionsRequest(), expectError(e -> { final Throwable rootCause = e.getRootCause(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java index 0cc1588b814c5..345f85470a056 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.test.ReachabilityChecker; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.FakeTcpChannel; import org.elasticsearch.transport.TestTransportChannels; @@ -649,4 +650,16 @@ public void onFailure(Exception e) { concurrentNotify.join(); } + public void testReleaseListenersOnCancellation() { + final CancellableTask task = new CancellableTask(randomLong(), "transport", "action", "", TaskId.EMPTY_TASK_ID, emptyMap()); + final AtomicBoolean cancelNotified = new AtomicBoolean(); + final ReachabilityChecker reachabilityChecker = new ReachabilityChecker(); + task.addListener(reachabilityChecker.register(() -> assertTrue(cancelNotified.compareAndSet(false, true)))); + + reachabilityChecker.checkReachable(); + TaskCancelHelper.cancel(task, "simulated"); + reachabilityChecker.ensureUnreachable(); + assertTrue(cancelNotified.get()); + } + } diff --git a/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java b/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java index 01acbfe06b9fd..5175fee7edceb 100644 --- a/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java +++ b/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java @@ -14,7 +14,7 @@ import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; -import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptAction; +import org.elasticsearch.action.admin.cluster.storedscripts.TransportDeleteStoredScriptAction; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.flush.FlushAction; @@ -50,7 +50,7 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase { TransportGetAction.TYPE, TransportSearchAction.TYPE, TransportDeleteAction.TYPE, - DeleteStoredScriptAction.INSTANCE, + TransportDeleteStoredScriptAction.TYPE, TransportIndexAction.TYPE, // cluster admin actions @@ -102,7 +102,7 @@ public void testActions() { client.admin() .cluster() .prepareDeleteStoredScript("id") - 
.execute(new AssertingActionListener<>(DeleteStoredScriptAction.NAME, client.threadPool())); + .execute(new AssertingActionListener<>(TransportDeleteStoredScriptAction.TYPE.name(), client.threadPool())); client.prepareIndex("idx") .setId("id") .setSource("source", XContentType.JSON) diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimatorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimatorTests.java index c894585edd776..62fd21defa676 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimatorTests.java @@ -59,6 +59,19 @@ public void testShouldReadExpectedSizeFromClusterInfo() { assertThat(getExpectedShardSize(shard, defaultValue, allocation), equalTo(shardSize)); } + public void testShouldReadExpectedSizeFromPrimaryWhenAddingNewReplica() { + + var shardSize = randomLongBetween(100, 1000); + var state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata(index("my-index"))).build(); + var primary = newShardRouting("my-index", 0, randomIdentifier(), true, ShardRoutingState.STARTED); + var replica = newShardRouting("my-index", 0, randomIdentifier(), false, ShardRoutingState.INITIALIZING); + + var clusterInfo = createClusterInfo(primary, shardSize); + var allocation = createRoutingAllocation(state, clusterInfo, SnapshotShardSizeInfo.EMPTY); + + assertThat(getExpectedShardSize(replica, defaultValue, allocation), equalTo(shardSize)); + } + public void testShouldReadExpectedSizeWhenInitializingFromSnapshot() { var snapshotShardSize = randomLongBetween(100, 1000); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java index 4471ed678b013..6283b721cf9bd 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java @@ -20,10 +20,10 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; -import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.shard.ShardId; import static org.elasticsearch.cluster.routing.RoutingNodesHelper.shardsWithState; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; @@ -50,9 +50,9 @@ public void testRebalanceOnlyAfterAllShardsAreActive() { .build(), () -> new ClusterInfo() { @Override - public Long getShardSize(ShardRouting shardRouting) { - if (shardRouting.getIndexName().equals("test")) { - return sizes[shardRouting.getId()]; + public Long getShardSize(ShardId shardId, boolean primary) { + if (shardId.getIndexName().equals("test")) { + return sizes[shardId.getId()]; } return null; } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java index 88c7dc24b4089..b54480cdc0856 100644 --- 
a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java @@ -507,13 +507,8 @@ public void testShardSizeAndRelocatingSize() { test_2, other_0.getTargetRelocatingShard() ); - if (other_0.primary()) { - assertEquals(10100L, sizeOfUnaccountedShards(allocation, node, false, "/dev/null")); - assertEquals(10090L, sizeOfUnaccountedShards(allocation, node, true, "/dev/null")); - } else { - assertEquals(100L, sizeOfUnaccountedShards(allocation, node, false, "/dev/null")); - assertEquals(90L, sizeOfUnaccountedShards(allocation, node, true, "/dev/null")); - } + assertEquals(10100L, sizeOfUnaccountedShards(allocation, node, false, "/dev/null")); + assertEquals(10090L, sizeOfUnaccountedShards(allocation, node, true, "/dev/null")); } public void testTakesIntoAccountExpectedSizeForInitializingSearchableSnapshots() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ExternalMetadataMapper.java b/server/src/test/java/org/elasticsearch/index/mapper/ExternalMetadataMapper.java deleted file mode 100644 index aca1984c502cd..0000000000000 --- a/server/src/test/java/org/elasticsearch/index/mapper/ExternalMetadataMapper.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.document.Field.Store; -import org.apache.lucene.document.StringField; - -public class ExternalMetadataMapper extends MetadataFieldMapper { - - static final String CONTENT_TYPE = "_external_root"; - static final String FIELD_NAME = "_is_external"; - static final String FIELD_VALUE = "true"; - - protected ExternalMetadataMapper() { - super(new BooleanFieldMapper.BooleanFieldType(FIELD_NAME)); - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - public void postParse(DocumentParserContext context) { - context.doc().add(new StringField(FIELD_NAME, FIELD_VALUE, Store.YES)); - } - - @Override - public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { - throw new UnsupportedOperationException(); - } - - public static final TypeParser PARSER = new FixedTypeParser(c -> new ExternalMetadataMapper()); -} diff --git a/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeaseActionsTests.java b/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeaseActionsTests.java index 7448814c5c1ad..3949770ae09ba 100644 --- a/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeaseActionsTests.java +++ b/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeaseActionsTests.java @@ -45,7 +45,7 @@ public void testAddAction() { final long retainingSequenceNumber = randomBoolean() ? RETAIN_ALL : randomNonNegativeLong(); final String source = randomAlphaOfLength(8); client().execute( - RetentionLeaseActions.Add.INSTANCE, + RetentionLeaseActions.ADD, new RetentionLeaseActions.AddRequest(indexService.getShard(0).shardId(), id, retainingSequenceNumber, source) ).actionGet(); @@ -76,7 +76,7 @@ public void testAddAlreadyExists() { final long retainingSequenceNumber = randomBoolean() ? 
RETAIN_ALL : randomNonNegativeLong(); final String source = randomAlphaOfLength(8); client().execute( - RetentionLeaseActions.Add.INSTANCE, + RetentionLeaseActions.ADD, new RetentionLeaseActions.AddRequest(indexService.getShard(0).shardId(), id, retainingSequenceNumber, source) ).actionGet(); @@ -87,7 +87,7 @@ public void testAddAlreadyExists() { final RetentionLeaseAlreadyExistsException e = expectThrows( RetentionLeaseAlreadyExistsException.class, () -> client().execute( - RetentionLeaseActions.Add.INSTANCE, + RetentionLeaseActions.ADD, new RetentionLeaseActions.AddRequest(indexService.getShard(0).shardId(), id, nextRetainingSequenceNumber, source) ).actionGet() ); @@ -112,7 +112,7 @@ public void testRenewAction() throws InterruptedException { final TimeValue estimatedTimeInterval = ThreadPool.ESTIMATED_TIME_INTERVAL_SETTING.get(node().settings()); client().execute( - RetentionLeaseActions.Add.INSTANCE, + RetentionLeaseActions.ADD, new RetentionLeaseActions.AddRequest(indexService.getShard(0).shardId(), id, retainingSequenceNumber, source) ).actionGet(); @@ -155,7 +155,7 @@ public void testRenewAction() throws InterruptedException { } while (threadPool.absoluteTimeInMillis() <= timestampUpperBound); client().execute( - RetentionLeaseActions.Renew.INSTANCE, + RetentionLeaseActions.RENEW, new RetentionLeaseActions.RenewRequest(indexService.getShard(0).shardId(), id, nextRetainingSequenceNumber, source) ).actionGet(); @@ -193,7 +193,7 @@ public void testRenewNotFound() { final RetentionLeaseNotFoundException e = expectThrows( RetentionLeaseNotFoundException.class, () -> client().execute( - RetentionLeaseActions.Renew.INSTANCE, + RetentionLeaseActions.RENEW, new RetentionLeaseActions.RenewRequest(indexService.getShard(0).shardId(), id, retainingSequenceNumber, source) ).actionGet() ); @@ -209,14 +209,12 @@ public void testRemoveAction() { final long retainingSequenceNumber = randomBoolean() ? 
RETAIN_ALL : randomNonNegativeLong(); final String source = randomAlphaOfLength(8); client().execute( - RetentionLeaseActions.Add.INSTANCE, + RetentionLeaseActions.ADD, new RetentionLeaseActions.AddRequest(indexService.getShard(0).shardId(), id, retainingSequenceNumber, source) ).actionGet(); - client().execute( - RetentionLeaseActions.Remove.INSTANCE, - new RetentionLeaseActions.RemoveRequest(indexService.getShard(0).shardId(), id) - ).actionGet(); + client().execute(RetentionLeaseActions.REMOVE, new RetentionLeaseActions.RemoveRequest(indexService.getShard(0).shardId(), id)) + .actionGet(); final IndicesStatsResponse stats = client().execute(IndicesStatsAction.INSTANCE, new IndicesStatsRequest().indices("index")) .actionGet(); @@ -241,7 +239,7 @@ public void testRemoveNotFound() { final RetentionLeaseNotFoundException e = expectThrows( RetentionLeaseNotFoundException.class, () -> client().execute( - RetentionLeaseActions.Remove.INSTANCE, + RetentionLeaseActions.REMOVE, new RetentionLeaseActions.RemoveRequest(indexService.getShard(0).shardId(), id) ).actionGet() ); @@ -258,7 +256,7 @@ public void testAddUnderBlock() throws InterruptedException { runActionUnderBlockTest( indexService, (shardId, actionLatch) -> client().execute( - RetentionLeaseActions.Add.INSTANCE, + RetentionLeaseActions.ADD, new RetentionLeaseActions.AddRequest(shardId, id, retainingSequenceNumber, source), new ActionListener<>() { @@ -310,7 +308,7 @@ public void testRenewUnderBlock() throws InterruptedException { final TimeValue estimatedTimeInterval = ThreadPool.ESTIMATED_TIME_INTERVAL_SETTING.get(node().settings()); client().execute( - RetentionLeaseActions.Add.INSTANCE, + RetentionLeaseActions.ADD, new RetentionLeaseActions.AddRequest(indexService.getShard(0).shardId(), id, retainingSequenceNumber, source) ).actionGet(); @@ -355,7 +353,7 @@ public void testRenewUnderBlock() throws InterruptedException { runActionUnderBlockTest( indexService, (shardId, actionLatch) -> client().execute( - RetentionLeaseActions.Renew.INSTANCE, + RetentionLeaseActions.RENEW, new RetentionLeaseActions.RenewRequest(shardId, id, nextRetainingSequenceNumber, source), new ActionListener<>() { @@ -404,14 +402,14 @@ public void testRemoveUnderBlock() throws InterruptedException { final String source = randomAlphaOfLength(8); client().execute( - RetentionLeaseActions.Add.INSTANCE, + RetentionLeaseActions.ADD, new RetentionLeaseActions.AddRequest(indexService.getShard(0).shardId(), id, retainingSequenceNumber, source) ).actionGet(); runActionUnderBlockTest( indexService, (shardId, actionLatch) -> client().execute( - RetentionLeaseActions.Remove.INSTANCE, + RetentionLeaseActions.REMOVE, new RetentionLeaseActions.RemoveRequest(shardId, id), new ActionListener<>() { diff --git a/server/src/test/java/org/elasticsearch/monitor/jvm/HotThreadsTests.java b/server/src/test/java/org/elasticsearch/monitor/jvm/HotThreadsTests.java index 569062317b7b5..16760d103f17f 100644 --- a/server/src/test/java/org/elasticsearch/monitor/jvm/HotThreadsTests.java +++ b/server/src/test/java/org/elasticsearch/monitor/jvm/HotThreadsTests.java @@ -15,6 +15,7 @@ import org.mockito.ArgumentMatchers; import org.mockito.InOrder; +import java.io.StringWriter; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; import java.util.ArrayList; @@ -302,7 +303,7 @@ public void testInnerDetectCPUMode() throws Exception { .threadElementsSnapshotCount(11) .ignoreIdleThreads(false); - String innerResult = hotThreads.innerDetect(mockedMXBean, mockedSunThreadInfo, 
mockCurrentThreadId, (interval) -> null); + String innerResult = innerDetect(hotThreads, mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId); assertThat(innerResult, containsString("Hot threads at ")); assertThat(innerResult, containsString("interval=10ms, busiestThreads=4, ignoreIdleThreads=false:")); @@ -321,7 +322,7 @@ public void testInnerDetectCPUMode() throws Exception { assertThat(innerResult, containsString("org.elasticsearch.monitor.testOther.methodFinal(Some_File:1)")); // Let's ask again without progressing the CPU thread counters, e.g. resetting the mocks - innerResult = hotThreads.innerDetect(mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId, (interval) -> null); + innerResult = innerDetect(hotThreads, mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId); assertThat(innerResult, containsString("0.0% [cpu=0.0%, other=0.0%] (0s out of 10ms) cpu usage by thread 'Thread 4'")); assertThat(innerResult, containsString("0.0% [cpu=0.0%, other=0.0%] (0s out of 10ms) cpu usage by thread 'Thread 3'")); @@ -340,7 +341,7 @@ public void testInnerDetectCPUMode() throws Exception { .threadElementsSnapshotCount(11) .ignoreIdleThreads(false); - innerResult = hotThreads.innerDetect(mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId, (interval) -> null); + innerResult = innerDetect(hotThreads, mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId); assertThat(innerResult, containsString("Hot threads at ")); assertThat(innerResult, containsString("interval=10ms, busiestThreads=4, ignoreIdleThreads=false:")); @@ -377,7 +378,7 @@ public void testInnerDetectWaitMode() throws Exception { List waitOrderedInfos = List.of(allInfos.get(3), allInfos.get(1), allInfos.get(0), allInfos.get(2)); when(mockedMXBean.getThreadInfo(ArgumentMatchers.any(), anyInt())).thenReturn(waitOrderedInfos.toArray(new ThreadInfo[0])); - String waitInnerResult = hotWaitingThreads.innerDetect(mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId, (interval) -> null); + String waitInnerResult = innerDetect(hotWaitingThreads, mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId); assertThat( waitInnerResult, @@ -401,7 +402,7 @@ public void testInnerDetectWaitMode() throws Exception { waitOrderedInfos = List.of(allInfos.get(3), allInfos.get(1), allInfos.get(0), allInfos.get(2)); when(mockedMXBean.getThreadInfo(ArgumentMatchers.any(), anyInt())).thenReturn(waitOrderedInfos.toArray(new ThreadInfo[0])); - waitInnerResult = hotWaitingThreads.innerDetect(mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId, (interval) -> null); + waitInnerResult = innerDetect(hotWaitingThreads, mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId); assertThat( waitInnerResult, @@ -431,7 +432,7 @@ public void testInnerDetectBlockedMode() throws Exception { List blockOrderedInfos = List.of(allInfos.get(2), allInfos.get(0), allInfos.get(1), allInfos.get(3)); when(mockedMXBean.getThreadInfo(ArgumentMatchers.any(), anyInt())).thenReturn(blockOrderedInfos.toArray(new ThreadInfo[0])); - String blockInnerResult = hotBlockedThreads.innerDetect(mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId, (interval) -> null); + String blockInnerResult = innerDetect(hotBlockedThreads, mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId); assertThat( blockInnerResult, @@ -455,7 +456,7 @@ public void testInnerDetectBlockedMode() throws Exception { blockOrderedInfos = List.of(allInfos.get(2), allInfos.get(0), allInfos.get(1), allInfos.get(3)); when(mockedMXBean.getThreadInfo(ArgumentMatchers.any(), 
anyInt())).thenReturn(blockOrderedInfos.toArray(new ThreadInfo[0])); - blockInnerResult = hotBlockedThreads.innerDetect(mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId, (interval) -> null); + blockInnerResult = innerDetect(hotBlockedThreads, mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId); assertThat( blockInnerResult, @@ -490,7 +491,7 @@ public void testInnerDetectMemoryMode() throws Exception { .threadElementsSnapshotCount(1) .ignoreIdleThreads(false); - String memInnerResult = hotThreads.innerDetect(mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId, (interval) -> null); + String memInnerResult = innerDetect(hotThreads, mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId); assertThat(memInnerResult, containsString(" unique snapshot")); assertThat( memInnerResult, @@ -519,7 +520,7 @@ public void testInnerDetectMemoryMode() throws Exception { .threadElementsSnapshotCount(1) .ignoreIdleThreads(false); - memInnerResult = hotThreads.innerDetect(mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId, (interval) -> null); + memInnerResult = innerDetect(hotThreads, mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId); assertThat(memInnerResult, containsString(" unique snapshot")); assertThat( memInnerResult, @@ -551,7 +552,7 @@ public void testInnerDetectSingleSnapshot() throws Exception { .threadElementsSnapshotCount(1) .ignoreIdleThreads(false); - String singleResult = hotThreads.innerDetect(mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId, (interval) -> null); + String singleResult = innerDetect(hotThreads, mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId); assertThat(singleResult, containsString(" unique snapshot")); assertEquals(5, singleResult.split(" unique snapshot").length); @@ -581,7 +582,7 @@ public void testEnsureInnerDetectSkipsCurrentThread() throws Exception { .threadElementsSnapshotCount(11) .ignoreIdleThreads(false); - String innerResult = hotThreads.innerDetect(mockedMXBean, mock(SunThreadInfo.class), mockCurrentThreadId, (interval) -> null); + String innerResult = innerDetect(hotThreads, mockedMXBean, mock(SunThreadInfo.class), mockCurrentThreadId); assertEquals(1, innerResult.lines().count()); } @@ -811,7 +812,7 @@ public void testWaitBlockTimeMonitoringEnabled() throws Exception { Exception e = expectThrows( ElasticsearchException.class, - () -> hotThreads.innerDetect(mockedMXBean, mock(SunThreadInfo.class), mockCurrentThreadId, (interval) -> null) + () -> innerDetect(hotThreads, mockedMXBean, mock(SunThreadInfo.class), mockCurrentThreadId) ); assertEquals(e.getMessage(), "thread wait/blocked time accounting is not supported on this JDK"); @@ -846,7 +847,7 @@ public void testGetThreadAllocatedBytesFailures() throws Exception { ElasticsearchException exception = expectThrows( ElasticsearchException.class, - () -> hotThreads0.innerDetect(mockedMXBean, mockedSunThreadInfo, 0L, (interval) -> null) + () -> innerDetect(hotThreads0, mockedMXBean, mockedSunThreadInfo, 0L) ); assertThat(exception.getMessage(), equalTo("thread allocated memory is not supported on this JDK")); } @@ -869,7 +870,7 @@ public void testInnerDetectCPUModeTransportThreads() throws Exception { .threadElementsSnapshotCount(11) .ignoreIdleThreads(false); - String innerResult = hotThreads.innerDetect(mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId, (interval) -> null); + String innerResult = innerDetect(hotThreads, mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId); assertThat(innerResult, containsString("Hot threads at ")); assertThat(innerResult, 
containsString("interval=10ms, busiestThreads=4, ignoreIdleThreads=false:")); @@ -888,7 +889,7 @@ public void testInnerDetectCPUModeTransportThreads() throws Exception { assertThat(innerResult, containsString("org.elasticsearch.monitor.testOther.methodFinal(Some_File:1)")); // Let's ask again without progressing the CPU thread counters, e.g. resetting the mocks - innerResult = hotThreads.innerDetect(mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId, (interval) -> null); + innerResult = innerDetect(hotThreads, mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId); assertThat( innerResult, @@ -919,7 +920,7 @@ public void testInnerDetectCPUModeTransportThreads() throws Exception { .threadElementsSnapshotCount(11) .ignoreIdleThreads(false); - innerResult = hotThreads.innerDetect(mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId, (interval) -> null); + innerResult = innerDetect(hotThreads, mockedMXBean, mockedSunThreadInfo, mockCurrentThreadId); assertThat(innerResult, containsString("Hot threads at ")); assertThat(innerResult, containsString("interval=10ms, busiestThreads=4, ignoreIdleThreads=false:")); @@ -937,4 +938,16 @@ public void testInnerDetectCPUModeTransportThreads() throws Exception { assertThat(innerResult, containsString("org.elasticsearch.monitor.test.method_1(Some_File:1)")); assertThat(innerResult, containsString("org.elasticsearch.monitor.testOther.methodFinal(Some_File:1)")); } + + private static String innerDetect( + HotThreads hotThreads, + ThreadMXBean mockedMthreadMXBeanBean, + SunThreadInfo sunThreadInfo, + long currentThreadId + ) throws Exception { + try (var writer = new StringWriter()) { + hotThreads.innerDetect(mockedMthreadMXBeanBean, sunThreadInfo, currentThreadId, (interval) -> null, writer); + return writer.toString(); + } + } } diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index c5d5ecc1f90e8..ad09c58b65cba 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.cleanup.TransportCleanupRepositoryAction; -import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.put.TransportPutRepositoryAction; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequest; @@ -27,7 +26,6 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.TransportCreateSnapshotAction; -import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.TransportDeleteSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotAction; @@ -42,7 +40,6 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import 
org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.mapping.put.AutoPutMappingAction; @@ -2036,7 +2033,7 @@ protected void assertSnapshotOrGenericThread() { ) ); actions.put( - DeleteIndexAction.INSTANCE, + TransportDeleteIndexAction.TYPE, new TransportDeleteIndexAction( transportService, clusterService, @@ -2048,7 +2045,7 @@ protected void assertSnapshotOrGenericThread() { ) ); actions.put( - PutRepositoryAction.INSTANCE, + TransportPutRepositoryAction.TYPE, new TransportPutRepositoryAction( transportService, clusterService, @@ -2126,7 +2123,7 @@ protected void assertSnapshotOrGenericThread() { ) ); actions.put( - DeleteSnapshotAction.INSTANCE, + TransportDeleteSnapshotAction.TYPE, new TransportDeleteSnapshotAction( transportService, clusterService, diff --git a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java index 41e6a818a62da..f6c5fdfe4db5c 100644 --- a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java +++ b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java @@ -56,6 +56,7 @@ protected String getTestRestCluster() { return cluster.getHttpAddresses(); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103286") @SuppressWarnings("unchecked") public void testApmIntegration() throws Exception { Map>> sampleAssertions = new HashMap<>( diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java index 71030358e901f..b1765218ff7f2 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java @@ -36,6 +36,7 @@ import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.repositories.RepositoryMissingException; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; import org.elasticsearch.snapshots.SnapshotMissingException; import org.elasticsearch.snapshots.SnapshotRestoreException; @@ -333,20 +334,8 @@ protected void testSnapshotAndRestore(boolean recreateRepositoryBeforeRestore) t logger.info("--> add random documents to {}", index); addRandomDocuments(index, randomIntBetween(10, 1000)); } else { - var resp = prepareSearch(index).setSize(0).get(); - final int docCount; - try { - docCount = (int) resp.getHits().getTotalHits().value; - } finally { - resp.decRef(); - } - int deleteCount = randomIntBetween(1, docCount); - logger.info("--> delete {} random documents from {}", deleteCount, index); - for (int i = 0; i < deleteCount; i++) { - int doc = randomIntBetween(0, docCount - 1); - client().prepareDelete(index, 
Integer.toString(doc)).get(); - } - client().admin().indices().prepareRefresh(index).get(); + final int docCount = (int) SearchResponseUtils.getTotalHitsValue(prepareSearch(index).setSize(0)); + deleteRandomDocs(index, docCount); } } @@ -395,13 +384,7 @@ public void testMultipleSnapshotAndRollback() throws Exception { if (randomBoolean() && i > 0) { // don't delete on the first iteration int docCount = docCounts[i - 1]; if (docCount > 0) { - int deleteCount = randomIntBetween(1, docCount); - logger.info("--> delete {} random documents from {}", deleteCount, indexName); - for (int j = 0; j < deleteCount; j++) { - int doc = randomIntBetween(0, docCount - 1); - client().prepareDelete(indexName, Integer.toString(doc)).get(); - } - client().admin().indices().prepareRefresh(indexName).get(); + deleteRandomDocs(indexName, docCount); } } else { int docCount = randomIntBetween(10, 1000); @@ -409,12 +392,7 @@ public void testMultipleSnapshotAndRollback() throws Exception { addRandomDocuments(indexName, docCount); } // Check number of documents in this iteration - var resp = prepareSearch(indexName).setSize(0).get(); - try { - docCounts[i] = (int) resp.getHits().getTotalHits().value; - } finally { - resp.decRef(); - } + docCounts[i] = (int) SearchResponseUtils.getTotalHitsValue(prepareSearch(indexName).setSize(0)); logger.info("--> create snapshot {}:{} with {} documents", repoName, snapshotName + "-" + i, docCounts[i]); assertSuccessfulSnapshot( clusterAdmin().prepareCreateSnapshot(repoName, snapshotName + "-" + i).setWaitForCompletion(true).setIndices(indexName) @@ -446,6 +424,16 @@ public void testMultipleSnapshotAndRollback() throws Exception { } } + private void deleteRandomDocs(String indexName, int existingDocCount) { + int deleteCount = randomIntBetween(1, existingDocCount); + logger.info("--> delete {} random documents from {}", deleteCount, indexName); + for (int j = 0; j < deleteCount; j++) { + int doc = randomIntBetween(0, existingDocCount - 1); + client().prepareDelete(indexName, Integer.toString(doc)).get(); + } + client().admin().indices().prepareRefresh(indexName).get(); + } + public void testIndicesDeletedFromRepository() throws Exception { final String repoName = createRepository(randomRepositoryName()); Client client = client(); diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 0b5b953df84fc..5ab1641307fc5 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -47,6 +46,7 @@ import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.repositories.blobstore.BlobStoreTestUtil; import org.elasticsearch.repositories.blobstore.ChecksumBlobStoreFormat; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.builder.SearchSourceBuilder; import 
org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.snapshots.mockstore.MockRepository; @@ -502,14 +502,9 @@ protected void indexRandomDocs(String index, int numdocs) throws InterruptedExce } protected long getCountForIndex(String indexName) { - var resp = client().search( - new SearchRequest(new SearchRequest(indexName).source(new SearchSourceBuilder().size(0).trackTotalHits(true))) - ).actionGet(); - try { - return resp.getHits().getTotalHits().value; - } finally { - resp.decRef(); - } + return SearchResponseUtils.getTotalHitsValue( + client().prepareSearch(indexName).setSource(new SearchSourceBuilder().size(0).trackTotalHits(true)) + ); } protected void assertDocCount(String index, long count) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 779d846f4eac2..23721de4aad9c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -129,6 +129,7 @@ import org.elasticsearch.search.ConcurrentSearchTestPlugin; import org.elasticsearch.search.MockSearchService; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.SearchService; import org.elasticsearch.test.client.RandomizingClient; import org.elasticsearch.test.disruption.NetworkDisruption; @@ -1064,17 +1065,14 @@ public void waitForDocs(final long numDocs, final BackgroundIndexer indexer) thr if (lastKnownCount >= numDocs) { try { - var resp = prepareSearch().setTrackTotalHits(true).setSize(0).setQuery(matchAllQuery()).get(); - try { - long count = resp.getHits().getTotalHits().value; - if (count == lastKnownCount) { - // no progress - try to refresh for the next time - indicesAdmin().prepareRefresh().get(); - } - lastKnownCount = count; - } finally { - resp.decRef(); + long count = SearchResponseUtils.getTotalHitsValue( + prepareSearch().setTrackTotalHits(true).setSize(0).setQuery(matchAllQuery()) + ); + if (count == lastKnownCount) { + // no progress - try to refresh for the next time + indicesAdmin().prepareRefresh().get(); } + lastKnownCount = count; } catch (Exception e) { // count now acts like search and barfs if all shards failed... 
logger.debug("failed to executed count", e); throw e; diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 0ce970943cc0b..38c38e719138e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -17,10 +17,10 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.store.AlreadyClosedException; -import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; -import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsRequest; +import org.elasticsearch.action.admin.cluster.configuration.TransportAddVotingConfigExclusionsAction; +import org.elasticsearch.action.admin.cluster.configuration.TransportClearVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; @@ -1906,7 +1906,7 @@ private Set excludeMasters(Collection nodeAndClients) { logger.info("adding voting config exclusions {} prior to restart/shutdown", excludedNodeNames); try { client().execute( - AddVotingConfigExclusionsAction.INSTANCE, + TransportAddVotingConfigExclusionsAction.TYPE, new AddVotingConfigExclusionsRequest(excludedNodeNames.toArray(Strings.EMPTY_ARRAY)) ).get(); } catch (InterruptedException | ExecutionException e) { @@ -1923,7 +1923,7 @@ private void removeExclusions(Set excludedNodeIds) { logger.info("removing voting config exclusions for {} after restart/shutdown", excludedNodeIds); try { Client client = getRandomNodeAndClient(node -> excludedNodeIds.contains(node.name) == false).client(); - client.execute(ClearVotingConfigExclusionsAction.INSTANCE, new ClearVotingConfigExclusionsRequest()).get(); + client.execute(TransportClearVotingConfigExclusionsAction.TYPE, new ClearVotingConfigExclusionsRequest()).get(); } catch (InterruptedException | ExecutionException e) { ESTestCase.fail(e); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java index a58810e91e186..fd5bd253fd8e5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java @@ -58,6 +58,9 @@ public class RestTestLegacyFeatures implements FeatureSpecification { @UpdateForV9 public static final NodeFeature ML_ANALYTICS_MAPPINGS = new NodeFeature("ml.analytics_mappings"); + // YAML + public static final NodeFeature REST_ELASTIC_PRODUCT_HEADER_PRESENT = new NodeFeature("action.rest.product_header_present"); + @Override public Map getHistoricalFeatures() { return Map.ofEntries( @@ -75,7 +78,8 @@ public Map getHistoricalFeatures() { entry(SEARCH_AGGREGATIONS_FORCE_INTERVAL_SELECTION_DATE_HISTOGRAM, Version.V_7_2_0), entry(TRANSFORM_NEW_API_ENDPOINT, Version.V_7_5_0), entry(ML_INDICES_HIDDEN, Version.V_7_7_0), - entry(ML_ANALYTICS_MAPPINGS, Version.V_7_3_0) + entry(ML_ANALYTICS_MAPPINGS, 
Version.V_7_3_0), + entry(REST_ELASTIC_PRODUCT_HEADER_PRESENT, Version.V_8_0_1) ); } } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java index 7cabb5543ac16..c95fc5c131df0 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java @@ -27,6 +27,7 @@ import java.util.Map; import java.util.Objects; import java.util.function.BiPredicate; +import java.util.function.Predicate; /** * Used to execute REST requests according to the docs snippets that need to be tests. Wraps a @@ -40,11 +41,11 @@ public ClientYamlDocsTestClient( final RestClient restClient, final List hosts, final Version esVersion, - final Version masterVersion, + final Predicate clusterFeaturesPredicate, final String os, final CheckedSupplier clientBuilderWithSniffedNodes ) { - super(restSpec, restClient, hosts, esVersion, masterVersion, os, clientBuilderWithSniffedNodes); + super(restSpec, restClient, hosts, esVersion, clusterFeaturesPredicate, os, clientBuilderWithSniffedNodes); } @Override diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java index 7787807876724..d30f65718943e 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java @@ -47,6 +47,7 @@ import java.util.Map.Entry; import java.util.Set; import java.util.function.BiPredicate; +import java.util.function.Predicate; import java.util.stream.Collectors; import static com.carrotsearch.randomizedtesting.RandomizedTest.frequently; @@ -64,24 +65,24 @@ public class ClientYamlTestClient implements Closeable { private final ClientYamlSuiteRestSpec restSpec; private final Map restClients = new HashMap<>(); private final Version esVersion; - private final Version masterVersion; private final String os; private final CheckedSupplier clientBuilderWithSniffedNodes; + private final Predicate clusterFeaturesPredicate; ClientYamlTestClient( final ClientYamlSuiteRestSpec restSpec, final RestClient restClient, final List hosts, final Version esVersion, - final Version masterVersion, + final Predicate clusterFeaturesPredicate, final String os, final CheckedSupplier clientBuilderWithSniffedNodes ) { + this.clusterFeaturesPredicate = clusterFeaturesPredicate; assert hosts.size() > 0; this.restSpec = restSpec; this.restClients.put(NodeSelector.ANY, restClient); this.esVersion = esVersion; - this.masterVersion = masterVersion; this.os = os; this.clientBuilderWithSniffedNodes = clientBuilderWithSniffedNodes; } @@ -93,8 +94,8 @@ public Version getEsVersion() { return esVersion; } - public Version getMasterVersion() { - return masterVersion; + public boolean clusterHasFeature(String featureId) { + return clusterFeaturesPredicate.test(featureId); } public String getOs() { diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java index 8b77acb3ee133..a584280119ef3 100644 --- 
a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java @@ -230,10 +230,6 @@ public Version esVersion() { return clientYamlTestClient.getEsVersion(); } - public Version masterVersion() { - return clientYamlTestClient.getMasterVersion(); - } - public String os() { return clientYamlTestClient.getOs(); } @@ -241,4 +237,8 @@ public String os() { public ClientYamlTestCandidate getClientYamlTestCandidate() { return clientYamlTestCandidate; } + + public boolean clusterHasFeature(String featureId) { + return clientYamlTestClient.clusterHasFeature(featureId); + } } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index 630ee9883ff83..2e1631cc8c337 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -61,6 +61,7 @@ import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; +import java.util.function.Predicate; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -150,7 +151,7 @@ public void initAndResetContext() throws Exception { hosts, os ); - clientYamlTestClient = initClientYamlTestClient(restSpec, client(), hosts, esVersion, masterVersion, os); + clientYamlTestClient = initClientYamlTestClient(restSpec, client(), hosts, esVersion, ESRestTestCase::clusterHasFeature, os); restTestExecutionContext = createRestTestExecutionContext(testCandidate, clientYamlTestClient); adminExecutionContext = new ClientYamlTestExecutionContext(testCandidate, clientYamlTestClient, false); final String[] blacklist = resolvePathsProperty(REST_TESTS_BLACKLIST, null); @@ -188,10 +189,18 @@ protected ClientYamlTestClient initClientYamlTestClient( final RestClient restClient, final List hosts, final Version esVersion, - final Version masterVersion, + final Predicate clusterFeaturesPredicate, final String os ) { - return new ClientYamlTestClient(restSpec, restClient, hosts, esVersion, masterVersion, os, this::getClientBuilderWithSniffedHosts); + return new ClientYamlTestClient( + restSpec, + restClient, + hosts, + esVersion, + clusterFeaturesPredicate, + os, + this::getClientBuilderWithSniffedHosts + ); } @AfterClass diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index 6e9107152c6f7..bd038cc4dcd58 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -19,7 +19,9 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.core.Tuple; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; import org.elasticsearch.test.rest.yaml.ClientYamlTestResponseException; @@ -363,8 +365,16 @@ 
public void execute(ClientYamlTestExecutionContext executionContext) throws IOEx final String testPath = executionContext.getClientYamlTestCandidate() != null ? executionContext.getClientYamlTestCandidate().getTestPath() : null; - if (executionContext.esVersion().major == Version.V_7_17_0.major && executionContext.esVersion().after(Version.V_7_17_1)) { - // #84038 and #84089 mean that this assertion fails when running against a small number of 7.17.x released versions + + // #84038 and #84089 mean that this assertion fails when running against < 7.17.2 and 8.0.0 released versions + // This is really difficult to express just with features, so I will break it down into 2 parts: version check for v7, + // and feature check for v8. This way the version check can be removed once we move to v9 + @UpdateForV9 + var fixedInV7 = executionContext.esVersion().major == Version.V_7_17_0.major + && executionContext.esVersion().onOrAfter(Version.V_7_17_2); + var fixedProductionHeader = fixedInV7 + || executionContext.clusterHasFeature(RestTestLegacyFeatures.REST_ELASTIC_PRODUCT_HEADER_PRESENT.id()); + if (fixedProductionHeader) { checkElasticProductHeader(response.getHeaders("X-elastic-product")); } checkWarningHeaders(response.getWarningHeaders(), testPath); diff --git a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderDecisionTests.java b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderDecisionTests.java index ef1aa8c5f95ab..86cabb37f00e6 100644 --- a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderDecisionTests.java +++ b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderDecisionTests.java @@ -811,12 +811,7 @@ public String getDataPath(ShardRouting shardRouting) { } @Override - public long getShardSize(ShardRouting shardRouting, long defaultValue) { - return 1L; - } - - @Override - public Long getShardSize(ShardRouting shardRouting) { + public Long getShardSize(ShardId shardId, boolean primary) { return 1L; } }; diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRetentionLeaseIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRetentionLeaseIT.java index 811f35d2ddec1..339662c996492 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRetentionLeaseIT.java +++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRetentionLeaseIT.java @@ -438,8 +438,8 @@ public void testUnfollowRemovesRetentionLeases() throws Exception { senderNode.getName() ); senderTransportService.addSendBehavior((connection, requestId, action, request, options) -> { - if (RetentionLeaseActions.Remove.ACTION_NAME.equals(action) - || TransportActionProxy.getProxyAction(RetentionLeaseActions.Remove.ACTION_NAME).equals(action)) { + if (RetentionLeaseActions.REMOVE.name().equals(action) + || TransportActionProxy.getProxyAction(RetentionLeaseActions.REMOVE.name()).equals(action)) { final RetentionLeaseActions.RemoveRequest removeRequest = (RetentionLeaseActions.RemoveRequest) request; if (shardIds.contains(removeRequest.getShardId().id())) { final String primaryShardNodeId = getLeaderCluster().clusterService() @@ -526,8 +526,8 @@ public void testUnfollowFailsToRemoveRetentionLeases() throws Exception { senderNode.getName() ); 
senderTransportService.addSendBehavior((connection, requestId, action, request, options) -> { - if (RetentionLeaseActions.Remove.ACTION_NAME.equals(action) - || TransportActionProxy.getProxyAction(RetentionLeaseActions.Remove.ACTION_NAME).equals(action)) { + if (RetentionLeaseActions.REMOVE.name().equals(action) + || TransportActionProxy.getProxyAction(RetentionLeaseActions.REMOVE.name()).equals(action)) { final RetentionLeaseActions.RemoveRequest removeRequest = (RetentionLeaseActions.RemoveRequest) request; if (shardIds.contains(removeRequest.getShardId().id())) { throw randomBoolean() @@ -847,8 +847,8 @@ public void testRetentionLeaseIsAddedIfItDisappearsWhileFollowing() throws Excep senderNode.getName() ); senderTransportService.addSendBehavior((connection, requestId, action, request, options) -> { - if (RetentionLeaseActions.Renew.ACTION_NAME.equals(action) - || TransportActionProxy.getProxyAction(RetentionLeaseActions.Renew.ACTION_NAME).equals(action)) { + if (RetentionLeaseActions.RENEW.name().equals(action) + || TransportActionProxy.getProxyAction(RetentionLeaseActions.RENEW.name()).equals(action)) { final RetentionLeaseActions.RenewRequest renewRequest = (RetentionLeaseActions.RenewRequest) request; final String retentionLeaseId = getRetentionLeaseId(followerIndex, leaderIndex); if (retentionLeaseId.equals(renewRequest.getId())) { @@ -957,8 +957,8 @@ public void testPeriodicRenewalDoesNotAddRetentionLeaseAfterUnfollow() throws Ex senderNode.getName() ); senderTransportService.addSendBehavior((connection, requestId, action, request, options) -> { - if (RetentionLeaseActions.Renew.ACTION_NAME.equals(action) - || TransportActionProxy.getProxyAction(RetentionLeaseActions.Renew.ACTION_NAME).equals(action)) { + if (RetentionLeaseActions.RENEW.name().equals(action) + || TransportActionProxy.getProxyAction(RetentionLeaseActions.RENEW.name()).equals(action)) { final String retentionLeaseId = getRetentionLeaseId(followerIndex, leaderIndex); logger.info("--> blocking renewal request for retention lease [{}] until unfollowed", retentionLeaseId); try { diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CloseFollowerIndexIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CloseFollowerIndexIT.java index 8e597c3992528..ca1f1b6c4f12c 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CloseFollowerIndexIT.java +++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CloseFollowerIndexIT.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; -import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.ClusterState; @@ -19,6 +18,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.engine.ReadOnlyEngine; import org.elasticsearch.index.shard.CloseFollowerIndexErrorSuppressionHelper; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.CcrIntegTestCase; import org.elasticsearch.xpack.core.ccr.action.PutFollowAction; @@ -125,14 +125,12 @@ public void testCloseAndReopenFollowerIndex() throws Exception { ensureFollowerGreen("index2"); refresh(leaderClient(), "index1"); - SearchRequest leaderSearchRequest = new 
SearchRequest("index1"); - leaderSearchRequest.source().trackTotalHits(true); - long leaderIndexDocs = leaderClient().search(leaderSearchRequest).actionGet().getHits().getTotalHits().value; + long leaderIndexDocs = SearchResponseUtils.getTotalHitsValue(leaderClient().prepareSearch("index1").setTrackTotalHits(true)); assertBusy(() -> { refresh(followerClient(), "index2"); - SearchRequest followerSearchRequest = new SearchRequest("index2"); - followerSearchRequest.source().trackTotalHits(true); - long followerIndexDocs = followerClient().search(followerSearchRequest).actionGet().getHits().getTotalHits().value; + long followerIndexDocs = SearchResponseUtils.getTotalHitsValue( + followerClient().prepareSearch("index2").setTrackTotalHits(true) + ); assertThat(followerIndexDocs, equalTo(leaderIndexDocs)); }, 30L, TimeUnit.SECONDS); } diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java index 88482eabafed5..070338c07003c 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java +++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java @@ -119,6 +119,7 @@ import java.util.stream.Stream; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.ccr.CcrRetentionLeases.retentionLeaseId; import static org.hamcrest.Matchers.containsString; @@ -369,9 +370,7 @@ public void testSyncMappings() throws Exception { leaderClient().prepareIndex("index1").setId(Long.toString(i)).setSource(source, XContentType.JSON).get(); } - assertBusy( - () -> assertThat(followerClient().prepareSearch("index2").get().getHits().getTotalHits().value, equalTo(firstBatchNumDocs)) - ); + assertBusy(() -> assertHitCount(followerClient().prepareSearch("index2"), firstBatchNumDocs)); MappingMetadata mappingMetadata = followerClient().admin().indices().prepareGetMappings("index2").get().getMappings().get("index2"); assertThat(XContentMapValues.extractValue("properties.f.type", mappingMetadata.sourceAsMap()), equalTo("integer")); assertThat(XContentMapValues.extractValue("properties.k", mappingMetadata.sourceAsMap()), nullValue()); @@ -382,12 +381,7 @@ public void testSyncMappings() throws Exception { leaderClient().prepareIndex("index1").setId(Long.toString(i)).setSource(source, XContentType.JSON).get(); } - assertBusy( - () -> assertThat( - followerClient().prepareSearch("index2").get().getHits().getTotalHits().value, - equalTo(firstBatchNumDocs + secondBatchNumDocs) - ) - ); + assertBusy(() -> assertHitCount(followerClient().prepareSearch("index2"), firstBatchNumDocs + secondBatchNumDocs)); mappingMetadata = followerClient().admin().indices().prepareGetMappings("index2").get().getMappings().get("index2"); assertThat(XContentMapValues.extractValue("properties.f.type", mappingMetadata.sourceAsMap()), equalTo("integer")); assertThat(XContentMapValues.extractValue("properties.k.type", mappingMetadata.sourceAsMap()), equalTo("long")); @@ -413,7 +407,7 @@ public void testNoMappingDefined() throws Exception { followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); leaderClient().prepareIndex("index1").setId("1").setSource("{\"f\":1}", XContentType.JSON).get(); 
- assertBusy(() -> assertThat(followerClient().prepareSearch("index2").get().getHits().getTotalHits().value, equalTo(1L))); + assertBusy(() -> assertHitCount(followerClient().prepareSearch("index2"), 1)); pauseFollow("index2"); MappingMetadata mappingMetadata = followerClient().admin().indices().prepareGetMappings("index2").get().getMappings().get("index2"); @@ -711,7 +705,7 @@ public void testCloseLeaderIndex() throws Exception { followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); leaderClient().prepareIndex("index1").setId("1").setSource("{}", XContentType.JSON).get(); - assertBusy(() -> assertThat(followerClient().prepareSearch("index2").get().getHits().getTotalHits().value, equalTo(1L))); + assertBusy(() -> assertHitCount(followerClient().prepareSearch("index2"), 1)); leaderClient().admin().indices().close(new CloseIndexRequest("index1")).actionGet(); assertBusy(() -> { @@ -735,7 +729,7 @@ public void testCloseLeaderIndex() throws Exception { leaderClient().admin().indices().open(new OpenIndexRequest("index1")).actionGet(); leaderClient().prepareIndex("index1").setId("2").setSource("{}", XContentType.JSON).get(); - assertBusy(() -> assertThat(followerClient().prepareSearch("index2").get().getHits().getTotalHits().value, equalTo(2L))); + assertBusy(() -> assertHitCount(followerClient().prepareSearch("index2"), 2)); pauseFollow("index2"); } @@ -757,7 +751,7 @@ public void testCloseFollowIndex() throws Exception { followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); leaderClient().prepareIndex("index1").setId("1").setSource("{}", XContentType.JSON).get(); - assertBusy(() -> assertThat(followerClient().prepareSearch("index2").get().getHits().getTotalHits().value, equalTo(1L))); + assertBusy(() -> assertHitCount(followerClient().prepareSearch("index2"), 1)); followerClient().admin().indices().close(new CloseIndexRequest("index2").masterNodeTimeout(TimeValue.MAX_VALUE)).actionGet(); leaderClient().prepareIndex("index1").setId("2").setSource("{}", XContentType.JSON).get(); @@ -769,7 +763,7 @@ public void testCloseFollowIndex() throws Exception { assertThat(response.getStatsResponses().get(0).status().failedWriteRequests(), greaterThanOrEqualTo(1L)); }); followerClient().admin().indices().open(new OpenIndexRequest("index2").masterNodeTimeout(TimeValue.MAX_VALUE)).actionGet(); - assertBusy(() -> assertThat(followerClient().prepareSearch("index2").get().getHits().getTotalHits().value, equalTo(2L))); + assertBusy(() -> assertHitCount(followerClient().prepareSearch("index2"), 2)); pauseFollow("index2"); } @@ -791,7 +785,7 @@ public void testDeleteLeaderIndex() throws Exception { followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); leaderClient().prepareIndex("index1").setId("1").setSource("{}", XContentType.JSON).get(); - assertBusy(() -> assertThat(followerClient().prepareSearch("index2").get().getHits().getTotalHits().value, equalTo(1L))); + assertBusy(() -> assertHitCount(followerClient().prepareSearch("index2"), 1)); leaderClient().admin().indices().delete(new DeleteIndexRequest("index1")).actionGet(); assertBusy(() -> { @@ -872,7 +866,7 @@ public void testDeleteFollowerIndex() throws Exception { followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); leaderClient().prepareIndex("index1").setId("1").setSource("{}", XContentType.JSON).get(); - assertBusy(() -> assertThat(followerClient().prepareSearch("index2").get().getHits().getTotalHits().value, equalTo(1L))); + assertBusy(() -> 
assertHitCount(followerClient().prepareSearch("index2"), 1)); followerClient().admin().indices().delete(new DeleteIndexRequest("index2").masterNodeTimeout(TimeValue.MAX_VALUE)).actionGet(); leaderClient().prepareIndex("index1").setId("2").setSource("{}", XContentType.JSON).get(); @@ -935,7 +929,7 @@ public void testUnfollowIndex() throws Exception { PutFollowAction.Request followRequest = putFollow("index1", "index2"); followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); leaderClient().prepareIndex("index1").setSource("{}", XContentType.JSON).get(); - assertBusy(() -> { assertThat(followerClient().prepareSearch("index2").get().getHits().getTotalHits().value, equalTo(1L)); }); + assertBusy(() -> assertHitCount(followerClient().prepareSearch("index2"), 1)); // Indexing directly into index2 would fail now, because index2 is a follow index. // We can't test this here because an assertion trips before an actual error is thrown and then index call hangs. @@ -952,7 +946,7 @@ public void testUnfollowIndex() throws Exception { .setSource("{}", XContentType.JSON) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); - assertThat(followerClient().prepareSearch("index2").get().getHits().getTotalHits().value, equalTo(2L)); + assertHitCount(followerClient().prepareSearch("index2"), 2); } public void testUnknownClusterAlias() throws Exception { @@ -1024,9 +1018,7 @@ public void testUpdateDynamicLeaderIndexSettings() throws Exception { for (long i = 0; i < firstBatchNumDocs; i++) { leaderClient().prepareIndex("leader").setSource("{}", XContentType.JSON).get(); } - assertBusy( - () -> assertThat(followerClient().prepareSearch("follower").get().getHits().getTotalHits().value, equalTo(firstBatchNumDocs)) - ); + assertBusy(() -> assertHitCount(followerClient().prepareSearch("follower"), firstBatchNumDocs)); // Sanity check that the setting has not been set in follower index: { @@ -1053,10 +1045,7 @@ public void testUpdateDynamicLeaderIndexSettings() throws Exception { assertThat(getFollowTaskSettingsVersion("follower"), equalTo(2L)); try { - assertThat( - followerClient().prepareSearch("follower").get().getHits().getTotalHits().value, - equalTo(firstBatchNumDocs + secondBatchNumDocs) - ); + assertHitCount(followerClient().prepareSearch("follower"), firstBatchNumDocs + secondBatchNumDocs); } catch (Exception e) { throw new AssertionError("error while searching", e); } @@ -1080,9 +1069,7 @@ public void testLeaderIndexSettingNotPercolatedToFollower() throws Exception { for (long i = 0; i < firstBatchNumDocs; i++) { leaderClient().prepareIndex("leader").setSource("{}", XContentType.JSON).get(); } - assertBusy( - () -> assertThat(followerClient().prepareSearch("follower").get().getHits().getTotalHits().value, equalTo(firstBatchNumDocs)) - ); + assertBusy(() -> assertHitCount(followerClient().prepareSearch("follower"), firstBatchNumDocs)); // Sanity check that the setting has not been set in follower index: { @@ -1108,10 +1095,7 @@ public void testLeaderIndexSettingNotPercolatedToFollower() throws Exception { assertThat(getFollowTaskSettingsVersion("follower"), equalTo(2L)); try { - assertThat( - followerClient().prepareSearch("follower").get().getHits().getTotalHits().value, - equalTo(firstBatchNumDocs + secondBatchNumDocs) - ); + assertHitCount(followerClient().prepareSearch("follower"), firstBatchNumDocs + secondBatchNumDocs); } catch (Exception e) { throw new AssertionError("error while searching", e); } @@ -1133,9 +1117,7 @@ public void testUpdateAnalysisLeaderIndexSettings() 
throws Exception { leaderClient().prepareIndex("leader").setSource("{}", XContentType.JSON).get(); } - assertBusy( - () -> assertThat(followerClient().prepareSearch("follower").get().getHits().getTotalHits().value, equalTo(firstBatchNumDocs)) - ); + assertBusy(() -> assertHitCount(followerClient().prepareSearch("follower"), firstBatchNumDocs)); assertThat(getFollowTaskSettingsVersion("follower"), equalTo(1L)); assertThat(getFollowTaskMappingVersion("follower"), equalTo(1L)); @@ -1185,10 +1167,7 @@ public void testUpdateAnalysisLeaderIndexSettings() throws Exception { ); try { - assertThat( - followerClient().prepareSearch("follower").get().getHits().getTotalHits().value, - equalTo(firstBatchNumDocs + secondBatchNumDocs) - ); + assertHitCount(followerClient().prepareSearch("follower"), firstBatchNumDocs + secondBatchNumDocs); } catch (Exception e) { throw new AssertionError("error while searching", e); } @@ -1369,10 +1348,8 @@ public void testIndexFallBehind() throws Exception { final IndexRoutingTable indexRoutingTable = leaderRoutingTable.index("index1"); for (int i = 0; i < indexRoutingTable.size(); i++) { final ShardId shardId = indexRoutingTable.shard(i).shardId(); - leaderClient().execute( - RetentionLeaseActions.Remove.INSTANCE, - new RetentionLeaseActions.RemoveRequest(shardId, retentionLeaseId) - ).get(); + leaderClient().execute(RetentionLeaseActions.REMOVE, new RetentionLeaseActions.RemoveRequest(shardId, retentionLeaseId)) + .get(); } }, exceptions -> assertThat(exceptions.size(), greaterThan(0))); } @@ -1574,7 +1551,7 @@ public void testCleanUpShardFollowTasksForDeletedIndices() throws Exception { followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); leaderClient().prepareIndex("index1").setId("1").setSource("{}", XContentType.JSON).get(); - assertBusy(() -> assertThat(followerClient().prepareSearch("index2").get().getHits().getTotalHits().value, equalTo(1L))); + assertBusy(() -> assertHitCount(followerClient().prepareSearch("index2"), 1)); assertBusy(() -> { String action = ShardFollowTask.NAME + "[c]"; diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java index d609b606238bc..05fc3b037c795 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java +++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java @@ -32,6 +32,7 @@ import java.util.stream.StreamSupport; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -53,21 +54,14 @@ public void testFollowIndex() throws Exception { final PutFollowAction.Request followRequest = getPutFollowRequest("leader", "follower"); client().execute(PutFollowAction.INSTANCE, followRequest).get(); - assertBusy( - () -> { assertThat(client().prepareSearch("follower").get().getHits().getTotalHits().value, equalTo(firstBatchNumDocs)); } - ); + assertBusy(() -> assertHitCount(client().prepareSearch("follower"), firstBatchNumDocs)); final long secondBatchNumDocs = randomIntBetween(2, 64); for (int i = 0; i < secondBatchNumDocs; i++) { prepareIndex("leader").setSource("{}", 
XContentType.JSON).get(); } - assertBusy(() -> { - assertThat( - client().prepareSearch("follower").get().getHits().getTotalHits().value, - equalTo(firstBatchNumDocs + secondBatchNumDocs) - ); - }); + assertBusy(() -> assertHitCount(client().prepareSearch("follower"), firstBatchNumDocs + secondBatchNumDocs)); PauseFollowAction.Request pauseRequest = new PauseFollowAction.Request("follower"); client().execute(PauseFollowAction.INSTANCE, pauseRequest); @@ -78,12 +72,7 @@ public void testFollowIndex() throws Exception { } client().execute(ResumeFollowAction.INSTANCE, getResumeFollowRequest("follower")).get(); - assertBusy(() -> { - assertThat( - client().prepareSearch("follower").get().getHits().getTotalHits().value, - equalTo(firstBatchNumDocs + secondBatchNumDocs + thirdBatchNumDocs) - ); - }); + assertBusy(() -> assertHitCount(client().prepareSearch("follower"), firstBatchNumDocs + secondBatchNumDocs + thirdBatchNumDocs)); ensureEmptyWriteBuffers(); } @@ -136,9 +125,7 @@ public void testIndexingMetricsIncremented() throws Exception { assertEquals(firstBatchNumDocs, indexingPressure.stats().getCurrentPrimaryOps()); }); blocker.countDown(); - assertBusy( - () -> { assertThat(client().prepareSearch("follower").get().getHits().getTotalHits().value, equalTo(firstBatchNumDocs)); } - ); + assertBusy(() -> assertHitCount(client().prepareSearch("follower"), firstBatchNumDocs)); ensureEmptyWriteBuffers(); } finally { if (blocker.getCount() > 0) { @@ -210,7 +197,7 @@ public void testChangeLeaderIndex() throws Exception { prepareIndex("index-1").setSource("{}", XContentType.JSON).get(); } client().execute(PutFollowAction.INSTANCE, getPutFollowRequest("index-1", "index-2")).get(); - assertBusy(() -> assertThat(client().prepareSearch("index-2").get().getHits().getTotalHits().value, equalTo((long) numDocs))); + assertBusy(() -> assertHitCount(client().prepareSearch("index-2"), numDocs)); // Then switch index-1 to be a follower of index-0 assertAcked(client().admin().indices().prepareCreate("index-0").setSource(settings, XContentType.JSON)); diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/RestartIndexFollowingIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/RestartIndexFollowingIT.java index 2cb58a9991176..5c152be35b509 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/RestartIndexFollowingIT.java +++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/RestartIndexFollowingIT.java @@ -34,6 +34,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; @@ -78,9 +79,7 @@ public void testFollowIndex() throws Exception { leaderClient().prepareIndex("index1").setId(Integer.toString(i)).setSource(source, XContentType.JSON).get(); } - assertBusy( - () -> assertThat(followerClient().prepareSearch("index2").get().getHits().getTotalHits().value, equalTo(firstBatchNumDocs)) - ); + assertBusy(() -> assertHitCount(followerClient().prepareSearch("index2"), firstBatchNumDocs)); getFollowerCluster().fullRestart(); ensureFollowerGreen("index2"); @@ -115,7 +114,7 @@ public void testFollowIndex() throws Exception { followerClient().execute(PutFollowAction.INSTANCE, putFollow("index1", 
"index2", ActiveShardCount.ALL)).actionGet(); } } - assertThat(followerClient().prepareSearch("index2").get().getHits().getTotalHits().value, equalTo(totalDocs)); + assertHitCount(followerClient().prepareSearch("index2"), totalDocs); }, 30L, TimeUnit.SECONDS); cleanRemoteCluster(); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRetentionLeases.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRetentionLeases.java index 651cf22797192..a73a3dc5d715f 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRetentionLeases.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRetentionLeases.java @@ -112,7 +112,7 @@ public static void asyncAddRetentionLease( retainingSequenceNumber, "ccr" ); - remoteClient.execute(RetentionLeaseActions.Add.INSTANCE, request, listener); + remoteClient.execute(RetentionLeaseActions.ADD, request, listener); } /** @@ -167,7 +167,7 @@ public static void asyncRenewRetentionLease( retainingSequenceNumber, "ccr" ); - remoteClient.execute(RetentionLeaseActions.Renew.INSTANCE, request, listener); + remoteClient.execute(RetentionLeaseActions.RENEW, request, listener); } /** @@ -187,7 +187,7 @@ public static void asyncRemoveRetentionLease( final ActionListener listener ) { final RetentionLeaseActions.RemoveRequest request = new RetentionLeaseActions.RemoveRequest(leaderShardId, retentionLeaseId); - remoteClient.execute(RetentionLeaseActions.Remove.INSTANCE, request, listener); + remoteClient.execute(RetentionLeaseActions.REMOVE, request, listener); } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java index 9bf22bd4e0ca3..047a2d6225035 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java @@ -61,6 +61,7 @@ import org.elasticsearch.license.LicensesMetadata; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.snapshots.RestoreInfo; import org.elasticsearch.snapshots.RestoreService; @@ -794,14 +795,9 @@ public void waitForDocs(final long numDocs, final BackgroundIndexer indexer) thr if (lastKnownCount >= numDocs) { try { - long count = indexer.getClient() - .prepareSearch() - .setTrackTotalHits(true) - .setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value; + long count = SearchResponseUtils.getTotalHitsValue( + indexer.getClient().prepareSearch().setTrackTotalHits(true).setSize(0).setQuery(QueryBuilders.matchAllQuery()) + ); if (count == lastKnownCount) { // no progress - try to refresh for the next time diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesTests.java index 2f0643ef16c1b..78d997ef9d777 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesTests.java @@ -134,7 +134,7 @@ public void testMissingOperations() throws Exception { client().admin().indices().forceMerge(forceMergeRequest).actionGet(); indicesAdmin().execute( - 
RetentionLeaseActions.Add.INSTANCE, + RetentionLeaseActions.ADD, new RetentionLeaseActions.AddRequest(new ShardId(resolveIndex("index"), 0), "test", RetentionLeaseActions.RETAIN_ALL, "ccr") ).get(); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRepositoryRetentionLeaseTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRepositoryRetentionLeaseTests.java index a1c52728e9a7a..099f15eb59e46 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRepositoryRetentionLeaseTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRepositoryRetentionLeaseTests.java @@ -77,7 +77,7 @@ public void testWhenRetentionLeaseAlreadyExistsWeTryToRenewIt() { final ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; listener.onFailure(new RetentionLeaseAlreadyExistsException(retentionLeaseId)); return null; - }).when(remoteClient).execute(same(RetentionLeaseActions.Add.INSTANCE), addRequestCaptor.capture(), any()); + }).when(remoteClient).execute(same(RetentionLeaseActions.ADD), addRequestCaptor.capture(), any()); final ArgumentCaptor renewRequestCaptor = ArgumentCaptor.forClass( RetentionLeaseActions.RenewRequest.class ); @@ -86,17 +86,17 @@ public void testWhenRetentionLeaseAlreadyExistsWeTryToRenewIt() { final ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; listener.onResponse(ActionResponse.Empty.INSTANCE); return null; - }).when(remoteClient).execute(same(RetentionLeaseActions.Renew.INSTANCE), renewRequestCaptor.capture(), any()); + }).when(remoteClient).execute(same(RetentionLeaseActions.RENEW), renewRequestCaptor.capture(), any()); repository.acquireRetentionLeaseOnLeader(followerShardId, retentionLeaseId, leaderShardId, remoteClient); - verify(remoteClient).execute(same(RetentionLeaseActions.Add.INSTANCE), any(RetentionLeaseActions.AddRequest.class), any()); + verify(remoteClient).execute(same(RetentionLeaseActions.ADD), any(RetentionLeaseActions.AddRequest.class), any()); assertThat(addRequestCaptor.getValue().getShardId(), equalTo(leaderShardId)); assertThat(addRequestCaptor.getValue().getId(), equalTo(retentionLeaseId)); assertThat(addRequestCaptor.getValue().getRetainingSequenceNumber(), equalTo(RETAIN_ALL)); assertThat(addRequestCaptor.getValue().getSource(), equalTo("ccr")); - verify(remoteClient).execute(same(RetentionLeaseActions.Renew.INSTANCE), any(RetentionLeaseActions.RenewRequest.class), any()); + verify(remoteClient).execute(same(RetentionLeaseActions.RENEW), any(RetentionLeaseActions.RenewRequest.class), any()); assertThat(renewRequestCaptor.getValue().getShardId(), equalTo(leaderShardId)); assertThat(renewRequestCaptor.getValue().getId(), equalTo(retentionLeaseId)); assertThat(renewRequestCaptor.getValue().getRetainingSequenceNumber(), equalTo(RETAIN_ALL)); @@ -162,7 +162,7 @@ public Void answer(final InvocationOnMock invocationOnMock) { return null; } - }).when(remoteClient).execute(same(RetentionLeaseActions.Add.INSTANCE), addRequestCaptor.capture(), any()); + }).when(remoteClient).execute(same(RetentionLeaseActions.ADD), addRequestCaptor.capture(), any()); final ArgumentCaptor renewRequestCaptor = ArgumentCaptor.forClass( RetentionLeaseActions.RenewRequest.class ); @@ -171,21 +171,17 @@ public Void answer(final InvocationOnMock invocationOnMock) { final ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; listener.onFailure(new 
RetentionLeaseNotFoundException(retentionLeaseId)); return null; - }).when(remoteClient).execute(same(RetentionLeaseActions.Renew.INSTANCE), renewRequestCaptor.capture(), any()); + }).when(remoteClient).execute(same(RetentionLeaseActions.RENEW), renewRequestCaptor.capture(), any()); repository.acquireRetentionLeaseOnLeader(followerShardId, retentionLeaseId, leaderShardId, remoteClient); - verify(remoteClient, times(2)).execute( - same(RetentionLeaseActions.Add.INSTANCE), - any(RetentionLeaseActions.AddRequest.class), - any() - ); + verify(remoteClient, times(2)).execute(same(RetentionLeaseActions.ADD), any(RetentionLeaseActions.AddRequest.class), any()); assertThat(addRequestCaptor.getValue().getShardId(), equalTo(leaderShardId)); assertThat(addRequestCaptor.getValue().getId(), equalTo(retentionLeaseId)); assertThat(addRequestCaptor.getValue().getRetainingSequenceNumber(), equalTo(RETAIN_ALL)); assertThat(addRequestCaptor.getValue().getSource(), equalTo("ccr")); - verify(remoteClient).execute(same(RetentionLeaseActions.Renew.INSTANCE), any(RetentionLeaseActions.RenewRequest.class), any()); + verify(remoteClient).execute(same(RetentionLeaseActions.RENEW), any(RetentionLeaseActions.RenewRequest.class), any()); assertThat(renewRequestCaptor.getValue().getShardId(), equalTo(leaderShardId)); assertThat(renewRequestCaptor.getValue().getId(), equalTo(retentionLeaseId)); assertThat(renewRequestCaptor.getValue().getRetainingSequenceNumber(), equalTo(RETAIN_ALL)); diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java index 90a525c2df45c..d1599c8b6a827 100644 --- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java +++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java @@ -59,6 +59,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @ESIntegTestCase.ClusterScope(numDataNodes = 0) @@ -123,7 +124,7 @@ public void testSnapshotAndRestore() throws Exception { assertTrue(e.toString().contains("_source only indices can't be searched or filtered")); // can-match phase pre-filters access to non-existing field - assertEquals(0, prepareSearch(sourceIdx).setQuery(QueryBuilders.termQuery("field1", "bar")).get().getHits().getTotalHits().value); + assertHitCount(prepareSearch(sourceIdx).setQuery(QueryBuilders.termQuery("field1", "bar")), 0); // make sure deletes do not work String idToDelete = "" + randomIntBetween(0, builders.length); expectThrows(ClusterBlockException.class, () -> client().prepareDelete(sourceIdx, idToDelete).setRouting("r" + idToDelete).get()); @@ -147,7 +148,7 @@ public void testSnapshotAndRestoreWithNested() throws Exception { ); assertTrue(e.toString().contains("_source only indices can't be searched or filtered")); // can-match phase pre-filters access to non-existing field - assertEquals(0, prepareSearch(sourceIdx).setQuery(QueryBuilders.termQuery("field1", "bar")).get().getHits().getTotalHits().value); + assertHitCount(prepareSearch(sourceIdx).setQuery(QueryBuilders.termQuery("field1", "bar")), 0); // make 
sure deletes do not work String idToDelete = "" + randomIntBetween(0, builders.length); expectThrows(ClusterBlockException.class, () -> client().prepareDelete(sourceIdx, idToDelete).setRouting("r" + idToDelete).get()); @@ -253,7 +254,6 @@ private static void assertMappings(String sourceIdx, boolean requireRouting, boo } private void assertHits(String index, int numDocsExpected, boolean sourceHadDeletions) { - SearchResponse searchResponse = prepareSearch(index).addSort(SeqNoFieldMapper.NAME, SortOrder.ASC).setSize(numDocsExpected).get(); BiConsumer assertConsumer = (res, allowHoles) -> { SearchHits hits = res.getHits(); long i = 0; @@ -272,9 +272,11 @@ private void assertHits(String index, int numDocsExpected, boolean sourceHadDele assertEquals("r" + id, hit.field("_routing").getValue()); } }; - assertConsumer.accept(searchResponse, sourceHadDeletions); - assertEquals(numDocsExpected, searchResponse.getHits().getTotalHits().value); - searchResponse = prepareSearch(index).addSort(SeqNoFieldMapper.NAME, SortOrder.ASC) + assertResponse(prepareSearch(index).addSort(SeqNoFieldMapper.NAME, SortOrder.ASC).setSize(numDocsExpected), searchResponse -> { + assertConsumer.accept(searchResponse, sourceHadDeletions); + assertEquals(numDocsExpected, searchResponse.getHits().getTotalHits().value); + }); + SearchResponse searchResponse = prepareSearch(index).addSort(SeqNoFieldMapper.NAME, SortOrder.ASC) .setScroll("1m") .slice(new SliceBuilder(SeqNoFieldMapper.NAME, randomIntBetween(0, 1), 2)) .setSize(randomIntBetween(1, 10)) @@ -283,12 +285,14 @@ private void assertHits(String index, int numDocsExpected, boolean sourceHadDele do { // now do a scroll with a slice assertConsumer.accept(searchResponse, true); + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); } while (searchResponse.getHits().getHits().length > 0); } finally { if (searchResponse.getScrollId() != null) { client().prepareClearScroll().addScrollId(searchResponse.getScrollId()).get(); } + searchResponse.decRef(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlCompleteLogoutAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlCompleteLogoutAction.java deleted file mode 100644 index 8e51b2d18375c..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlCompleteLogoutAction.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.core.security.action.saml; - -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.ActionType; - -/** - * ActionType for completing SAML LogoutResponse - */ -public final class SamlCompleteLogoutAction extends ActionType { - - public static final String NAME = "cluster:admin/xpack/security/saml/complete_logout"; - public static final SamlCompleteLogoutAction INSTANCE = new SamlCompleteLogoutAction(); - - private SamlCompleteLogoutAction() { - super(NAME, in -> ActionResponse.Empty.INSTANCE); - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ChangePasswordAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ChangePasswordAction.java deleted file mode 100644 index 906ec22a7cea4..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ChangePasswordAction.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.core.security.action.user; - -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.ActionType; - -public class ChangePasswordAction extends ActionType { - - public static final ChangePasswordAction INSTANCE = new ChangePasswordAction(); - public static final String NAME = "cluster:admin/xpack/security/user/change_password"; - - protected ChangePasswordAction() { - super(NAME, in -> ActionResponse.Empty.INSTANCE); - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/SetEnabledAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/SetEnabledAction.java deleted file mode 100644 index c257764f6f6e8..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/SetEnabledAction.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.core.security.action.user; - -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.ActionType; - -/** - * This action is for setting the enabled flag on a native or reserved user - */ -public class SetEnabledAction extends ActionType { - - public static final SetEnabledAction INSTANCE = new SetEnabledAction(); - public static final String NAME = "cluster:admin/xpack/security/user/set_enabled"; - - private SetEnabledAction() { - super(NAME, in -> ActionResponse.Empty.INSTANCE); - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java index ca8932ced81b4..4968352439fb0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java @@ -14,7 +14,7 @@ import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.admin.indices.create.AutoCreateAction; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsAction; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction; @@ -112,7 +112,7 @@ public final class IndexPrivilege extends Privilege { AutoCreateAction.NAME, CreateDataStreamAction.NAME ); - private static final Automaton DELETE_INDEX_AUTOMATON = patterns(DeleteIndexAction.NAME, DeleteDataStreamAction.NAME); + private static final Automaton DELETE_INDEX_AUTOMATON = patterns(TransportDeleteIndexAction.TYPE.name(), DeleteDataStreamAction.NAME); private static final Automaton VIEW_METADATA_AUTOMATON = patterns( GetAliasesAction.NAME, GetIndexAction.NAME, @@ -152,9 +152,9 @@ public final class IndexPrivilege extends Privilege { private static final Automaton CROSS_CLUSTER_REPLICATION_AUTOMATON = patterns( "indices:data/read/xpack/ccr/shard_changes*", IndicesStatsAction.NAME + "*", - RetentionLeaseActions.Add.ACTION_NAME + "*", - RetentionLeaseActions.Remove.ACTION_NAME + "*", - RetentionLeaseActions.Renew.ACTION_NAME + "*" + RetentionLeaseActions.ADD.name() + "*", + RetentionLeaseActions.REMOVE.name() + "*", + RetentionLeaseActions.RENEW.name() + "*" ); private static final Automaton CROSS_CLUSTER_REPLICATION_INTERNAL_AUTOMATON = patterns( "indices:internal/admin/ccr/restore/session/clear*", diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java index bc42632507256..2616b63df7c01 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java @@ -34,9 +34,9 @@ public final class SystemPrivilege extends Privilege { "indices:admin/seq_no/global_checkpoint_sync*", // needed for global checkpoint syncs RetentionLeaseSyncAction.ACTION_NAME + "*", // needed for retention lease 
syncs RetentionLeaseBackgroundSyncAction.ACTION_NAME + "*", // needed for background retention lease syncs - RetentionLeaseActions.Add.ACTION_NAME + "*", // needed for CCR to add retention leases - RetentionLeaseActions.Remove.ACTION_NAME + "*", // needed for CCR to remove retention leases - RetentionLeaseActions.Renew.ACTION_NAME + "*", // needed for CCR to renew retention leases + RetentionLeaseActions.ADD.name() + "*", // needed for CCR to add retention leases + RetentionLeaseActions.REMOVE.name() + "*", // needed for CCR to remove retention leases + RetentionLeaseActions.RENEW.name() + "*", // needed for CCR to renew retention leases "indices:admin/settings/update", // needed for DiskThresholdMonitor.markIndicesReadOnly CompletionPersistentTaskAction.NAME, // needed for ShardFollowTaskCleaner "indices:data/write/*", // needed for SystemIndexMigrator diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index fc0df87425239..474ba25e3e117 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.core.security.authz.store; import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; import org.elasticsearch.action.admin.indices.rollover.RolloverAction; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; @@ -285,7 +285,7 @@ static RoleDescriptor kibanaSystem(String name) { "synthetics-browser.network-*", "synthetics-browser.screenshot-*" ) - .privileges(DeleteIndexAction.NAME) + .privileges(TransportDeleteIndexAction.TYPE.name()) .build(), // For src/dest indices of the Endpoint package that ships a transform RoleDescriptor.IndicesPrivileges.builder() @@ -326,7 +326,7 @@ static RoleDescriptor kibanaSystem(String name) { .indices("logs-ti_*.*-*") .privileges( // Require "delete_index" to perform ILM policy actions - DeleteIndexAction.NAME, + TransportDeleteIndexAction.TYPE.name(), // Require "read" and "view_index_metadata" for transform "read", "view_index_metadata" diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java index 7747461a6f93a..a383004c12878 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java @@ -448,7 +448,10 @@ public void onIndexModule(IndexModule indexModule) { @Override public Function> getFieldFilter() { - List>> items = filterPlugins(MapperPlugin.class).stream().map(p -> p.getFieldFilter()).toList(); + List>> items = filterPlugins(MapperPlugin.class).stream() + .map(p -> p.getFieldFilter()) + .filter(p -> p.equals(NOOP_FIELD_FILTER) == false) + .toList(); if (items.size() > 1) { throw new UnsupportedOperationException("Only the 
security MapperPlugin should override this"); } else if (items.size() == 1) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CleanupShrinkIndexStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CleanupShrinkIndexStepTests.java index 90482be334363..be6338a566669 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CleanupShrinkIndexStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CleanupShrinkIndexStepTests.java @@ -10,8 +10,8 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.LifecycleExecutionState; @@ -143,7 +143,7 @@ protected void Request request, ActionListener listener ) { - assertThat(action.name(), is(DeleteIndexAction.NAME)); + assertThat(action.name(), is(TransportDeleteIndexAction.TYPE.name())); assertTrue(request instanceof DeleteIndexRequest); assertThat(((DeleteIndexRequest) request).indices(), arrayContaining(shrinkIndexName)); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CleanupSnapshotStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CleanupSnapshotStepTests.java index 5fddcd51a6614..c7e9cc707aa02 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CleanupSnapshotStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CleanupSnapshotStepTests.java @@ -10,8 +10,8 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.delete.TransportDeleteSnapshotAction; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -124,7 +124,7 @@ protected void Request request, ActionListener listener ) { - assertThat(action.name(), is(DeleteSnapshotAction.NAME)); + assertThat(action.name(), is(TransportDeleteSnapshotAction.TYPE.name())); assertTrue(request instanceof DeleteSnapshotRequest); assertThat(((DeleteSnapshotRequest) request).snapshots(), arrayContaining(expectedSnapshotName)); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CleanupTargetIndexStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CleanupTargetIndexStepTests.java index dea53b2c736ac..679fd8835a648 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CleanupTargetIndexStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CleanupTargetIndexStepTests.java @@ -10,8 +10,8 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; 
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.LifecycleExecutionState; @@ -173,7 +173,7 @@ protected void Request request, ActionListener listener ) { - assertThat(action.name(), is(DeleteIndexAction.NAME)); + assertThat(action.name(), is(TransportDeleteIndexAction.TYPE.name())); assertTrue(request instanceof DeleteIndexRequest); assertThat(((DeleteIndexRequest) request).indices(), arrayContaining(shrinkIndexName)); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java index 5b5754af78018..29453205b4d00 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java @@ -125,7 +125,10 @@ protected void doNextSearch(long waitTimeInNanos, ActionListener null, 1 ); - nextPhase.onResponse(new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null)); + ActionListener.respondAndRelease( + nextPhase, + new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null) + ); } @Override @@ -267,7 +270,10 @@ protected void doNextSearch(long waitTimeInNanos, ActionListener awaitForLatch(); } - nextPhase.onResponse(new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null)); + ActionListener.respondAndRelease( + nextPhase, + new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null) + ); } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java index 7ab450fc2e191..46c393d9f0de2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java @@ -9,7 +9,7 @@ import org.apache.lucene.util.automaton.Automaton; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.cluster.metadata.AliasMetadata; @@ -339,7 +339,7 @@ public void testAuthorize() { assertThat(iac.getIndexPermissions("_index1"), is(nullValue())); assertThat(iac.hasIndexPermissions("_index1"), is(false)); iac = limitedByRole.authorize( - DeleteIndexAction.NAME, + TransportDeleteIndexAction.TYPE.name(), Sets.newHashSet("_index", "_alias1"), md.getIndicesLookup(), fieldPermissionsCache @@ -379,7 +379,7 @@ public void testAuthorize() { assertThat(iac.getIndexPermissions("_index1"), is(nullValue())); assertThat(iac.hasIndexPermissions("_index1"), is(false)); iac = role.authorize( - DeleteIndexAction.NAME, + TransportDeleteIndexAction.TYPE.name(), Sets.newHashSet("_index", "_alias1"), md.getIndicesLookup(), 
fieldPermissionsCache diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index ed77e9a1d4d9a..831dc58e14003 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; import org.elasticsearch.action.admin.cluster.remote.TransportRemoteInfoAction; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction; -import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryAction; +import org.elasticsearch.action.admin.cluster.repositories.put.TransportPutRepositoryAction; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction; @@ -23,7 +23,7 @@ import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; import org.elasticsearch.action.admin.indices.create.AutoCreateAction; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsAction; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction; @@ -308,7 +308,7 @@ public void testSnapshotUserRole() { assertThat(snapshotUserRole.cluster().check(SnapshotsStatusAction.NAME, request, authentication), is(true)); assertThat(snapshotUserRole.cluster().check(GetSnapshotsAction.NAME, request, authentication), is(true)); - assertThat(snapshotUserRole.cluster().check(PutRepositoryAction.NAME, request, authentication), is(false)); + assertThat(snapshotUserRole.cluster().check(TransportPutRepositoryAction.TYPE.name(), request, authentication), is(false)); assertThat(snapshotUserRole.cluster().check(GetIndexTemplatesAction.NAME, request, authentication), is(false)); assertThat(snapshotUserRole.cluster().check(DeleteIndexTemplateAction.NAME, request, authentication), is(false)); assertThat(snapshotUserRole.cluster().check(PutPipelineAction.NAME, request, authentication), is(false)); @@ -619,7 +619,10 @@ public void testKibanaSystemRole() { logger.info("index name [{}]", index); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(mockIndexAbstraction(index)), is(false)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); 
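// A minimal sketch of the action-name migration these role assertions follow: the standalone
// DeleteIndexAction class is removed, and the action name is obtained from the transport
// action's ActionType constant instead. Only the call shown is taken from this diff; the
// wrapper is a hypothetical illustration.
import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction;

final class DeleteIndexActionName {
    // before: DeleteIndexAction.NAME
    static String deleteIndexAction() {
        return TransportDeleteIndexAction.TYPE.name();
    }
}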
assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false)); @@ -647,7 +650,10 @@ public void testKibanaSystemRole() { logger.trace("index name [{}]", index); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(mockIndexAbstraction(index)), is(false)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false)); @@ -675,7 +681,10 @@ public void testKibanaSystemRole() { logger.trace("index name [{}]", index); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(mockIndexAbstraction(index)), is(false)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(true)); @@ -699,7 +708,10 @@ public void testKibanaSystemRole() { Arrays.asList("apm-" + randomAlphaOfLength(randomIntBetween(0, 13))).forEach((index) -> { assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(mockIndexAbstraction(index)), is(false)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); @@ -775,7 +787,10 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher(PutMappingAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(mockIndexAbstraction(index)), is(true)); // Privileges needed for installing current ILM policy with delete action - 
assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(true) + ); }); // read-only indices for Endpoint events (to build timelines) @@ -783,7 +798,10 @@ public void testKibanaSystemRole() { final IndexAbstraction indexAbstraction = mockIndexAbstraction(index); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); @@ -802,7 +820,10 @@ public void testKibanaSystemRole() { final IndexAbstraction indexAbstraction = mockIndexAbstraction(index); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); @@ -872,7 +893,10 @@ public void testKibanaSystemRole() { final IndexAbstraction indexAbstraction = mockIndexAbstraction(index); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); @@ -890,7 +914,10 @@ public void testKibanaSystemRole() { final IndexAbstraction indexAbstraction = mockIndexAbstraction(index); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false)); - 
assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true)); @@ -909,7 +936,10 @@ public void testKibanaSystemRole() { final IndexAbstraction indexAbstraction = mockIndexAbstraction(index); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true)); @@ -928,7 +958,10 @@ public void testKibanaSystemRole() { final IndexAbstraction indexAbstraction = mockIndexAbstraction(index); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); @@ -950,7 +983,10 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(mockIndexAbstraction(index)), is(false)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false)); @@ -997,7 +1033,10 @@ public void testKibanaSystemRole() { final IndexAbstraction indexAbstraction = mockIndexAbstraction(index); 
assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); @@ -1016,7 +1055,10 @@ public void testKibanaSystemRole() { final IndexAbstraction indexAbstraction = mockIndexAbstraction(index); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); @@ -1034,7 +1076,10 @@ public void testKibanaSystemRole() { final String index = ".management-beats"; assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(mockIndexAbstraction(index)), is(false)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(true)); @@ -1152,7 +1197,10 @@ public void testKibanaSystemRole() { || indexName.startsWith("synthetics-browser.network-*") || indexName.startsWith("synthetics-browser.screenshot-*") || indexName.startsWith("profiling-*"); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(indexAbstraction), is(isAlsoIlmDeleteIndex)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), + is(isAlsoIlmDeleteIndex) + ); }); // 4. 
Transform for endpoint package @@ -1192,7 +1240,7 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateDataStreamAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteDataStreamAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetAliasesAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndicesAliasesAction.NAME).test(indexAbstraction), is(true)); @@ -1228,7 +1276,7 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(indexAbstraction), is(true)); // Allow create and delete index, modifying aliases, and updating index settings assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetAliasesAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndicesAliasesAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(indexAbstraction), is(true)); @@ -1262,7 +1310,7 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(PutMappingAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportMultiSearchAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true)); @@ -1305,7 +1353,10 @@ public void testKibanaSystemRole() { final IndexAbstraction indexAbstraction = mockIndexAbstraction(cspIndex); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + 
kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); @@ -1323,7 +1374,10 @@ public void testKibanaSystemRole() { final IndexAbstraction indexAbstraction = mockIndexAbstraction(cspIndex); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); @@ -1380,7 +1434,10 @@ public void testKibanaSystemRole() { final IndexAbstraction indexAbstraction = mockIndexAbstraction(indexName); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); @@ -1414,7 +1471,7 @@ public void testKibanaSystemRole() { logger.info("index name [{}]", indexName); final IndexAbstraction indexAbstraction = mockIndexAbstraction(indexName); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(indexAbstraction), is(true)); }); @@ -1430,7 +1487,7 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(indexAbstraction), is(true)); // Allow create and delete index, modifying aliases, and updating index settings assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(indexAbstraction), is(true)); + 
assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetAliasesAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndicesAliasesAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(indexAbstraction), is(true)); @@ -1665,7 +1722,10 @@ public void testMonitoringUserRole() { final String index = ".monitoring-" + randomAlphaOfLength(randomIntBetween(0, 13)); assertThat(monitoringUserRole.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(index)), is(false)); assertThat(monitoringUserRole.indices().allowedIndicesMatcher("indices:bar").test(mockIndexAbstraction(index)), is(false)); - assertThat(monitoringUserRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + monitoringUserRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat(monitoringUserRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat( monitoringUserRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), @@ -1812,7 +1872,9 @@ public void testRemoteMonitoringAgentRole() { is(true) ); assertThat( - remoteMonitoringAgentRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(mockIndexAbstraction(monitoringIndex)), + remoteMonitoringAgentRole.indices() + .allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()) + .test(mockIndexAbstraction(monitoringIndex)), is(true) ); assertThat( @@ -1864,7 +1926,9 @@ public void testRemoteMonitoringAgentRole() { is(false) ); assertThat( - remoteMonitoringAgentRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(mockIndexAbstraction(metricbeatIndex)), + remoteMonitoringAgentRole.indices() + .allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()) + .test(mockIndexAbstraction(metricbeatIndex)), is(false) ); assertThat( @@ -2011,7 +2075,9 @@ public void testRemoteMonitoringCollectorRole() { is(false) ); assertThat( - remoteMonitoringCollectorRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(mockIndexAbstraction(index)), + remoteMonitoringCollectorRole.indices() + .allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()) + .test(mockIndexAbstraction(index)), is(false) ); assertThat( @@ -2262,7 +2328,10 @@ public void testReportingUserRole() { final String index = ".reporting-" + randomAlphaOfLength(randomIntBetween(0, 13)); assertThat(reportingUserRole.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(index)), is(false)); assertThat(reportingUserRole.indices().allowedIndicesMatcher("indices:bar").test(mockIndexAbstraction(index)), is(false)); - assertThat(reportingUserRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + reportingUserRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat(reportingUserRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat( reportingUserRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(mockIndexAbstraction(index)), @@ 
-2355,7 +2424,8 @@ public void testSuperuserRole() { .authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("a1", "ba"), lookup, fieldPermissionsCache); assertThat(iac.hasIndexPermissions("a1"), is(true)); assertThat(iac.hasIndexPermissions("b"), is(true)); - iac = superuserRole.indices().authorize(DeleteIndexAction.NAME, Sets.newHashSet("a1", "ba"), lookup, fieldPermissionsCache); + iac = superuserRole.indices() + .authorize(TransportDeleteIndexAction.TYPE.name(), Sets.newHashSet("a1", "ba"), lookup, fieldPermissionsCache); assertThat(iac.hasIndexPermissions("a1"), is(true)); assertThat(iac.hasIndexPermissions("b"), is(true)); iac = superuserRole.indices().authorize(TransportIndexAction.NAME, Sets.newHashSet("a2", "ba"), lookup, fieldPermissionsCache); @@ -2379,7 +2449,7 @@ public void testSuperuserRole() { // Write security indices => denied iac = superuserRole.indices() .authorize( - randomFrom(TransportIndexAction.NAME, DeleteIndexAction.NAME), + randomFrom(TransportIndexAction.NAME, TransportDeleteIndexAction.TYPE.name()), Sets.newHashSet(TestRestrictedIndices.SECURITY_MAIN_ALIAS), lookup, fieldPermissionsCache @@ -2409,13 +2479,13 @@ public void testSuperuserRole() { // Write security indices => denied assertThat( superuserRole.indices() - .allowedIndicesMatcher(randomFrom(TransportIndexAction.NAME, DeleteIndexAction.NAME)) + .allowedIndicesMatcher(randomFrom(TransportIndexAction.NAME, TransportDeleteIndexAction.TYPE.name())) .test(mockIndexAbstraction(TestRestrictedIndices.SECURITY_MAIN_ALIAS)), is(false) ); assertThat( superuserRole.indices() - .allowedIndicesMatcher(randomFrom(TransportIndexAction.NAME, DeleteIndexAction.NAME)) + .allowedIndicesMatcher(randomFrom(TransportIndexAction.NAME, TransportDeleteIndexAction.TYPE.name())) .test(mockIndexAbstraction(internalSecurityIndex)), is(false) ); @@ -2500,7 +2570,10 @@ public void testBeatsAdminRole() { logger.info("index name [{}]", index); assertThat(beatsAdminRole.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(index)), is(true)); assertThat(beatsAdminRole.indices().allowedIndicesMatcher("indices:bar").test(mockIndexAbstraction(index)), is(true)); - assertThat(beatsAdminRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); + assertThat( + beatsAdminRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(true) + ); assertThat(beatsAdminRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat(beatsAdminRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat(beatsAdminRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(true)); @@ -3425,7 +3498,10 @@ private void assertAllIndicesAccessAllowed(Role role, String index) { logger.info("index name [{}]", index); assertThat(role.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(index)), is(true)); assertThat(role.indices().allowedIndicesMatcher("indices:bar").test(mockIndexAbstraction(index)), is(true)); - assertThat(role.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); + assertThat( + role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(true) + ); 
assertThat(role.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); @@ -3442,7 +3518,10 @@ private void assertAllIndicesAccessAllowed(Role role, String index) { } private void assertReadWriteDocsAndMaintenanceButNotDeleteIndexAllowed(Role role, String index) { - assertThat(role.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat(role.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(mockIndexAbstraction(index)), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(mockIndexAbstraction(index)), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); @@ -3456,7 +3535,10 @@ private void assertReadWriteDocsAndMaintenanceButNotDeleteIndexAllowed(Role role } private void assertReadWriteDocsButNotDeleteIndexAllowed(Role role, String index) { - assertThat(role.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat(role.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(mockIndexAbstraction(index)), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(mockIndexAbstraction(index)), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); @@ -3466,7 +3548,10 @@ private void assertReadWriteDocsButNotDeleteIndexAllowed(Role role, String index } private void assertOnlyReadAllowed(Role role, String index) { - assertThat(role.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(role.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(mockIndexAbstraction(index)), is(true)); @@ -3505,7 +3590,10 @@ private void assertNoAccessAllowed(Role role, Collection indices) { } private void assertNoAccessAllowed(Role role, String index) { - assertThat(role.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(role.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(mockIndexAbstraction(index)), is(false)); 
assertThat(role.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(mockIndexAbstraction(index)), is(false)); @@ -3564,7 +3652,10 @@ public void testLogstashAdminRole() { logstashAdminRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(true) ); - assertThat(logstashAdminRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); + assertThat( + logstashAdminRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(true) + ); assertThat(logstashAdminRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat( logstashAdminRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/InternalUsersTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/InternalUsersTests.java index f9a12b1fecf0b..8d1bcfb500e30 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/InternalUsersTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/InternalUsersTests.java @@ -13,9 +13,9 @@ import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; -import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptAction; +import org.elasticsearch.action.admin.cluster.storedscripts.TransportDeleteStoredScriptAction; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockAction; @@ -80,7 +80,7 @@ public void testXPackUser() { final List sampleClusterActions = List.of( ClusterStateAction.NAME, PutComponentTemplateAction.NAME, - DeleteStoredScriptAction.NAME, + TransportDeleteStoredScriptAction.TYPE.name(), UpdateJobAction.NAME, CleanupRepositoryAction.NAME ); @@ -92,7 +92,7 @@ public void testXPackUser() { RefreshAction.NAME, CreateIndexAction.NAME, PutMappingAction.NAME, - DeleteIndexAction.NAME + TransportDeleteIndexAction.TYPE.name() ); checkIndexAccess(role, randomFrom(sampleIndexActions), randomAlphaOfLengthBetween(3, 12), true); checkIndexAccess( @@ -115,7 +115,7 @@ public void testXPackSecurityUser() { final List sampleClusterActions = List.of( ClusterStateAction.NAME, PutComponentTemplateAction.NAME, - DeleteStoredScriptAction.NAME, + TransportDeleteStoredScriptAction.TYPE.name(), UpdateJobAction.NAME, CleanupRepositoryAction.NAME ); @@ -127,7 +127,7 @@ public void testXPackSecurityUser() { RefreshAction.NAME, CreateIndexAction.NAME, PutMappingAction.NAME, - DeleteIndexAction.NAME + TransportDeleteIndexAction.TYPE.name() ); checkIndexAccess( role, @@ -154,7 +154,7 @@ public void testSecurityProfileUser() { RefreshAction.NAME, CreateIndexAction.NAME, PutMappingAction.NAME, - DeleteIndexAction.NAME + TransportDeleteIndexAction.TYPE.name() ); checkIndexAccess(role, 
randomFrom(sampleAllowedActions), ".security-profile", true); checkIndexAccess(role, randomFrom(sampleAllowedActions), ".security-profile-" + randomIntBetween(1, 9), true); @@ -185,7 +185,7 @@ public void testAsyncSearchUser() { RefreshAction.NAME, CreateIndexAction.NAME, PutMappingAction.NAME, - DeleteIndexAction.NAME + TransportDeleteIndexAction.TYPE.name() ); checkIndexAccess(role, randomFrom(sampleAllowedActions), XPackPlugin.ASYNC_RESULTS_INDEX, true); checkIndexAccess( @@ -216,7 +216,7 @@ public void testStorageUser() { TransportGetAction.TYPE.name(), BulkAction.NAME, PutMappingAction.NAME, - DeleteIndexAction.NAME + TransportDeleteIndexAction.TYPE.name() ); checkIndexAccess(role, randomFrom(sampleDeniedActions), randomAlphaOfLengthBetween(4, 8), false); checkIndexAccess(role, randomFrom(sampleDeniedActions), ".ds-" + randomAlphaOfLengthBetween(4, 8), false); @@ -246,7 +246,7 @@ public void testDataStreamLifecycleUser() { final List sampleIndexActions = List.of( RolloverAction.NAME, - DeleteIndexAction.NAME, + TransportDeleteIndexAction.TYPE.name(), ForceMergeAction.NAME, IndicesStatsAction.NAME, UpdateSettingsAction.NAME, @@ -256,7 +256,7 @@ final List sampleSystemDataStreamActions = List.of( RolloverAction.NAME, - DeleteIndexAction.NAME, + TransportDeleteIndexAction.TYPE.name(), ForceMergeAction.NAME, IndicesStatsAction.NAME, UpdateSettingsAction.NAME, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json index f1d03531e4b6b..a91cf5d1606c6 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json @@ -143,12 +143,33 @@ } }, { - "ecs_usage_scaled_float": { + "ecs_usage_double_scaled_float": { "mapping": { "type": "scaled_float", "scaling_factor": 1000 }, - "path_match": "*.usage" + "path_match": "*.usage", + "match_mapping_type": "double" + } + }, + { + "ecs_usage_long_scaled_float": { + "mapping": { + "type": "scaled_float", + "scaling_factor": 1000 + }, + "path_match": "*.usage", + "match_mapping_type": "long" + } + }, + { + "ecs_usage_string_scaled_float": { + "mapping": { + "type": "scaled_float", + "scaling_factor": 1000 + }, + "path_match": "*.usage", + "match_mapping_type": "string" + } + }, + { diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java index 9ab7f7dea9b2d..6d46d8bf8db94 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java @@ -17,8 +17,8 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; @@ -33,7 +33,6 @@ import
org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; @@ -1653,22 +1652,24 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E }""", XContentType.JSON).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)).actionGet(); assertEquals(RestStatus.CREATED, indexRequest.status()); - SearchResponse sourceSearchResponse = client().search( - new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) - ).actionGet(); - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); - Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); - assertNotNull(sourceDocMap); - Map dataField = ((Map) sourceDocMap.get("data")); - assertNotNull(dataField); - Map fieldsField = ((Map) dataField.get("fields")); - assertNotNull(fieldsField); - Map periodField = ((Map) fieldsField.get("period")); - assertNotNull(periodField); - assertThat(periodField.get("gte"), is(equalTo("2021/08/20 at 12:00"))); - assertThat(periodField.get("lte"), is(equalTo("2021/08/28 at 23:00"))); - assertThat(fieldsField.get("status"), is(equalTo("enrolled"))); - assertThat(fieldsField.get("field3"), is(equalTo("ignored"))); + assertResponse( + client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), + sourceSearchResponse -> { + assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); + assertNotNull(sourceDocMap); + Map dataField = ((Map) sourceDocMap.get("data")); + assertNotNull(dataField); + Map fieldsField = ((Map) dataField.get("fields")); + assertNotNull(fieldsField); + Map periodField = ((Map) fieldsField.get("period")); + assertNotNull(periodField); + assertThat(periodField.get("gte"), is(equalTo("2021/08/20 at 12:00"))); + assertThat(periodField.get("lte"), is(equalTo("2021/08/28 at 23:00"))); + assertThat(fieldsField.get("status"), is(equalTo("enrolled"))); + assertThat(fieldsField.get("field3"), is(equalTo("ignored"))); + } + ); String policyName = "test1"; List enrichFields = List.of("data.fields.status", "missingField"); @@ -1716,42 +1717,48 @@ public void testRunnerTwoObjectLevelsSourceMappingDateRangeWithFormat() throws E } """); - SearchResponse enrichSearchResponse = client().search( - new SearchRequest(".enrich-test1").source( - SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("data.fields.period", "2021-08-19T14:00:00Z")) - ) - ).actionGet(); - - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertResponse( + client().search( + new SearchRequest(".enrich-test1").source( + SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("data.fields.period", "2021-08-19T14:00:00Z")) + ) + ), + enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(0L)) + ); - enrichSearchResponse = client().search( - new SearchRequest(".enrich-test1").source( - SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("data.fields.period", "2021-08-20T14:00:00Z")) - ) - ).actionGet(); + assertResponse( + 
client().search( + new SearchRequest(".enrich-test1").source( + SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("data.fields.period", "2021-08-20T14:00:00Z")) + ) + ), + enrichSearchResponse -> { + assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); + assertNotNull(enrichDocument); + assertThat(enrichDocument.size(), is(equalTo(1))); + Map resultDataField = ((Map) enrichDocument.get("data")); + assertNotNull(resultDataField); + Map resultFieldsField = ((Map) resultDataField.get("fields")); + assertNotNull(resultFieldsField); + assertThat(resultFieldsField.size(), is(equalTo(2))); + Map resultsPeriodField = ((Map) resultFieldsField.get("period")); + assertNotNull(resultsPeriodField); + assertThat(resultsPeriodField.get("gte"), is(equalTo("2021/08/20 at 12:00"))); + assertThat(resultsPeriodField.get("lte"), is(equalTo("2021/08/28 at 23:00"))); + assertThat(resultFieldsField.get("status"), is(equalTo("enrolled"))); + assertNull(resultFieldsField.get("field3")); + } + ); - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); - Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); - assertNotNull(enrichDocument); - assertThat(enrichDocument.size(), is(equalTo(1))); - Map resultDataField = ((Map) enrichDocument.get("data")); - assertNotNull(resultDataField); - Map resultFieldsField = ((Map) resultDataField.get("fields")); - assertNotNull(resultFieldsField); - assertThat(resultFieldsField.size(), is(equalTo(2))); - Map resultsPeriodField = ((Map) resultFieldsField.get("period")); - assertNotNull(periodField); - assertThat(resultsPeriodField.get("gte"), is(equalTo("2021/08/20 at 12:00"))); - assertThat(resultsPeriodField.get("lte"), is(equalTo("2021/08/28 at 23:00"))); - assertThat(resultFieldsField.get("status"), is(equalTo("enrolled"))); - assertNull(resultFieldsField.get("field3")); - - enrichSearchResponse = client().search( - new SearchRequest(".enrich-test1").source( - SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("data.fields.period", "2021/08/20 at 14:00")) - ) - ).actionGet(); - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertResponse( + client().search( + new SearchRequest(".enrich-test1").source( + SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("data.fields.period", "2021/08/20 at 14:00")) + ) + ), + enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)) + ); // Validate segments validateSegments(createdEnrichIndex, 1); @@ -2384,7 +2391,7 @@ protected void ) { if (action.equals(EnrichReindexAction.INSTANCE)) { super.doExecute( - DeleteIndexAction.INSTANCE, + TransportDeleteIndexAction.TYPE, new DeleteIndexRequest(createdEnrichIndex), listener.delegateFailureAndWrap((delegate, response) -> { if (response.isAcknowledged() == false) { diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/CoordinatorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/CoordinatorTests.java index e986f6e9e0656..079af561e00c9 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/CoordinatorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/CoordinatorTests.java @@ -90,8 +90,10 @@ public void testCoordinateLookups() { // Replying a response and that should trigger 
another coordination round MultiSearchResponse.Item[] responseItems = new MultiSearchResponse.Item[5]; for (int i = 0; i < 5; i++) { + emptyResponse.incRef(); responseItems[i] = new MultiSearchResponse.Item(emptyResponse, null); } + emptyResponse.decRef(); final MultiSearchResponse res1 = new MultiSearchResponse(responseItems, 1L); try { lookupFunction.capturedConsumers.get(0).accept(res1, null); @@ -102,6 +104,7 @@ public void testCoordinateLookups() { // Replying last response, resulting in an empty queue and no outstanding requests. responseItems = new MultiSearchResponse.Item[5]; for (int i = 0; i < 5; i++) { + emptyResponse.incRef(); responseItems[i] = new MultiSearchResponse.Item(emptyResponse, null); } var res2 = new MultiSearchResponse(responseItems, 1L); @@ -318,7 +321,11 @@ public void testReduce() { Map> shardResponses = new HashMap<>(); try { - MultiSearchResponse.Item item1 = new MultiSearchResponse.Item(emptySearchResponse(), null); + var empty = emptySearchResponse(); + // use empty response 3 times below and we start out with ref-count 1 + empty.incRef(); + empty.incRef(); + MultiSearchResponse.Item item1 = new MultiSearchResponse.Item(empty, null); itemsPerIndex.put("index1", List.of(new Tuple<>(0, null), new Tuple<>(1, null), new Tuple<>(2, null))); shardResponses.put( "index1", @@ -329,7 +336,11 @@ public void testReduce() { itemsPerIndex.put("index2", List.of(new Tuple<>(3, null), new Tuple<>(4, null), new Tuple<>(5, null))); shardResponses.put("index2", new Tuple<>(null, failure)); - MultiSearchResponse.Item item2 = new MultiSearchResponse.Item(emptySearchResponse(), null); + // use empty response 3 times below + empty.incRef(); + empty.incRef(); + empty.incRef(); + MultiSearchResponse.Item item2 = new MultiSearchResponse.Item(empty, null); itemsPerIndex.put("index3", List.of(new Tuple<>(6, null), new Tuple<>(7, null), new Tuple<>(8, null))); shardResponses.put( "index3", diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java index 19c551d85617f..3fa4a90a6e734 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java @@ -88,4 +88,9 @@ public void closeInternal() { public void allowPassingToDifferentDriver() { vector.allowPassingToDifferentDriver(); } + + @Override + public BlockFactory blockFactory() { + return vector.blockFactory(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java index e834a1c171e49..9d3f69bfaa981 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java @@ -89,4 +89,9 @@ public void closeInternal() { public void allowPassingToDifferentDriver() { vector.allowPassingToDifferentDriver(); } + + @Override + public BlockFactory blockFactory() { + return vector.blockFactory(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java index 62319e9c100cb..b23a448c58336 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java @@ -88,4 +88,9 @@ public void closeInternal() { public void allowPassingToDifferentDriver() { vector.allowPassingToDifferentDriver(); } + + @Override + public BlockFactory blockFactory() { + return vector.blockFactory(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java index ccc242dd1a573..028ef35577753 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java @@ -88,4 +88,9 @@ public void closeInternal() { public void allowPassingToDifferentDriver() { vector.allowPassingToDifferentDriver(); } + + @Override + public BlockFactory blockFactory() { + return vector.blockFactory(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java index 94697b3136fce..589a9341188fc 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java @@ -88,4 +88,9 @@ public void closeInternal() { public void allowPassingToDifferentDriver() { vector.allowPassingToDifferentDriver(); } + + @Override + public BlockFactory blockFactory() { + return vector.blockFactory(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st index 91b6bb0ffac87..8772e633ff14b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st @@ -96,4 +96,9 @@ $endif$ public void allowPassingToDifferentDriver() { vector.allowPassingToDifferentDriver(); } + + @Override + public BlockFactory blockFactory() { + return vector.blockFactory(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java index a9c6666ce6f94..826c25f3e7828 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java @@ -44,7 +44,6 @@ import java.util.Objects; import java.util.TreeMap; import java.util.function.Supplier; -import java.util.stream.Collectors; /** * Operator that extracts doc_values from a Lucene index out of pages that have been produced by {@link LuceneSourceOperator} @@ -77,7 +76,22 @@ public Operator get(DriverContext driverContext) { @Override public String describe() { - return 
"ValuesSourceReaderOperator[field = " + fields.stream().map(f -> f.name).collect(Collectors.joining(", ")) + "]"; + StringBuilder sb = new StringBuilder(); + sb.append("ValuesSourceReaderOperator[fields = ["); + if (fields.size() < 10) { + boolean first = true; + for (FieldInfo f : fields) { + if (first) { + first = false; + } else { + sb.append(", "); + } + sb.append(f.name); + } + } else { + sb.append(fields.size()).append(" fields"); + } + return sb.append("]]").toString(); } } @@ -377,7 +391,22 @@ private LeafReaderContext ctx(int shard, int segment) { @Override public String toString() { - return "ValuesSourceReaderOperator[field = " + fields.stream().map(f -> f.info.name).collect(Collectors.joining(", ")) + "]"; + StringBuilder sb = new StringBuilder(); + sb.append("ValuesSourceReaderOperator[fields = ["); + if (fields.size() < 10) { + boolean first = true; + for (FieldWork f : fields) { + if (first) { + first = false; + } else { + sb.append(", "); + } + sb.append(f.info.name); + } + } else { + sb.append(fields.size()).append(" fields"); + } + return sb.append("]]").toString(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java index 27b0380ecfea0..96e5de20ba35c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java @@ -648,6 +648,22 @@ public void testAllowPassingBlockToDifferentContext() throws Exception { localBreaker2.close(); } + public void testOwningFactoryOfVectorBlock() { + BlockFactory parentFactory = blockFactory(ByteSizeValue.ofBytes(between(1024, 4096))); + LocalCircuitBreaker localBreaker = new LocalCircuitBreaker(parentFactory.breaker(), between(0, 1024), between(0, 1024)); + BlockFactory localFactory = new BlockFactory(localBreaker, bigArrays, parentFactory); + int numValues = between(2, 10); + try (var builder = localFactory.newIntVectorBuilder(numValues)) { + for (int i = 0; i < numValues; i++) { + builder.appendInt(randomInt()); + } + IntBlock block = builder.build().asBlock(); + assertThat(block.blockFactory(), equalTo(localFactory)); + block.allowPassingToDifferentDriver(); + assertThat(block.blockFactory(), equalTo(parentFactory)); + } + } + static BytesRef randomBytesRef() { return new BytesRef(randomByteArrayOfLength(between(1, 20))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java index 03815dcdaea78..f6310d826c989 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java @@ -245,7 +245,7 @@ private void initIndex(int size, int commitEvery) throws IOException { @Override protected String expectedDescriptionOfSimple() { - return "ValuesSourceReaderOperator[field = long]"; + return "ValuesSourceReaderOperator[fields = [long]]"; } @Override @@ -1354,4 +1354,18 @@ private void testSequentialStoredFields(boolean sequential, int docCount) { ); assertDriverContext(driverContext); } + + public void testDescriptionOfMany() { + List cases = 
infoAndChecksForEachType(randomFrom(Block.MvOrdering.values())); + + ValuesSourceReaderOperator.Factory factory = new ValuesSourceReaderOperator.Factory( + cases.stream().map(c -> c.info).toList(), + List.of(new ValuesSourceReaderOperator.ShardContext(reader, () -> SourceLoader.FROM_STORED_SOURCE)), + 0 + ); + assertThat(factory.describe(), equalTo("ValuesSourceReaderOperator[fields = [" + cases.size() + " fields]]")); + try (Operator op = factory.get(driverContext())) { + assertThat(op.toString(), equalTo("ValuesSourceReaderOperator[fields = [" + cases.size() + " fields]]")); + } + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java index fe6061ee90779..2f1cc2981766e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java @@ -83,6 +83,7 @@ public final void testSimpleLargeInput() { * {@link #smallEnoughToCircuitBreak} and assert that it breaks * in a sane way. */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101824") public final void testSimpleCircuitBreaking() { /* * We build two CircuitBreakers - one for the input blocks and one for the operation itself. diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec index ef3e43aa6d8ab..95da19e38a05d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec @@ -569,7 +569,7 @@ message:keyword | ts:keyword | level:keyword // end::dissectRightPaddingModifier-result[] ; -dissectEmptyRightPaddingModifier +dissectEmptyRightPaddingModifier#[skip:-8.11.2, reason:Support for empty right padding modifiers introduced in 8.11.2] // tag::dissectEmptyRightPaddingModifier[] ROW message="[1998-08-10T17:15:42] [WARN]" | DISSECT message "[%{ts}]%{->}[%{level}]" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec index a6e24e9d45289..daf153051bb89 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec @@ -328,14 +328,14 @@ base:integer | exponent:double | s:double // end::powID-sqrt-result[] ; -powSqrtNeg#[skip:-8.11.99,reason:return type changed in 8.12] +powSqrtNeg#[skip:-8.12.99,reason:warning message changed in 8.13] // tag::powNeg-sqrt[] ROW base = -4, exponent = 0.5 | EVAL s = POW(base, exponent) // end::powNeg-sqrt[] ; warning:Line 2:12: evaluation of [POW(base, exponent)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 2:12: java.lang.ArithmeticException: invalid result when computing pow +warning:Line 2:12: java.lang.ArithmeticException: not a finite double number: NaN // tag::powNeg-sqrt-result[] base:integer | exponent:double | s:double @@ -407,10 +407,10 @@ x:double 1.0 ; -powIntULOverrun#[skip:-8.11.99,reason:return type changed in 8.12] +powIntULOverrun#[skip:-8.12.99,reason:warning message changed in 8.13] row x = pow(2, 9223372036854775808); warning:Line 1:9: evaluation of [pow(2, 9223372036854775808)] failed, treating result as null. Only first 20 failures recorded. 
-warning:Line 1:9: java.lang.ArithmeticException: invalid result when computing pow +warning:Line 1:9: java.lang.ArithmeticException: not a finite double number: Infinity x:double null diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index 0ec7c2d6f84e8..e5039377b4a40 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -7,11 +7,16 @@ long:long |pt:geo_point ; convertFromString#[skip:-8.11.99, reason:spatial type geo_point only added in 8.12] +// tag::to_geopoint-str[] row wkt = "POINT(42.97109630194 14.7552534413725)" -| eval pt = to_geopoint(wkt); +| eval pt = to_geopoint(wkt) +// end::to_geopoint-str[] +; +// tag::to_geopoint-str-result[] wkt:keyword |pt:geo_point "POINT(42.97109630194 14.7552534413725)" |POINT(42.97109630194 14.7552534413725) +// end::to_geopoint-str-result[] ; convertFromLongArray#[skip:-8.11.99, reason:spatial type geo_point only added in 8.12] @@ -57,14 +62,18 @@ long:long |pt:cartesian_point ; convertCartesianFromString#[skip:-8.11.99, reason:spatial type cartesian_point only added in 8.12] +// tag::to_cartesianpoint-str[] row wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] | mv_expand wkt | eval pt = to_cartesianpoint(wkt) -| eval l = to_long(pt); +// end::to_cartesianpoint-str[] +; -wkt:keyword |pt:cartesian_point |l:long -"POINT(4297.11 -1475.53)" |POINT(4297.11 -1475.53) |5009771769843126025 -"POINT(7580.93 2272.77)" |POINT(7580.93 2272.77) |5038656556796611666 +// tag::to_cartesianpoint-str-result[] +wkt:keyword |pt:cartesian_point +"POINT(4297.11 -1475.53)" |POINT(4297.11 -1475.53) +"POINT(7580.93 2272.77)" |POINT(7580.93 2272.77) +// end::to_cartesianpoint-str-result[] ; convertCartesianFromLongArray#[skip:-8.11.99, reason:spatial type cartesian_point only added in 8.12] diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index 2d1d01e42b509..e499d3b783bb8 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -91,7 +91,7 @@ public void setupIndex() throws IOException { NUM_DOCS = between(4 * PAGE_SIZE, 5 * PAGE_SIZE); READ_DESCRIPTION = """ \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = PAGE_SIZE, limit = 2147483647] - \\_ValuesSourceReaderOperator[field = pause_me] + \\_ValuesSourceReaderOperator[fields = [pause_me]] \\_AggregationOperator[mode = INITIAL, aggs = sum of longs] \\_ExchangeSinkOperator""".replace("PAGE_SIZE", Integer.toString(PAGE_SIZE)); MERGE_DESCRIPTION = """ @@ -175,7 +175,7 @@ public void testTaskContents() throws Exception { luceneSources++; continue; } - if (o.operator().equals("ValuesSourceReaderOperator[field = pause_me]")) { + if (o.operator().equals("ValuesSourceReaderOperator[fields = [pause_me]]")) { ValuesSourceReaderOperator.Status oStatus = (ValuesSourceReaderOperator.Status) o.status(); assertMap( oStatus.readersBuilt(), diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java index bdf1fd8616559..8b581cbac5980 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.convert; +import java.lang.ArithmeticException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -34,11 +35,21 @@ public Block evalVector(Vector v) { DoubleVector vector = (DoubleVector) v; int positionCount = v.getPositionCount(); if (vector.isConstant()) { - return driverContext.blockFactory().newConstantDoubleBlockWith(evalValue(vector, 0), positionCount); + try { + return driverContext.blockFactory().newConstantDoubleBlockWith(evalValue(vector, 0), positionCount); + } catch (ArithmeticException e) { + registerException(e); + return driverContext.blockFactory().newConstantNullBlock(positionCount); + } } try (DoubleBlock.Builder builder = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { for (int p = 0; p < positionCount; p++) { - builder.appendDouble(evalValue(vector, p)); + try { + builder.appendDouble(evalValue(vector, p)); + } catch (ArithmeticException e) { + registerException(e); + builder.appendNull(); + } } return builder.build(); } @@ -61,13 +72,17 @@ public Block evalBlock(Block b) { boolean positionOpened = false; boolean valuesAppended = false; for (int i = start; i < end; i++) { - double value = evalValue(block, i); - if (positionOpened == false && valueCount > 1) { - builder.beginPositionEntry(); - positionOpened = true; + try { + double value = evalValue(block, i); + if (positionOpened == false && valueCount > 1) { + builder.beginPositionEntry(); + positionOpened = true; + } + builder.appendDouble(value); + valuesAppended = true; + } catch (ArithmeticException e) { + registerException(e); } - builder.appendDouble(value); - valuesAppended = true; } if (valuesAppended == false) { builder.appendNull(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index bf97faea7ae74..6959c04345d31 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -351,7 +351,17 @@ private LogicalPlan resolveMvExpand(MvExpand p, List childrenOutput) p.source(), p.child(), resolved, - new ReferenceAttribute(resolved.source(), resolved.name(), resolved.dataType(), null, resolved.nullable(), null, false) + resolved.resolved() + ? 
new ReferenceAttribute( + resolved.source(), + resolved.name(), + resolved.dataType(), + null, + resolved.nullable(), + null, + false + ) + : resolved ); } return p; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java index 44f8507d880d8..c858bdbdb3993 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.util.NumericUtils; import java.util.List; import java.util.Map; @@ -64,8 +65,8 @@ public DataType dataType() { return DOUBLE; } - @ConvertEvaluator + @ConvertEvaluator(warnExceptions = { ArithmeticException.class }) static double process(double deg) { - return Math.toDegrees(deg); + return NumericUtils.asFiniteNumber(Math.toDegrees(deg)); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java index 0658dcccbbb48..57f32cf2212d3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.NumericUtils; import java.util.Arrays; import java.util.List; @@ -71,15 +72,7 @@ public Object fold() { @Evaluator(warnExceptions = { ArithmeticException.class }) static double process(double base, double exponent) { - return validateAsDouble(base, exponent); - } - - private static double validateAsDouble(double base, double exponent) { - double result = Math.pow(base, exponent); - if (Double.isNaN(result) || Double.isInfinite(result)) { - throw new ArithmeticException("invalid result when computing pow"); - } - return result; + return NumericUtils.asFiniteNumber(Math.pow(base, exponent)); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 03a385592ac63..93456ff30c4cd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -1426,6 +1426,14 @@ public void testMissingAttributeException_InChainedEval() { assertThat(e.getMessage(), containsString("Unknown column [x5], did you mean any of [x1, x2, x3]?")); } + public void testUnresolvedMvExpand() { + var e = expectThrows(VerificationException.class, () -> analyze("row foo = 1 | mv_expand bar")); + assertThat(e.getMessage(), containsString("Unknown column [bar]")); + + e = expectThrows(VerificationException.class, () -> analyze("row foo = 1 | keep foo, foo | mv_expand foo")); + assertThat(e.getMessage(), containsString("Reference [foo] is ambiguous (to disambiguate use 
quotes or qualifiers)")); + } + private void verifyUnsupported(String query, String errorMessage) { verifyUnsupported(query, errorMessage, "mapping-multi-field-variation.json"); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index b41fbf4fd7443..500a7a1b14195 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -998,23 +998,33 @@ public static void renderTypesTable() throws IOException { if (System.getProperty("generateDocs") == null) { return; } - String name = functionName(); // TODO types table for operators + String name = functionName(); + if (binaryOperator(name) != null) { + renderTypesTable(List.of("lhs", "rhs")); + return; + } + if (unaryOperator(name) != null) { + renderTypesTable(List.of("v")); + return; + } FunctionDefinition definition = definition(name); - if (definition == null) { - LogManager.getLogger(getTestClass()).info("Skipping rendering types because the function isn't registered"); + if (definition != null) { + renderTypesTable(EsqlFunctionRegistry.description(definition).argNames()); return; } + LogManager.getLogger(getTestClass()).info("Skipping rendering types because the function isn't registered"); + } - List args = EsqlFunctionRegistry.description(definition).argNames(); + private static void renderTypesTable(List argNames) throws IOException { StringBuilder header = new StringBuilder(); - for (String arg : args) { + for (String arg : argNames) { header.append(arg).append(" | "); } header.append("result"); List table = new ArrayList<>(); for (Map.Entry, DataType> sig : signatures.entrySet()) { - if (sig.getKey().size() != args.size()) { + if (sig.getKey().size() != argNames.size()) { continue; } StringBuilder b = new StringBuilder(); @@ -1057,9 +1067,9 @@ private static String binaryOperator(String name) { case "div" -> "/"; case "equals" -> "=="; case "greater_than" -> ">"; - case "greater_than_or_equal_to" -> ">="; + case "greater_than_or_equal" -> ">="; case "less_than" -> "<"; - case "less_than_or_equal_to" -> "<="; + case "less_than_or_equal" -> "<="; case "mod" -> "%"; case "mul" -> "*"; case "not_equals" -> "!="; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java index b3e0c65f0c8f8..776782b3828f5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java @@ -10,7 +10,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; @@ -23,7 +22,6 @@ import java.util.function.Function; import java.util.function.Supplier; -@LuceneTestCase.AwaitsFix(bugUrl = 
"https://github.com/elastic/elasticsearch/issues/102987") public class ToDegreesTests extends AbstractFunctionTestCase { public ToDegreesTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -62,14 +60,33 @@ public static Iterable parameters() { UNSIGNED_LONG_MAX, List.of() ); - TestCaseSupplier.forUnaryDouble( + TestCaseSupplier.forUnaryDouble(suppliers, "ToDegreesEvaluator[field=Attribute[channel=0]]", DataTypes.DOUBLE, d -> { + double deg = Math.toDegrees(d); + return Double.isNaN(deg) || Double.isInfinite(deg) ? null : deg; + }, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, d -> { + double deg = Math.toDegrees(d); + ArrayList warnings = new ArrayList<>(2); + if (Double.isNaN(deg) || Double.isInfinite(deg)) { + warnings.add("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded."); + warnings.add("Line -1:-1: java.lang.ArithmeticException: not a finite double number: " + deg); + } + return warnings; + }); + TestCaseSupplier.unary( suppliers, "ToDegreesEvaluator[field=Attribute[channel=0]]", + List.of( + new TestCaseSupplier.TypedDataSupplier("Double.MAX_VALUE", () -> Double.MAX_VALUE, DataTypes.DOUBLE), + new TestCaseSupplier.TypedDataSupplier("-Double.MAX_VALUE", () -> -Double.MAX_VALUE, DataTypes.DOUBLE), + new TestCaseSupplier.TypedDataSupplier("Double.POSITIVE_INFINITY", () -> Double.POSITIVE_INFINITY, DataTypes.DOUBLE), + new TestCaseSupplier.TypedDataSupplier("Double.NEGATIVE_INFINITY", () -> Double.NEGATIVE_INFINITY, DataTypes.DOUBLE) + ), DataTypes.DOUBLE, - Math::toDegrees, - Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY, - List.of() + d -> null, + d -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.ArithmeticException: not a finite double number: " + ((double) d > 0 ? "Infinity" : "-Infinity") + ) ); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java index c8b316d8e6bfb..f4cf955c46bb8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java @@ -74,7 +74,7 @@ public static Iterable parameters() { Double.POSITIVE_INFINITY, List.of( "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: invalid result when computing pow" + "Line -1:-1: java.lang.ArithmeticException: not a finite double number: Infinity" ) ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java index 30de8ecae135b..6fce2646012af 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java @@ -77,6 +77,7 @@ public void testMatchAll() throws IOException { testCase(new SingleValueQuery(new MatchAll(Source.EMPTY), "foo").asBuilder(), false, false, this::runCase); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102997") public void testMatchSome() throws IOException { int max = between(1, 100); testCase( diff --git a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java index faeaee2da9307..ca848c8bb8c44 100644 --- a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java +++ b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java @@ -68,6 +68,7 @@ import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -125,32 +126,40 @@ public void testCloseFreezeAndOpen() throws Exception { .setScroll(TimeValue.timeValueMinutes(1)) .setSize(1) .get(); - do { - assertHitCount(searchResponse, 3); - assertEquals(1, searchResponse.getHits().getHits().length); - SearchService searchService = getInstanceFromNode(SearchService.class); - assertThat(searchService.getActiveContexts(), Matchers.greaterThanOrEqualTo(1)); - for (int i = 0; i < 2; i++) { - shard = indexService.getShard(i); - engine = IndexShardTestCase.getEngine(shard); - // scrolls keep the reader open - assertTrue(((FrozenEngine) engine).isReaderOpen()); - } - searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); - } while (searchResponse.getHits().getHits().length > 0); + try { + do { + assertHitCount(searchResponse, 3); + assertEquals(1, searchResponse.getHits().getHits().length); + SearchService searchService = getInstanceFromNode(SearchService.class); + assertThat(searchService.getActiveContexts(), Matchers.greaterThanOrEqualTo(1)); + for (int i = 0; i < 2; i++) { + shard = indexService.getShard(i); + engine = IndexShardTestCase.getEngine(shard); + // scrolls keep the reader open + assertTrue(((FrozenEngine) engine).isReaderOpen()); + } + searchResponse.decRef(); + searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); + } while 
(searchResponse.getHits().getHits().length > 0); + } finally { + searchResponse.decRef(); + } client().prepareClearScroll().addScrollId(searchResponse.getScrollId()).get(); String pitId = openReaders(TimeValue.timeValueMinutes(1), indexName); try { for (int from = 0; from < 3; from++) { - searchResponse = client().prepareSearch() - .setIndicesOptions(IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED) - .setPointInTime(new PointInTimeBuilder(pitId)) - .setSize(1) - .setFrom(from) - .get(); - assertHitCount(searchResponse, 3); - assertEquals(1, searchResponse.getHits().getHits().length); + assertResponse( + client().prepareSearch() + .setIndicesOptions(IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED) + .setPointInTime(new PointInTimeBuilder(pitId)) + .setSize(1) + .setFrom(from), + response -> { + assertHitCount(response, 3); + assertEquals(1, response.getHits().getHits().length); + } + ); SearchService searchService = getInstanceFromNode(SearchService.class); assertThat(searchService.getActiveContexts(), Matchers.greaterThanOrEqualTo(1)); for (int i = 0; i < 2; i++) { @@ -197,7 +206,8 @@ public void testSearchAndGetAPIsAreThrottled() throws IOException { client().prepareSearch(indexName) .setIndicesOptions(IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED) .setSearchType(SearchType.QUERY_THEN_FETCH) - .get(); + .get() + .decRef(); // in total 4 refreshes 1x query & 1x fetch per shard (we have 2) numRefreshes += 3; } diff --git a/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineActionTests.java b/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineActionTests.java index 7f1a0f2bcc2cb..d93c24356422f 100644 --- a/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineActionTests.java +++ b/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineActionTests.java @@ -128,41 +128,45 @@ public void testGetPipelinesByExplicitAndWildcardIds() { SearchResponse.Clusters.EMPTY, null ); + try { + + SearchResponse mockResponse = mock(SearchResponse.class); + when(mockResponse.getHits()).thenReturn(prepareSearchHits()); + + GetPipelineRequest request = new GetPipelineRequest(List.of("1", "2", "3*")); + AtomicReference failure = new AtomicReference<>(); + + // Set up an ActionListener for the actual test conditions + ActionListener testActionListener = new ActionListener<>() { + @Override + public void onResponse(GetPipelineResponse getPipelineResponse) { + assertThat(getPipelineResponse, is(notNullValue())); + assertThat(getPipelineResponse.pipelines().size(), equalTo(3)); + assertTrue(getPipelineResponse.pipelines().containsKey("1")); + assertTrue(getPipelineResponse.pipelines().containsKey("2")); + assertTrue(getPipelineResponse.pipelines().containsKey("3*")); + } - SearchResponse mockResponse = mock(SearchResponse.class); - when(mockResponse.getHits()).thenReturn(prepareSearchHits()); - - GetPipelineRequest request = new GetPipelineRequest(List.of("1", "2", "3*")); - AtomicReference failure = new AtomicReference<>(); - - // Set up an ActionListener for the actual test conditions - ActionListener testActionListener = new ActionListener<>() { - @Override - public void onResponse(GetPipelineResponse getPipelineResponse) { - assertThat(getPipelineResponse, is(notNullValue())); - assertThat(getPipelineResponse.pipelines().size(), equalTo(3)); - assertTrue(getPipelineResponse.pipelines().containsKey("1")); - 
assertTrue(getPipelineResponse.pipelines().containsKey("2")); - assertTrue(getPipelineResponse.pipelines().containsKey("3*")); - } + @Override + public void onFailure(Exception e) { + failure.set(e); + } + }; - @Override - public void onFailure(Exception e) { - failure.set(e); + TransportService transportService = MockUtils.setupTransportServiceWithThreadpoolExecutor(); + try (var threadPool = createThreadPool()) { + final var client = getMockClient(threadPool, searchResponse); + new TransportGetPipelineAction(transportService, mock(ActionFilters.class), client).doExecute( + null, + request, + testActionListener + ); } - }; - TransportService transportService = MockUtils.setupTransportServiceWithThreadpoolExecutor(); - try (var threadPool = createThreadPool()) { - final var client = getMockClient(threadPool, searchResponse); - new TransportGetPipelineAction(transportService, mock(ActionFilters.class), client).doExecute( - null, - request, - testActionListener - ); + assertNull(failure.get()); + } finally { + searchResponse.decRef(); } - - assertNull(failure.get()); } public void testMissingIndexHandling() throws Exception { diff --git a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/SearchIdleTests.java b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/SearchIdleTests.java index 24ad009990b0b..2da4e2802bdbe 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/SearchIdleTests.java +++ b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/SearchIdleTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; @@ -40,6 +40,7 @@ import java.util.Locale; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -130,12 +131,13 @@ public void testSearchIdleConstantKeywordMatchNoIndex() throws InterruptedExcept assertIdleShard(beforeStatsResponse); // WHEN - final SearchResponse searchResponse = search("test*", "constant_keyword", randomAlphaOfLength(5), 5); - assertEquals(RestStatus.OK, searchResponse.status()); - // NOTE: we need an empty result from at least one shard - assertEquals(idleIndexShardsCount + activeIndexShardsCount - 1, searchResponse.getSkippedShards()); - assertEquals(0, searchResponse.getFailedShards()); - assertEquals(0, searchResponse.getHits().getHits().length); + assertResponse(search("test*", "constant_keyword", randomAlphaOfLength(5), 5), searchResponse -> { + assertEquals(RestStatus.OK, searchResponse.status()); + // NOTE: we need an empty result from at least one shard + assertEquals(idleIndexShardsCount + activeIndexShardsCount - 1, searchResponse.getSkippedShards()); + assertEquals(0, searchResponse.getFailedShards()); + assertEquals(0, searchResponse.getHits().getHits().length); + }); // THEN final IndicesStatsResponse afterStatsResponse = 
indicesAdmin().prepareStats("test*").get(); @@ -202,11 +204,12 @@ public void testSearchIdleConstantKeywordMatchOneIndex() throws InterruptedExcep assertIdleShard(activeIndexStatsBefore); // WHEN - final SearchResponse searchResponse = search("test*", "constant_keyword", "constant_value2", 5); - assertEquals(RestStatus.OK, searchResponse.status()); - assertEquals(idleIndexShardsCount, searchResponse.getSkippedShards()); - assertEquals(0, searchResponse.getFailedShards()); - Arrays.stream(searchResponse.getHits().getHits()).forEach(searchHit -> assertEquals("test2", searchHit.getIndex())); + assertResponse(search("test*", "constant_keyword", "constant_value2", 5), searchResponse -> { + assertEquals(RestStatus.OK, searchResponse.status()); + assertEquals(idleIndexShardsCount, searchResponse.getSkippedShards()); + assertEquals(0, searchResponse.getFailedShards()); + Arrays.stream(searchResponse.getHits().getHits()).forEach(searchHit -> assertEquals("test2", searchHit.getIndex())); + }); // THEN final IndicesStatsResponse idleIndexStatsAfter = indicesAdmin().prepareStats(idleIndex).get(); @@ -265,18 +268,19 @@ public void testSearchIdleConstantKeywordMatchTwoIndices() throws InterruptedExc assertIdleShard(beforeStatsResponse); // WHEN - final SearchResponse searchResponse = search("test*", "constant_keyword", "constant", 5); - - // THEN - assertEquals(RestStatus.OK, searchResponse.status()); - assertEquals(0, searchResponse.getSkippedShards()); - assertEquals(0, searchResponse.getFailedShards()); - assertArrayEquals( - new String[] { "test1", "test2" }, - Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).sorted().toArray() - ); - final IndicesStatsResponse afterStatsResponse = indicesAdmin().prepareStats("test*").get(); - assertIdleShardsRefreshStats(beforeStatsResponse, afterStatsResponse); + assertResponse(search("test*", "constant_keyword", "constant", 5), searchResponse -> { + + // THEN + assertEquals(RestStatus.OK, searchResponse.status()); + assertEquals(0, searchResponse.getSkippedShards()); + assertEquals(0, searchResponse.getFailedShards()); + assertArrayEquals( + new String[] { "test1", "test2" }, + Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).sorted().toArray() + ); + final IndicesStatsResponse afterStatsResponse = indicesAdmin().prepareStats("test*").get(); + assertIdleShardsRefreshStats(beforeStatsResponse, afterStatsResponse); + }); } public void testSearchIdleWildcardQueryMatchOneIndex() throws InterruptedException { @@ -327,16 +331,17 @@ public void testSearchIdleWildcardQueryMatchOneIndex() throws InterruptedExcepti assertIdleShard(activeIndexStatsBefore); // WHEN - final SearchResponse searchResponse = client().prepareSearch("test*") - .setQuery(new WildcardQueryBuilder("constant_keyword", "test2*")) - .setPreFilterShardSize(5) - .get(); - - // THEN - assertEquals(RestStatus.OK, searchResponse.status()); - assertEquals(idleIndexShardsCount, searchResponse.getSkippedShards()); - assertEquals(0, searchResponse.getFailedShards()); - Arrays.stream(searchResponse.getHits().getHits()).forEach(searchHit -> assertEquals("test2", searchHit.getIndex())); + assertResponse( + client().prepareSearch("test*").setQuery(new WildcardQueryBuilder("constant_keyword", "test2*")).setPreFilterShardSize(5), + searchResponse -> { + + // THEN + assertEquals(RestStatus.OK, searchResponse.status()); + assertEquals(idleIndexShardsCount, searchResponse.getSkippedShards()); + assertEquals(0, searchResponse.getFailedShards()); + 
Arrays.stream(searchResponse.getHits().getHits()).forEach(searchHit -> assertEquals("test2", searchHit.getIndex())); + } + ); final IndicesStatsResponse idleIndexStatsAfter = indicesAdmin().prepareStats(idleIndex).get(); assertIdleShardsRefreshStats(idleIndexStatsBefore, idleIndexStatsAfter); @@ -345,11 +350,8 @@ public void testSearchIdleWildcardQueryMatchOneIndex() throws InterruptedExcepti assertThat(active, empty()); } - private SearchResponse search(final String index, final String field, final String value, int preFilterShardSize) { - return client().prepareSearch(index) - .setQuery(new MatchPhraseQueryBuilder(field, value)) - .setPreFilterShardSize(preFilterShardSize) - .get(); + private SearchRequestBuilder search(final String index, final String field, final String value, int preFilterShardSize) { + return client().prepareSearch(index).setQuery(new MatchPhraseQueryBuilder(field, value)).setPreFilterShardSize(preFilterShardSize); } private static void assertIdleShard(final IndicesStatsResponse statsResponse) { diff --git a/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongTests.java b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongTests.java index 3407a71f23265..24335a7892c9d 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongTests.java +++ b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilders; @@ -41,10 +40,10 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.range; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; @ESIntegTestCase.SuiteScopeTestCase public class UnsignedLongTests extends ESIntegTestCase { @@ -98,78 +97,74 @@ public void setupSuiteScopeCluster() throws Exception { public void testSort() { for (String index : new String[] { "idx", "idx-sort" }) { // asc sort - { - SearchResponse response = prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()) - .setSize(numDocs) - .addSort("ul_field", SortOrder.ASC) - .get(); - assertNoFailures(response); - SearchHit[] hits = response.getHits().getHits(); - assertEquals(hits.length, numDocs); - int i = 0; - for (SearchHit hit : hits) { - assertEquals(values[i++], hit.getSortValues()[0]); + assertNoFailuresAndResponse( + prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()).setSize(numDocs).addSort("ul_field", SortOrder.ASC), + response -> { + SearchHit[] hits = response.getHits().getHits(); + assertEquals(hits.length, numDocs); + 
int i = 0; + for (SearchHit hit : hits) { + assertEquals(values[i++], hit.getSortValues()[0]); + } } - } + ); // desc sort - { - SearchResponse response = prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()) - .setSize(numDocs) - .addSort("ul_field", SortOrder.DESC) - .get(); - assertNoFailures(response); - SearchHit[] hits = response.getHits().getHits(); - assertEquals(hits.length, numDocs); - int i = numDocs - 1; - for (SearchHit hit : hits) { - assertEquals(values[i--], hit.getSortValues()[0]); + assertNoFailuresAndResponse( + prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()).setSize(numDocs).addSort("ul_field", SortOrder.DESC), + response -> { + SearchHit[] hits = response.getHits().getHits(); + assertEquals(hits.length, numDocs); + int i = numDocs - 1; + for (SearchHit hit : hits) { + assertEquals(values[i--], hit.getSortValues()[0]); + } } - } + ); // asc sort with search_after as Long - { - SearchResponse response = prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()) + assertNoFailuresAndResponse( + prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()) .setSize(numDocs) .addSort("ul_field", SortOrder.ASC) - .searchAfter(new Long[] { 100L }) - .get(); - assertNoFailures(response); - SearchHit[] hits = response.getHits().getHits(); - assertEquals(hits.length, 7); - int i = 3; - for (SearchHit hit : hits) { - assertEquals(values[i++], hit.getSortValues()[0]); + .searchAfter(new Long[] { 100L }), + response -> { + SearchHit[] hits = response.getHits().getHits(); + assertEquals(hits.length, 7); + int i = 3; + for (SearchHit hit : hits) { + assertEquals(values[i++], hit.getSortValues()[0]); + } } - } + ); // asc sort with search_after as BigInteger - { - SearchResponse response = prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()) + assertNoFailuresAndResponse( + prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()) .setSize(numDocs) .addSort("ul_field", SortOrder.ASC) - .searchAfter(new BigInteger[] { new BigInteger("18446744073709551614") }) - .get(); - assertNoFailures(response); - SearchHit[] hits = response.getHits().getHits(); - assertEquals(hits.length, 2); - int i = 8; - for (SearchHit hit : hits) { - assertEquals(values[i++], hit.getSortValues()[0]); + .searchAfter(new BigInteger[] { new BigInteger("18446744073709551614") }), + response -> { + SearchHit[] hits = response.getHits().getHits(); + assertEquals(hits.length, 2); + int i = 8; + for (SearchHit hit : hits) { + assertEquals(values[i++], hit.getSortValues()[0]); + } } - } + ); // asc sort with search_after as BigInteger in String format - { - SearchResponse response = prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()) + assertNoFailuresAndResponse( + prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()) .setSize(numDocs) .addSort("ul_field", SortOrder.ASC) - .searchAfter(new String[] { "18446744073709551614" }) - .get(); - assertNoFailures(response); - SearchHit[] hits = response.getHits().getHits(); - assertEquals(hits.length, 2); - int i = 8; - for (SearchHit hit : hits) { - assertEquals(values[i++], hit.getSortValues()[0]); + .searchAfter(new String[] { "18446744073709551614" }), + response -> { + SearchHit[] hits = response.getHits().getHits(); + assertEquals(hits.length, 2); + int i = 8; + for (SearchHit hit : hits) { + assertEquals(values[i++], hit.getSortValues()[0]); + } } - } + ); // asc sort with search_after of negative value should fail { SearchRequestBuilder srb = prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()) @@ -189,28 +184,26 
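The UnsignedLongTests hunks around this point all apply the same refactor: a raw `SearchResponse response = ....get()` followed by manual assertions is replaced with the ElasticsearchAssertions helpers, which execute the request builder, hand the response to a consumer, and (as part of the wider SearchResponse ref-counting work in this diff) release it once the assertions have run. A hedged usage sketch of the two helper shapes used here, assertNoFailuresAndResponse for bodies that inspect hits or aggregations and assertHitCount when only the total matters:

    // Run the search, check there were no shard failures, assert inside the
    // consumer, and let the helper release the response afterwards.
    assertNoFailuresAndResponse(
        prepareSearch("idx").setQuery(QueryBuilders.matchAllQuery()).setSize(numDocs).addSort("ul_field", SortOrder.ASC),
        response -> assertEquals(numDocs, response.getHits().getHits().length)
    );

    // When only the hit count matters, skip the consumer entirely.
    assertHitCount(prepareSearch("idx").setSize(0).setQuery(new RangeQueryBuilder("ul_field").from("1.8E19")), 4);
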
@@ public void testSort() { assertThat(exception.getCause().getMessage(), containsString("Failed to parse search_after value")); } // desc sort with search_after as BigInteger - { - SearchResponse response = prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()) + assertNoFailuresAndResponse( + prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()) .setSize(numDocs) .addSort("ul_field", SortOrder.DESC) - .searchAfter(new BigInteger[] { new BigInteger("18446744073709551615") }) - .get(); - assertNoFailures(response); - SearchHit[] hits = response.getHits().getHits(); - assertEquals(hits.length, 8); - int i = 7; - for (SearchHit hit : hits) { - assertEquals(values[i--], hit.getSortValues()[0]); + .searchAfter(new BigInteger[] { new BigInteger("18446744073709551615") }), + response -> { + SearchHit[] hits = response.getHits().getHits(); + assertEquals(hits.length, 8); + int i = 7; + for (SearchHit hit : hits) { + assertEquals(values[i--], hit.getSortValues()[0]); + } } - } + ); } } public void testAggs() { // terms agg - { - SearchResponse response = prepareSearch("idx").setSize(0).addAggregation(terms("ul_terms").field("ul_field")).get(); - assertNoFailures(response); + assertNoFailuresAndResponse(prepareSearch("idx").setSize(0).addAggregation(terms("ul_terms").field("ul_field")), response -> { Terms terms = response.getAggregations().get("ul_terms"); long[] expectedBucketDocCounts = { 2, 2, 2, 1, 1, 1, 1 }; @@ -228,68 +221,62 @@ public void testAggs() { assertEquals(expectedBucketKeys[i], bucket.getKey()); i++; } - } + }); // histogram agg - { - SearchResponse response = prepareSearch("idx").setSize(0) - .addAggregation(histogram("ul_histo").field("ul_field").interval(9E18).minDocCount(0)) - .get(); - assertNoFailures(response); - Histogram histo = response.getAggregations().get("ul_histo"); + assertNoFailuresAndResponse( + prepareSearch("idx").setSize(0).addAggregation(histogram("ul_histo").field("ul_field").interval(9E18).minDocCount(0)), + response -> { + Histogram histo = response.getAggregations().get("ul_histo"); - long[] expectedBucketDocCounts = { 3, 3, 4 }; - double[] expectedBucketKeys = { 0, 9.0E18, 1.8E19 }; - int i = 0; - for (Histogram.Bucket bucket : histo.getBuckets()) { - assertEquals(expectedBucketDocCounts[i], bucket.getDocCount()); - assertEquals(expectedBucketKeys[i], bucket.getKey()); - i++; + long[] expectedBucketDocCounts = { 3, 3, 4 }; + double[] expectedBucketKeys = { 0, 9.0E18, 1.8E19 }; + int i = 0; + for (Histogram.Bucket bucket : histo.getBuckets()) { + assertEquals(expectedBucketDocCounts[i], bucket.getDocCount()); + assertEquals(expectedBucketKeys[i], bucket.getKey()); + i++; + } } - } + ); // range agg - { - SearchResponse response = prepareSearch("idx").setSize(0) + assertNoFailuresAndResponse( + prepareSearch("idx").setSize(0) .addAggregation( range("ul_range").field("ul_field").addUnboundedTo(9.0E18).addRange(9.0E18, 1.8E19).addUnboundedFrom(1.8E19) - ) - .get(); - assertNoFailures(response); - Range range = response.getAggregations().get("ul_range"); + ), + response -> { + Range range = response.getAggregations().get("ul_range"); - long[] expectedBucketDocCounts = { 3, 3, 4 }; - String[] expectedBucketKeys = { "*-9.0E18", "9.0E18-1.8E19", "1.8E19-*" }; - int i = 0; - for (Range.Bucket bucket : range.getBuckets()) { - assertEquals(expectedBucketDocCounts[i], bucket.getDocCount()); - assertEquals(expectedBucketKeys[i], bucket.getKey()); - i++; + long[] expectedBucketDocCounts = { 3, 3, 4 }; + String[] expectedBucketKeys = { "*-9.0E18", 
"9.0E18-1.8E19", "1.8E19-*" }; + int i = 0; + for (Range.Bucket bucket : range.getBuckets()) { + assertEquals(expectedBucketDocCounts[i], bucket.getDocCount()); + assertEquals(expectedBucketKeys[i], bucket.getKey()); + i++; + } } - } + ); // sum agg - { - SearchResponse response = prepareSearch("idx").setSize(0).addAggregation(sum("ul_sum").field("ul_field")).get(); - assertNoFailures(response); + assertNoFailuresAndResponse(prepareSearch("idx").setSize(0).addAggregation(sum("ul_sum").field("ul_field")), response -> { Sum sum = response.getAggregations().get("ul_sum"); double expectedSum = Arrays.stream(values).mapToDouble(Number::doubleValue).sum(); assertEquals(expectedSum, sum.value(), 0.001); - } + }); // max agg - { - SearchResponse response = prepareSearch("idx").setSize(0).addAggregation(max("ul_max").field("ul_field")).get(); - assertNoFailures(response); + assertNoFailuresAndResponse(prepareSearch("idx").setSize(0).addAggregation(max("ul_max").field("ul_field")), response -> { Max max = response.getAggregations().get("ul_max"); assertEquals(1.8446744073709551615E19, max.value(), 0.001); - } + }); + // min agg - { - SearchResponse response = prepareSearch("idx").setSize(0).addAggregation(min("ul_min").field("ul_field")).get(); - assertNoFailures(response); + assertNoFailuresAndResponse(prepareSearch("idx").setSize(0).addAggregation(min("ul_min").field("ul_field")), response -> { Min min = response.getAggregations().get("ul_min"); assertEquals(0, min.value(), 0.001); - } + }); } public void testSortDifferentFormatsShouldFail() { @@ -304,15 +291,11 @@ public void testSortDifferentFormatsShouldFail() { } public void testRangeQuery() { - SearchResponse response = prepareSearch("idx").setSize(0) - .setQuery(new RangeQueryBuilder("ul_field").to("9.0E18").includeUpper(false)) - .get(); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); - response = prepareSearch("idx").setSize(0) - .setQuery(new RangeQueryBuilder("ul_field").from("9.0E18").to("1.8E19").includeUpper(false)) - .get(); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); - response = prepareSearch("idx").setSize(0).setQuery(new RangeQueryBuilder("ul_field").from("1.8E19")).get(); - assertThat(response.getHits().getTotalHits().value, equalTo(4L)); + assertHitCount(prepareSearch("idx").setSize(0).setQuery(new RangeQueryBuilder("ul_field").to("9.0E18").includeUpper(false)), 3); + assertHitCount( + prepareSearch("idx").setSize(0).setQuery(new RangeQueryBuilder("ul_field").from("9.0E18").to("1.8E19").includeUpper(false)), + 3 + ); + assertHitCount(prepareSearch("idx").setSize(0).setQuery(new RangeQueryBuilder("ul_field").from("1.8E19")), 4); } } diff --git a/x-pack/plugin/mapper-version/src/internalClusterTest/java/org/elasticsearch/xpack/versionfield/VersionFieldIT.java b/x-pack/plugin/mapper-version/src/internalClusterTest/java/org/elasticsearch/xpack/versionfield/VersionFieldIT.java index 0dc7ca8006f8a..b5f172d26ffe7 100644 --- a/x-pack/plugin/mapper-version/src/internalClusterTest/java/org/elasticsearch/xpack/versionfield/VersionFieldIT.java +++ b/x-pack/plugin/mapper-version/src/internalClusterTest/java/org/elasticsearch/xpack/versionfield/VersionFieldIT.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.versionfield; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -24,6 +23,7 @@ import java.util.Collection; import 
java.util.List; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.contains; @@ -67,18 +67,20 @@ public void testTermsAggregation() throws Exception { indicesAdmin().prepareRefresh().get(); // terms aggs - SearchResponse response = client().prepareSearch(indexName) - .addAggregation(AggregationBuilders.terms("myterms").field("version")) - .get(); - Terms terms = response.getAggregations().get("myterms"); - List buckets = terms.getBuckets(); - - assertEquals(5, buckets.size()); - assertEquals("1.0", buckets.get(0).getKey()); - assertEquals("1.3.0", buckets.get(1).getKey()); - assertEquals("2.1.0-alpha", buckets.get(2).getKey()); - assertEquals("2.1.0", buckets.get(3).getKey()); - assertEquals("3.11.5", buckets.get(4).getKey()); + assertResponse( + client().prepareSearch(indexName).addAggregation(AggregationBuilders.terms("myterms").field("version")), + response -> { + Terms terms = response.getAggregations().get("myterms"); + List buckets = terms.getBuckets(); + + assertEquals(5, buckets.size()); + assertEquals("1.0", buckets.get(0).getKey()); + assertEquals("1.3.0", buckets.get(1).getKey()); + assertEquals("2.1.0-alpha", buckets.get(2).getKey()); + assertEquals("2.1.0", buckets.get(3).getKey()); + assertEquals("3.11.5", buckets.get(4).getKey()); + } + ); } public void testTermsEnum() throws Exception { diff --git a/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java b/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java index 038f2b54965ce..f41cc145831cf 100644 --- a/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java +++ b/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.versionfield; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; @@ -27,6 +26,8 @@ import java.util.Collection; import java.util.List; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; public class VersionStringFieldTests extends ESSingleNodeTestCase { @@ -55,108 +56,71 @@ public void testExactQueries() throws Exception { setUpIndex(indexName); // match - SearchResponse response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", ("1.0.0"))).get(); - assertEquals(1, response.getHits().getTotalHits().value); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "1.4.0")).get(); - assertEquals(0, response.getHits().getTotalHits().value); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "1.3.0")).get(); - assertEquals(0, response.getHits().getTotalHits().value); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "1.3.0+build.1234567")).get(); - assertEquals(1, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", ("1.0.0"))), 1); + 
assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "1.4.0")), 0); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "1.3.0")), 0); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "1.3.0+build.1234567")), 1); // term - response = client().prepareSearch(indexName).setQuery(QueryBuilders.termQuery("version", "1.0.0")).get(); - assertEquals(1, response.getHits().getTotalHits().value); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.termQuery("version", "1.4.0")).get(); - assertEquals(0, response.getHits().getTotalHits().value); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.termQuery("version", "1.3.0")).get(); - assertEquals(0, response.getHits().getTotalHits().value); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.termQuery("version", "1.3.0+build.1234567")).get(); - assertEquals(1, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.termQuery("version", "1.0.0")), 1); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.termQuery("version", "1.4.0")), 0); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.termQuery("version", "1.3.0")), 0); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.termQuery("version", "1.3.0+build.1234567")), 1); // terms - response = client().prepareSearch(indexName).setQuery(QueryBuilders.termsQuery("version", "1.0.0", "1.3.0")).get(); - assertEquals(1, response.getHits().getTotalHits().value); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.termsQuery("version", "1.4.0", "1.3.0+build.1234567")).get(); - assertEquals(1, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.termsQuery("version", "1.0.0", "1.3.0")), 1); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.termsQuery("version", "1.4.0", "1.3.0+build.1234567")), 1); // phrase query (just for keyword compatibility) - response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchPhraseQuery("version", "2.1.0-alpha.beta")).get(); - assertEquals(1, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.matchPhraseQuery("version", "2.1.0-alpha.beta")), 1); } public void testRangeQueries() throws Exception { String indexName = setUpIndex("test"); - SearchResponse response = client().prepareSearch(indexName) - .setQuery(QueryBuilders.rangeQuery("version").from("1.0.0").to("3.0.0")) - .get(); - assertEquals(4, response.getHits().getTotalHits().value); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("1.1.0").to("3.0.0")).get(); - assertEquals(3, response.getHits().getTotalHits().value); - response = client().prepareSearch(indexName) - .setQuery(QueryBuilders.rangeQuery("version").from("0.1.0").to("2.1.0-alpha.beta")) - .get(); - assertEquals(3, response.getHits().getTotalHits().value); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("2.1.0").to("3.0.0")).get(); - assertEquals(1, response.getHits().getTotalHits().value); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("3.0.0").to("4.0.0")).get(); - assertEquals(0, response.getHits().getTotalHits().value); - 
response = client().prepareSearch(indexName) - .setQuery(QueryBuilders.rangeQuery("version").from("1.3.0+build.1234569").to("3.0.0")) - .get(); - assertEquals(2, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("1.0.0").to("3.0.0")), 4); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("1.1.0").to("3.0.0")), 3); + assertHitCount( + client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("0.1.0").to("2.1.0-alpha.beta")), + 3 + ); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("2.1.0").to("3.0.0")), 1); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("3.0.0").to("4.0.0")), 0); + assertHitCount( + client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("1.3.0+build.1234569").to("3.0.0")), + 2 + ); // ranges excluding edges - response = client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("1.0.0", false).to("3.0.0")).get(); - assertEquals(3, response.getHits().getTotalHits().value); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("1.0.0").to("2.1.0", false)).get(); - assertEquals(3, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("1.0.0", false).to("3.0.0")), 3); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("1.0.0").to("2.1.0", false)), 3); // open ranges - response = client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("1.4.0")).get(); - assertEquals(4, response.getHits().getTotalHits().value); - - response = client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").to("1.4.0")).get(); - assertEquals(2, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("1.4.0")), 4); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").to("1.4.0")), 2); } public void testPrefixQuery() throws IOException { String indexName = setUpIndex("test"); // prefix - SearchResponse response = client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "1")).get(); - assertEquals(3, response.getHits().getTotalHits().value); - - response = client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "2.1")).get(); - assertEquals(2, response.getHits().getTotalHits().value); - - response = client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "2.1.0-")).get(); - assertEquals(1, response.getHits().getTotalHits().value); - - response = client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "1.3.0+b")).get(); - assertEquals(1, response.getHits().getTotalHits().value); - - response = client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "2")).get(); - assertEquals(3, response.getHits().getTotalHits().value); - - response = client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "21")).get(); - assertEquals(1, response.getHits().getTotalHits().value); - - response = client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "21.")).get(); - assertEquals(1, 
response.getHits().getTotalHits().value); - - response = client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "21.1")).get(); - assertEquals(1, response.getHits().getTotalHits().value); - - response = client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "21.11")).get(); - assertEquals(1, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "1")), 3); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "2.1")), 2); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "2.1.0-")), 1); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "1.3.0+b")), 1); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "2")), 3); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "21")), 1); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "21.")), 1); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "21.1")), 1); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "21.11")), 1); // test case sensitivity / insensitivity - response = client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "2.1.0-A")).get(); - assertEquals(0, response.getHits().getTotalHits().value); - - response = client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "2.1.0-A").caseInsensitive(true)).get(); - assertEquals(1, response.getHits().getTotalHits().value); - assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[0].getSourceAsMap().get("version")); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "2.1.0-A")), 0); + assertResponse( + client().prepareSearch(indexName).setQuery(QueryBuilders.prefixQuery("version", "2.1.0-A").caseInsensitive(true)), + response -> { + assertEquals(1, response.getHits().getTotalHits().value); + assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[0].getSourceAsMap().get("version")); + } + ); } public void testSort() throws IOException { @@ -167,32 +131,37 @@ public void testSort() throws IOException { client().admin().indices().prepareRefresh(indexName).get(); // sort based on version field - SearchResponse response = client().prepareSearch(indexName) - .setQuery(QueryBuilders.matchAllQuery()) - .addSort("version", SortOrder.DESC) - .get(); - assertEquals(8, response.getHits().getTotalHits().value); - SearchHit[] hits = response.getHits().getHits(); - assertEquals("1.3.567#12", hits[0].getSortValues()[0]); - assertEquals("1.2.3alpha", hits[1].getSortValues()[0]); - assertEquals("21.11.0", hits[2].getSortValues()[0]); - assertEquals("11.1.0", hits[3].getSortValues()[0]); - assertEquals("2.1.0", hits[4].getSortValues()[0]); - assertEquals("2.1.0-alpha.beta", hits[5].getSortValues()[0]); - assertEquals("1.3.0+build.1234567", hits[6].getSortValues()[0]); - assertEquals("1.0.0", hits[7].getSortValues()[0]); - - response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", SortOrder.ASC).get(); - assertEquals(8, response.getHits().getTotalHits().value); - hits = response.getHits().getHits(); - assertEquals("1.0.0", hits[0].getSortValues()[0]); - 
assertEquals("1.3.0+build.1234567", hits[1].getSortValues()[0]); - assertEquals("2.1.0-alpha.beta", hits[2].getSortValues()[0]); - assertEquals("2.1.0", hits[3].getSortValues()[0]); - assertEquals("11.1.0", hits[4].getSortValues()[0]); - assertEquals("21.11.0", hits[5].getSortValues()[0]); - assertEquals("1.2.3alpha", hits[6].getSortValues()[0]); - assertEquals("1.3.567#12", hits[7].getSortValues()[0]); + assertResponse( + client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", SortOrder.DESC), + response -> { + assertEquals(8, response.getHits().getTotalHits().value); + SearchHit[] hits = response.getHits().getHits(); + assertEquals("1.3.567#12", hits[0].getSortValues()[0]); + assertEquals("1.2.3alpha", hits[1].getSortValues()[0]); + assertEquals("21.11.0", hits[2].getSortValues()[0]); + assertEquals("11.1.0", hits[3].getSortValues()[0]); + assertEquals("2.1.0", hits[4].getSortValues()[0]); + assertEquals("2.1.0-alpha.beta", hits[5].getSortValues()[0]); + assertEquals("1.3.0+build.1234567", hits[6].getSortValues()[0]); + assertEquals("1.0.0", hits[7].getSortValues()[0]); + } + ); + + assertResponse( + client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", SortOrder.ASC), + response -> { + assertEquals(8, response.getHits().getTotalHits().value); + var hits = response.getHits().getHits(); + assertEquals("1.0.0", hits[0].getSortValues()[0]); + assertEquals("1.3.0+build.1234567", hits[1].getSortValues()[0]); + assertEquals("2.1.0-alpha.beta", hits[2].getSortValues()[0]); + assertEquals("2.1.0", hits[3].getSortValues()[0]); + assertEquals("11.1.0", hits[4].getSortValues()[0]); + assertEquals("21.11.0", hits[5].getSortValues()[0]); + assertEquals("1.2.3alpha", hits[6].getSortValues()[0]); + assertEquals("1.3.567#12", hits[7].getSortValues()[0]); + } + ); } public void testRegexQuery() throws Exception { @@ -209,36 +178,44 @@ public void testRegexQuery() throws Exception { prepareIndex(indexName).setId("5").setSource(jsonBuilder().startObject().field("version", "2.33.0").endObject()).get(); client().admin().indices().prepareRefresh(indexName).get(); - SearchResponse response = client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "2.*0")).get(); - assertEquals(2, response.getHits().getTotalHits().value); - assertEquals("2.1.0", response.getHits().getHits()[0].getSourceAsMap().get("version")); - assertEquals("2.33.0", response.getHits().getHits()[1].getSourceAsMap().get("version")); - - response = client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "<0-10>.<0-10>.*al.*")).get(); - assertEquals(2, response.getHits().getTotalHits().value); - assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); - assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); + assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "2.*0")), response -> { + assertEquals(2, response.getHits().getTotalHits().value); + assertEquals("2.1.0", response.getHits().getHits()[0].getSourceAsMap().get("version")); + assertEquals("2.33.0", response.getHits().getHits()[1].getSourceAsMap().get("version")); + }); + + assertResponse( + client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "<0-10>.<0-10>.*al.*")), + response -> { + assertEquals(2, response.getHits().getTotalHits().value); + assertEquals("1.0.0alpha2.1.0-rc.1", 
response.getHits().getHits()[0].getSourceAsMap().get("version")); + assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); + } + ); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "1.[0-9].[0-9].*")).get(); - assertEquals(2, response.getHits().getTotalHits().value); - assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); - assertEquals("1.3.0+build.1234567", response.getHits().getHits()[1].getSourceAsMap().get("version")); + assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "1.[0-9].[0-9].*")), response -> { + assertEquals(2, response.getHits().getTotalHits().value); + assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); + assertEquals("1.3.0+build.1234567", response.getHits().getHits()[1].getSourceAsMap().get("version")); + }); // test case sensitivity / insensitivity - response = client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", ".*alpha.*")).get(); - assertEquals(2, response.getHits().getTotalHits().value); - assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); - assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); - - response = client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", ".*Alpha.*")).get(); - assertEquals(0, response.getHits().getTotalHits().value); - - response = client().prepareSearch(indexName) - .setQuery(QueryBuilders.regexpQuery("version", ".*Alpha.*").caseInsensitive(true)) - .get(); - assertEquals(2, response.getHits().getTotalHits().value); - assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); - assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); + assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", ".*alpha.*")), response -> { + assertEquals(2, response.getHits().getTotalHits().value); + assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); + assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); + }); + + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", ".*Alpha.*")), 0); + + assertResponse( + client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", ".*Alpha.*").caseInsensitive(true)), + response -> { + assertEquals(2, response.getHits().getTotalHits().value); + assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); + assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); + } + ); } public void testFuzzyQuery() throws Exception { @@ -256,11 +233,12 @@ public void testFuzzyQuery() throws Exception { prepareIndex(indexName).setId("6").setSource(jsonBuilder().startObject().field("version", "2.a3.0").endObject()).get(); client().admin().indices().prepareRefresh(indexName).get(); - SearchResponse response = client().prepareSearch(indexName).setQuery(QueryBuilders.fuzzyQuery("version", "2.3.0")).get(); - assertEquals(3, response.getHits().getTotalHits().value); - assertEquals("2.1.0", response.getHits().getHits()[0].getSourceAsMap().get("version")); - assertEquals("2.33.0", 
response.getHits().getHits()[1].getSourceAsMap().get("version")); - assertEquals("2.a3.0", response.getHits().getHits()[2].getSourceAsMap().get("version")); + assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.fuzzyQuery("version", "2.3.0")), response -> { + assertEquals(3, response.getHits().getTotalHits().value); + assertEquals("2.1.0", response.getHits().getHits()[0].getSourceAsMap().get("version")); + assertEquals("2.33.0", response.getHits().getHits()[1].getSourceAsMap().get("version")); + assertEquals("2.a3.0", response.getHits().getHits()[2].getSourceAsMap().get("version")); + }); } public void testWildcardQuery() throws Exception { @@ -305,25 +283,27 @@ public void testWildcardQuery() throws Exception { checkWildcardQuery(indexName, "3.1.1??", new String[] { "3.1.1-a", "3.1.1+b", "3.1.123" }); // test case sensitivity / insensitivity - SearchResponse response = client().prepareSearch(indexName).setQuery(QueryBuilders.wildcardQuery("version", "*Alpha*")).get(); - assertEquals(0, response.getHits().getTotalHits().value); - - response = client().prepareSearch(indexName) - .setQuery(QueryBuilders.wildcardQuery("version", "*Alpha*").caseInsensitive(true)) - .get(); - assertEquals(2, response.getHits().getTotalHits().value); - assertEquals("1.0.0-alpha.2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); - assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.wildcardQuery("version", "*Alpha*")), 0); + + assertResponse( + client().prepareSearch(indexName).setQuery(QueryBuilders.wildcardQuery("version", "*Alpha*").caseInsensitive(true)), + response -> { + assertEquals(2, response.getHits().getTotalHits().value); + assertEquals("1.0.0-alpha.2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); + assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); + } + ); } private void checkWildcardQuery(String indexName, String query, String... 
expectedResults) { - SearchResponse response = client().prepareSearch(indexName).setQuery(QueryBuilders.wildcardQuery("version", query)).get(); - assertEquals(expectedResults.length, response.getHits().getTotalHits().value); - for (int i = 0; i < expectedResults.length; i++) { - String expected = expectedResults[i]; - Object actual = response.getHits().getHits()[i].getSourceAsMap().get("version"); - assertEquals("expected " + expected + " in position " + i + " but found " + actual, expected, actual); - } + assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.wildcardQuery("version", query)), response -> { + assertEquals(expectedResults.length, response.getHits().getTotalHits().value); + for (int i = 0; i < expectedResults.length; i++) { + String expected = expectedResults[i]; + Object actual = response.getHits().getHits()[i].getSourceAsMap().get("version"); + assertEquals("expected " + expected + " in position " + i + " but found " + actual, expected, actual); + } + }); } /** @@ -340,59 +320,60 @@ public void testStoreMalformed() throws Exception { prepareIndex(indexName).setId("4").setSource(jsonBuilder().startObject().field("version", "").endObject()).get(); client().admin().indices().prepareRefresh(indexName).get(); - SearchResponse response = client().prepareSearch(indexName).addDocValueField("version").get(); - assertEquals(4, response.getHits().getTotalHits().value); - assertEquals("1", response.getHits().getAt(0).getId()); - assertEquals("1.invalid.0", response.getHits().getAt(0).field("version").getValue()); + assertResponse(client().prepareSearch(indexName).addDocValueField("version"), response -> { + assertEquals(4, response.getHits().getTotalHits().value); + assertEquals("1", response.getHits().getAt(0).getId()); + assertEquals("1.invalid.0", response.getHits().getAt(0).field("version").getValue()); - assertEquals("2", response.getHits().getAt(1).getId()); - assertEquals("2.2.0", response.getHits().getAt(1).field("version").getValue()); + assertEquals("2", response.getHits().getAt(1).getId()); + assertEquals("2.2.0", response.getHits().getAt(1).field("version").getValue()); - assertEquals("3", response.getHits().getAt(2).getId()); - assertEquals("2.2.0-badchar!", response.getHits().getAt(2).field("version").getValue()); + assertEquals("3", response.getHits().getAt(2).getId()); + assertEquals("2.2.0-badchar!", response.getHits().getAt(2).field("version").getValue()); - assertEquals("4", response.getHits().getAt(3).getId()); - assertEquals("", response.getHits().getAt(3).field("version").getValue()); + assertEquals("4", response.getHits().getAt(3).getId()); + assertEquals("", response.getHits().getAt(3).field("version").getValue()); + }); // exact match for malformed term - response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "1.invalid.0")).get(); - assertEquals(1, response.getHits().getTotalHits().value); - - response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "2.2.0-badchar!")).get(); - assertEquals(1, response.getHits().getTotalHits().value); - - response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "")).get(); - assertEquals(1, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "1.invalid.0")), 1); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "2.2.0-badchar!")), 1); + 
assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "")), 1); // also should appear in terms aggs - response = client().prepareSearch(indexName).addAggregation(AggregationBuilders.terms("myterms").field("version")).get(); - Terms terms = response.getAggregations().get("myterms"); - List buckets = terms.getBuckets(); - - assertEquals(4, buckets.size()); - assertEquals("2.2.0", buckets.get(0).getKey()); - assertEquals("", buckets.get(1).getKey()); - assertEquals("1.invalid.0", buckets.get(2).getKey()); - assertEquals("2.2.0-badchar!", buckets.get(3).getKey()); + assertResponse( + client().prepareSearch(indexName).addAggregation(AggregationBuilders.terms("myterms").field("version")), + response -> { + Terms terms = response.getAggregations().get("myterms"); + List buckets = terms.getBuckets(); + + assertEquals(4, buckets.size()); + assertEquals("2.2.0", buckets.get(0).getKey()); + assertEquals("", buckets.get(1).getKey()); + assertEquals("1.invalid.0", buckets.get(2).getKey()); + assertEquals("2.2.0-badchar!", buckets.get(3).getKey()); + } + ); // invalid values should sort after all valid ones - response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", SortOrder.ASC).get(); - assertEquals(4, response.getHits().getTotalHits().value); - SearchHit[] hits = response.getHits().getHits(); - assertEquals("2.2.0", hits[0].getSortValues()[0]); - assertEquals("", hits[1].getSortValues()[0]); - assertEquals("1.invalid.0", hits[2].getSortValues()[0]); - assertEquals("2.2.0-badchar!", hits[3].getSortValues()[0]); + assertResponse( + client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", SortOrder.ASC), + response -> { + assertEquals(4, response.getHits().getTotalHits().value); + SearchHit[] hits = response.getHits().getHits(); + assertEquals("2.2.0", hits[0].getSortValues()[0]); + assertEquals("", hits[1].getSortValues()[0]); + assertEquals("1.invalid.0", hits[2].getSortValues()[0]); + assertEquals("2.2.0-badchar!", hits[3].getSortValues()[0]); + } + ); // ranges can include them, but they are sorted last - response = client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").to("3.0.0")).get(); - assertEquals(1, response.getHits().getTotalHits().value); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("3.0.0")).get(); - assertEquals(3, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").to("3.0.0")), 1); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("3.0.0")), 3); // using the empty string as lower bound should return all "invalid" versions - response = client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("")).get(); - assertEquals(3, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("")), 3); } public void testAggs() throws Exception { @@ -408,31 +389,39 @@ public void testAggs() throws Exception { client().admin().indices().prepareRefresh(indexName).get(); // terms aggs - SearchResponse response = client().prepareSearch(indexName) - .addAggregation(AggregationBuilders.terms("myterms").field("version")) - .get(); - Terms terms = response.getAggregations().get("myterms"); - List buckets = terms.getBuckets(); - - assertEquals(5, buckets.size()); - 
assertEquals("1.0", buckets.get(0).getKey()); - assertEquals("1.3.0", buckets.get(1).getKey()); - assertEquals("2.1.0-alpha", buckets.get(2).getKey()); - assertEquals("2.1.0", buckets.get(3).getKey()); - assertEquals("3.11.5", buckets.get(4).getKey()); + assertResponse( + client().prepareSearch(indexName).addAggregation(AggregationBuilders.terms("myterms").field("version")), + response -> { + Terms terms = response.getAggregations().get("myterms"); + List buckets = terms.getBuckets(); + + assertEquals(5, buckets.size()); + assertEquals("1.0", buckets.get(0).getKey()); + assertEquals("1.3.0", buckets.get(1).getKey()); + assertEquals("2.1.0-alpha", buckets.get(2).getKey()); + assertEquals("2.1.0", buckets.get(3).getKey()); + assertEquals("3.11.5", buckets.get(4).getKey()); + } + ); // cardinality - response = client().prepareSearch(indexName).addAggregation(AggregationBuilders.cardinality("myterms").field("version")).get(); - Cardinality card = response.getAggregations().get("myterms"); - assertEquals(5, card.getValue()); + assertResponse( + client().prepareSearch(indexName).addAggregation(AggregationBuilders.cardinality("myterms").field("version")), + response -> { + Cardinality card = response.getAggregations().get("myterms"); + assertEquals(5, card.getValue()); + } + ); // string stats - response = client().prepareSearch(indexName) - .addAggregation(AnalyticsAggregationBuilders.stringStats("stats").field("version")) - .get(); - InternalStringStats stats = response.getAggregations().get("stats"); - assertEquals(3, stats.getMinLength()); - assertEquals(11, stats.getMaxLength()); + assertResponse( + client().prepareSearch(indexName).addAggregation(AnalyticsAggregationBuilders.stringStats("stats").field("version")), + response -> { + InternalStringStats stats = response.getAggregations().get("stats"); + assertEquals(3, stats.getMinLength()); + assertEquals(11, stats.getMaxLength()); + } + ); } public void testMultiValues() throws Exception { @@ -447,28 +436,37 @@ public void testMultiValues() throws Exception { .get(); client().admin().indices().prepareRefresh(indexName).get(); - SearchResponse response = client().prepareSearch(indexName).addSort("version", SortOrder.ASC).get(); - assertEquals(3, response.getHits().getTotalHits().value); - assertEquals("1", response.getHits().getAt(0).getId()); - assertEquals("2", response.getHits().getAt(1).getId()); - assertEquals("3", response.getHits().getAt(2).getId()); + assertResponse(client().prepareSearch(indexName).addSort("version", SortOrder.ASC), response -> { + assertEquals(3, response.getHits().getTotalHits().value); + assertEquals("1", response.getHits().getAt(0).getId()); + assertEquals("2", response.getHits().getAt(1).getId()); + assertEquals("3", response.getHits().getAt(2).getId()); + }); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "3.0.0")).get(); - assertEquals(1, response.getHits().getTotalHits().value); - assertEquals("1", response.getHits().getAt(0).getId()); + assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "3.0.0")), response -> { + assertEquals(1, response.getHits().getTotalHits().value); + assertEquals("1", response.getHits().getAt(0).getId()); + }); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "4.alpha.0")).get(); - assertEquals(1, response.getHits().getTotalHits().value); - assertEquals("2", response.getHits().getAt(0).getId()); + 
assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "4.alpha.0")), response -> { + assertEquals(1, response.getHits().getTotalHits().value); + assertEquals("2", response.getHits().getAt(0).getId()); + }); // range - response = client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").to("1.5.0")).get(); - assertEquals(1, response.getHits().getTotalHits().value); + assertResponse( + client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").to("1.5.0")), + response -> assertEquals(1, response.getHits().getTotalHits().value) + ); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("1.5.0")).get(); - assertEquals(3, response.getHits().getTotalHits().value); + assertResponse( + client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("1.5.0")), + response -> assertEquals(3, response.getHits().getTotalHits().value) + ); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("5.0.0").to("6.0.0")).get(); - assertEquals(1, response.getHits().getTotalHits().value); + assertResponse( + client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("5.0.0").to("6.0.0")), + response -> assertEquals(1, response.getHits().getTotalHits().value) + ); } } diff --git a/x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearningToRankRescorerIT.java b/x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearningToRankRescorerIT.java index 0e060b3c94644..ecfa868046275 100644 --- a/x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearningToRankRescorerIT.java +++ b/x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearningToRankRescorerIT.java @@ -196,6 +196,7 @@ public void setupModelAndData() throws IOException { adminClient().performRequest(new Request("POST", INDEX_NAME + "/_refresh")); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103072") public void testLearningToRankRescore() throws Exception { Request request = new Request("GET", "store/_search?size=3&error_trace"); request.setJsonEntity(""" @@ -231,6 +232,7 @@ public void testLearningToRankRescore() throws Exception { assertHitScores(client().performRequest(request), List.of(9.0, 9.0, 6.0)); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103072") public void testLearningToRankRescoreSmallWindow() throws Exception { Request request = new Request("GET", "store/_search?size=5"); request.setJsonEntity(""" @@ -243,6 +245,7 @@ public void testLearningToRankRescoreSmallWindow() throws Exception { assertHitScores(client().performRequest(request), List.of(20.0, 20.0, 1.0, 1.0, 1.0)); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103072") public void testLearningToRankRescorerWithChainedRescorers() throws IOException { Request request = new Request("GET", "store/_search?size=5"); request.setJsonEntity(""" diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 749a31de51b07..1031d45facf85 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -46,6 +46,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; +import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.indices.AssociatedIndexDescriptor; import org.elasticsearch.indices.SystemIndexDescriptor; @@ -65,6 +66,7 @@ import org.elasticsearch.plugins.CircuitBreakerPlugin; import org.elasticsearch.plugins.ExtensiblePlugin; import org.elasticsearch.plugins.IngestPlugin; +import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.PersistentTaskPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SearchPlugin; @@ -359,6 +361,7 @@ import org.elasticsearch.xpack.ml.job.process.normalizer.NormalizerProcessFactory; import org.elasticsearch.xpack.ml.job.snapshot.upgrader.SnapshotUpgradeTaskExecutor; import org.elasticsearch.xpack.ml.job.task.OpenJobPersistentTasksExecutor; +import org.elasticsearch.xpack.ml.mapper.SemanticTextFieldMapper; import org.elasticsearch.xpack.ml.notifications.AnomalyDetectionAuditor; import org.elasticsearch.xpack.ml.notifications.DataFrameAnalyticsAuditor; import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; @@ -476,7 +479,8 @@ public class MachineLearning extends Plugin PersistentTaskPlugin, SearchPlugin, ShutdownAwarePlugin, - ExtensiblePlugin { + ExtensiblePlugin, + MapperPlugin { public static final String NAME = "ml"; public static final String BASE_PATH = "/_ml/"; // Endpoints that were deprecated in 7.x can still be called in 8.x using the REST compatibility layer @@ -2288,4 +2292,12 @@ public void signalShutdown(Collection<String> shutdownNodeIds) { mlLifeCycleService.get().signalGracefulShutdown(shutdownNodeIds); } } + + @Override + public Map<String, Mapper.TypeParser> getMappers() { + if (SemanticTextFeature.isEnabled()) { + return Map.of(SemanticTextFieldMapper.CONTENT_TYPE, SemanticTextFieldMapper.PARSER); + } + return Map.of(); + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/SemanticTextFeature.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/SemanticTextFeature.java new file mode 100644 index 0000000000000..f861760803e56 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/SemanticTextFeature.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml; + +import org.elasticsearch.common.util.FeatureFlag; + +/** + * semantic_text feature flag. When the feature is complete, this flag will be removed.
+ */ +public class SemanticTextFeature { + + private SemanticTextFeature() {} + + private static final FeatureFlag FEATURE_FLAG = new FeatureFlag("semantic_text"); + + public static boolean isEnabled() { + return FEATURE_FLAG.isEnabled(); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizationPartOfSpeechDictionary.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizationPartOfSpeechDictionary.java index 09a6846ead344..243286115eb8a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizationPartOfSpeechDictionary.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizationPartOfSpeechDictionary.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ml.aggs.categorization; import java.io.BufferedReader; -import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; @@ -73,49 +72,56 @@ static PartOfSpeech fromCode(char partOfSpeechCode) { } } - /** - * Lazy loaded singleton instance to avoid loading the dictionary repeatedly. - */ - private static CategorizationPartOfSpeechDictionary instance; - private static final Object INIT_LOCK = new Object(); + private static final class Holder { + /** + * Lazy loaded singleton instance to avoid loading the dictionary repeatedly. + */ + private static final CategorizationPartOfSpeechDictionary instance = new CategorizationPartOfSpeechDictionary(); + } /** * Keys are lower case. */ - private final Map partOfSpeechDictionary = new HashMap<>(); - private final int maxDictionaryWordLength; + private final Map partOfSpeechDictionary; - CategorizationPartOfSpeechDictionary(InputStream is) throws IOException { + private final int maxDictionaryWordLength; - int maxLength = 0; - BufferedReader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8)); - String line; - while ((line = reader.readLine()) != null) { - line = line.trim(); - if (line.isEmpty()) { - continue; - } - String[] split = line.split(PART_OF_SPEECH_SEPARATOR); - if (split.length != 2) { - throw new IllegalArgumentException( - "Unexpected format in line [" + line + "]: expected one [" + PART_OF_SPEECH_SEPARATOR + "] separator" - ); - } - if (split[0].isEmpty()) { - throw new IllegalArgumentException( - "Unexpected format in line [" + line + "]: nothing preceding [" + PART_OF_SPEECH_SEPARATOR + "] separator" - ); - } - if (split[1].isEmpty()) { - throw new IllegalArgumentException( - "Unexpected format in line [" + line + "]: nothing following [" + PART_OF_SPEECH_SEPARATOR + "] separator" - ); + CategorizationPartOfSpeechDictionary() { + try (InputStream is = CategorizationPartOfSpeechDictionary.class.getResourceAsStream(DICTIONARY_FILE_PATH)) { + int maxLength = 0; + BufferedReader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8)); + String line; + final Map partOfSpeechMap = new HashMap<>(); + while ((line = reader.readLine()) != null) { + line = line.trim(); + if (line.isEmpty()) { + continue; + } + String[] split = line.split(PART_OF_SPEECH_SEPARATOR); + if (split.length != 2) { + throw new IllegalArgumentException( + "Unexpected format in line [" + line + "]: expected one [" + PART_OF_SPEECH_SEPARATOR + "] separator" + ); + } + if (split[0].isEmpty()) { + throw new IllegalArgumentException( + "Unexpected format in line [" + line + "]: nothing preceding [" + PART_OF_SPEECH_SEPARATOR + "] 
separator" + ); + } + if (split[1].isEmpty()) { + throw new IllegalArgumentException( + "Unexpected format in line [" + line + "]: nothing following [" + PART_OF_SPEECH_SEPARATOR + "] separator" + ); + } + String lowerCaseWord = split[0].toLowerCase(Locale.ROOT); + partOfSpeechMap.put(lowerCaseWord, PartOfSpeech.fromCode(split[1].charAt(0))); + maxLength = Math.max(maxLength, lowerCaseWord.length()); } - String lowerCaseWord = split[0].toLowerCase(Locale.ROOT); - partOfSpeechDictionary.put(lowerCaseWord, PartOfSpeech.fromCode(split[1].charAt(0))); - maxLength = Math.max(maxLength, lowerCaseWord.length()); + partOfSpeechDictionary = Map.copyOf(partOfSpeechMap); + maxDictionaryWordLength = maxLength; + } catch (Exception e) { + throw new AssertionError(e); } - maxDictionaryWordLength = maxLength; } // TODO: now we have this in Java, perform this operation in Java for anomaly detection categorization instead of in C++. @@ -142,17 +148,7 @@ public boolean isInDictionary(String word) { return getPartOfSpeech(word) != PartOfSpeech.NOT_IN_DICTIONARY; } - public static CategorizationPartOfSpeechDictionary getInstance() throws IOException { - if (instance != null) { - return instance; - } - synchronized (INIT_LOCK) { - if (instance == null) { - try (InputStream is = CategorizationPartOfSpeechDictionary.class.getResourceAsStream(DICTIONARY_FILE_PATH)) { - instance = new CategorizationPartOfSpeechDictionary(is); - } - } - return instance; - } + public static CategorizationPartOfSpeechDictionary getInstance() { + return Holder.instance; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java index 8ad7cd92a8e73..829101b3bd551 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java @@ -9,8 +9,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.client.internal.node.NodeClient; @@ -241,7 +241,7 @@ private void executeJobInMiddleOfReindexing(DataFrameAnalyticsTask task, DataFra ClientHelper.executeAsyncWithOrigin( new ParentTaskAssigningClient(client, task.getParentTaskId()), ML_ORIGIN, - DeleteIndexAction.INSTANCE, + TransportDeleteIndexAction.TYPE, new DeleteIndexRequest(config.getDest().getIndex()), ActionListener.wrap( r -> executeStep(task, config, new ReindexingStep(clusterService, client, task, auditor, config, destIndexAllowedSettings)), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapper.java new file mode 100644 index 0000000000000..cf713546a071a --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapper.java @@ -0,0 +1,130 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.mapper; + +import org.apache.lucene.search.Query; +import org.elasticsearch.common.Strings; +import org.elasticsearch.index.fielddata.FieldDataContext; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.mapper.DocumentParserContext; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.InferenceModelFieldType; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperBuilderContext; +import org.elasticsearch.index.mapper.SimpleMappedFieldType; +import org.elasticsearch.index.mapper.SourceValueFetcher; +import org.elasticsearch.index.mapper.TextSearchInfo; +import org.elasticsearch.index.mapper.ValueFetcher; +import org.elasticsearch.index.query.SearchExecutionContext; + +import java.io.IOException; +import java.util.Map; + +/** + * A {@link FieldMapper} for semantic text fields. These fields have a model id reference that is used for performing inference + * at ingestion and query time. + * For now, it is compatible with text expansion models only, but will be extended to support dense vector models as well. + * This field mapper performs no indexing, as inference results will be included as a different field in the document source, and will + * be indexed using a different field mapper. + */ +public class SemanticTextFieldMapper extends FieldMapper { + + public static final String CONTENT_TYPE = "semantic_text"; + + private static SemanticTextFieldMapper toType(FieldMapper in) { + return (SemanticTextFieldMapper) in; + } + + public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n), notInMultiFields(CONTENT_TYPE)); + + private SemanticTextFieldMapper(String simpleName, MappedFieldType mappedFieldType, CopyTo copyTo) { + super(simpleName, mappedFieldType, MultiFields.empty(), copyTo); + } + + @Override + public FieldMapper.Builder getMergeBuilder() { + return new Builder(simpleName()).init(this); + } + + @Override + protected void parseCreateField(DocumentParserContext context) throws IOException { + // Just parses text - no indexing is performed + context.parser().textOrNull(); + } + + @Override + protected String contentType() { + return CONTENT_TYPE; + } + + @Override + public SemanticTextFieldType fieldType() { + return (SemanticTextFieldType) super.fieldType(); + } + + public static class Builder extends FieldMapper.Builder { + + private final Parameter<String> modelId = Parameter.stringParam("model_id", false, m -> toType(m).fieldType().modelId, null) + .addValidator(v -> { + if (Strings.isEmpty(v)) { + throw new IllegalArgumentException("field [model_id] must be specified"); + } + }); + + private final Parameter<Map<String, String>> meta = Parameter.metaParam(); + + public Builder(String name) { + super(name); + } + + @Override + protected Parameter<?>[] getParameters() { + return new Parameter<?>[] { modelId, meta }; + } + + @Override + public SemanticTextFieldMapper build(MapperBuilderContext context) { + return new SemanticTextFieldMapper(name(), new SemanticTextFieldType(name(), modelId.getValue(), meta.getValue()), copyTo); + } + } + + public static class SemanticTextFieldType extends SimpleMappedFieldType implements InferenceModelFieldType { + + private final String modelId; + + public SemanticTextFieldType(String name, String modelId, Map<String, String> meta) {
super(name, false, false, false, TextSearchInfo.NONE, meta); + this.modelId = modelId; + } + + @Override + public String typeName() { + return CONTENT_TYPE; + } + + @Override + public String getInferenceModel() { + return modelId; + } + + @Override + public Query termQuery(Object value, SearchExecutionContext context) { + throw new IllegalArgumentException("termQuery not implemented yet"); + } + + @Override + public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { + return SourceValueFetcher.toString(name(), context, format); + } + + @Override + public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { + throw new IllegalArgumentException("[semantic_text] fields do not support sorting, scripting or aggregating"); + } + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/EmptyStateIndexRemoverTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/EmptyStateIndexRemoverTests.java index 20fadab86008a..b560a758b8e83 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/EmptyStateIndexRemoverTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/EmptyStateIndexRemoverTests.java @@ -7,8 +7,8 @@ package org.elasticsearch.xpack.ml.job.retention; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.admin.indices.stats.CommonStats; @@ -133,14 +133,14 @@ private void assertDeleteActionExecuted(boolean acknowledged) { doAnswer(withResponse(getIndexResponse)).when(client).execute(eq(GetIndexAction.INSTANCE), any(), any()); AcknowledgedResponse deleteIndexResponse = AcknowledgedResponse.of(acknowledged); - doAnswer(withResponse(deleteIndexResponse)).when(client).execute(eq(DeleteIndexAction.INSTANCE), any(), any()); + doAnswer(withResponse(deleteIndexResponse)).when(client).execute(eq(TransportDeleteIndexAction.TYPE), any(), any()); remover.remove(1.0f, listener, () -> false); InOrder inOrder = inOrder(client, listener); inOrder.verify(client).execute(eq(IndicesStatsAction.INSTANCE), any(), any()); inOrder.verify(client).execute(eq(GetIndexAction.INSTANCE), any(), any()); - inOrder.verify(client).execute(eq(DeleteIndexAction.INSTANCE), deleteIndexRequestCaptor.capture(), any()); + inOrder.verify(client).execute(eq(TransportDeleteIndexAction.TYPE), deleteIndexRequestCaptor.capture(), any()); inOrder.verify(listener).onResponse(acknowledged); DeleteIndexRequest deleteIndexRequest = deleteIndexRequestCaptor.getValue(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapperTests.java new file mode 100644 index 0000000000000..ccb8f106e4945 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapperTests.java @@ -0,0 +1,118 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.mapper; + +import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MapperTestCase; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.junit.AssumptionViolatedException; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; + +import static java.util.Collections.singletonList; +import static org.hamcrest.Matchers.containsString; + +public class SemanticTextFieldMapperTests extends MapperTestCase { + + public void testDefaults() throws Exception { + DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); + assertEquals(Strings.toString(fieldMapping(this::minimalMapping)), mapper.mappingSource().toString()); + + ParsedDocument doc1 = mapper.parse(source(this::writeField)); + List<IndexableField> fields = doc1.rootDoc().getFields("field"); + + // No indexable fields + assertTrue(fields.isEmpty()); + } + + public void testModelIdNotPresent() throws IOException { + Exception e = expectThrows( + MapperParsingException.class, + () -> createMapperService(fieldMapping(b -> b.field("type", "semantic_text"))) + ); + assertThat(e.getMessage(), containsString("field [model_id] must be specified")); + } + + public void testCannotBeUsedInMultiFields() { + Exception e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> { + b.field("type", "text"); + b.startObject("fields"); + b.startObject("semantic"); + b.field("type", "semantic_text"); + b.endObject(); + b.endObject(); + }))); + assertThat(e.getMessage(), containsString("Field [semantic] of type [semantic_text] can't be used in multifields")); + } + + public void testUpdatesToModelIdNotSupported() throws IOException { + MapperService mapperService = createMapperService( + fieldMapping(b -> b.field("type", "semantic_text").field("model_id", "test_model")) + ); + Exception e = expectThrows( + IllegalArgumentException.class, + () -> merge(mapperService, fieldMapping(b -> b.field("type", "semantic_text").field("model_id", "another_model"))) + ); + assertThat(e.getMessage(), containsString("Cannot update parameter [model_id] from [test_model] to [another_model]")); + } + + @Override + protected Collection<? extends Plugin> getPlugins() { + return singletonList(new MachineLearning(Settings.EMPTY)); + } + + @Override + protected void minimalMapping(XContentBuilder b) throws IOException { + b.field("type", "semantic_text").field("model_id", "test_model"); + } + + @Override + protected Object getSampleValueForDocument() { + return "value"; + } + + @Override + protected boolean supportsIgnoreMalformed() { + return false; + } + + @Override + protected boolean supportsStoredFields() { + return false; + } + + @Override + protected void registerParameters(ParameterChecker checker) throws IOException {} + + @Override + protected Object generateRandomInputValue(MappedFieldType ft) { + assumeFalse("doc_values are not supported in
semantic_text", true); + return null; + } + + @Override + protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) { + throw new AssumptionViolatedException("not supported"); + } + + @Override + protected IngestScriptSupport ingestScriptSupport() { + throw new AssumptionViolatedException("not supported"); + } +} diff --git a/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java b/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java index 506cd016cf8de..a0990330aecb7 100644 --- a/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java +++ b/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.collapse.CollapseBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -382,7 +383,7 @@ public void enableMonitoring() throws Exception { assertThat( "No monitoring documents yet", - client().prepareSearch(".monitoring-es-" + TEMPLATE_VERSION + "-*").setSize(0).get().getHits().getTotalHits().value, + SearchResponseUtils.getTotalHitsValue(client().prepareSearch(".monitoring-es-" + TEMPLATE_VERSION + "-*").setSize(0)), greaterThan(0L) ); }, 30L, TimeUnit.SECONDS); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java index a732e80e18f37..ef4f22f852b37 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.test.ESIntegTestCase; @@ -126,47 +127,37 @@ public void testExport() throws Exception { ensureYellowAndNoInitializingShards(".monitoring-*"); assertThat( - prepareSearch(".monitoring-es-*").setSize(0) - .setQuery(QueryBuilders.termQuery("type", "cluster_stats")) - .get() - .getHits() - .getTotalHits().value, + SearchResponseUtils.getTotalHitsValue( + prepareSearch(".monitoring-es-*").setSize(0).setQuery(QueryBuilders.termQuery("type", "cluster_stats")) + ), greaterThan(0L) ); assertThat( - prepareSearch(".monitoring-es-*").setSize(0) - .setQuery(QueryBuilders.termQuery("type", "index_recovery")) - .get() - .getHits() - .getTotalHits().value, + SearchResponseUtils.getTotalHitsValue( + prepareSearch(".monitoring-es-*").setSize(0).setQuery(QueryBuilders.termQuery("type", "index_recovery")) + ), greaterThan(0L) ); assertThat( - prepareSearch(".monitoring-es-*").setSize(0) - .setQuery(QueryBuilders.termQuery("type", "index_stats")) - .get() - .getHits() - .getTotalHits().value, + 
SearchResponseUtils.getTotalHitsValue( + prepareSearch(".monitoring-es-*").setSize(0).setQuery(QueryBuilders.termQuery("type", "index_stats")) + ), greaterThan(0L) ); assertThat( - prepareSearch(".monitoring-es-*").setSize(0) - .setQuery(QueryBuilders.termQuery("type", "indices_stats")) - .get() - .getHits() - .getTotalHits().value, + SearchResponseUtils.getTotalHitsValue( + prepareSearch(".monitoring-es-*").setSize(0).setQuery(QueryBuilders.termQuery("type", "indices_stats")) + ), greaterThan(0L) ); assertThat( - prepareSearch(".monitoring-es-*").setSize(0) - .setQuery(QueryBuilders.termQuery("type", "shards")) - .get() - .getHits() - .getTotalHits().value, + SearchResponseUtils.getTotalHitsValue( + prepareSearch(".monitoring-es-*").setSize(0).setQuery(QueryBuilders.termQuery("type", "shards")) + ), greaterThan(0L) ); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java index 901150d0bca7c..d6e15ea25c8e1 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java @@ -265,19 +265,21 @@ private void assertWatchesExist() { SearchSourceBuilder searchSource = SearchSourceBuilder.searchSource() .query(QueryBuilders.matchQuery("metadata.xpack.cluster_uuid", clusterUUID)); Set watchIds = new HashSet<>(Arrays.asList(ClusterAlertsUtil.WATCH_IDS)); - for (SearchHit hit : prepareSearch(".watches").setSource(searchSource).get().getHits().getHits()) { - String watchId = ObjectPath.eval("metadata.xpack.watch", hit.getSourceAsMap()); - assertNotNull("Missing watch ID", watchId); - assertTrue("found unexpected watch id", watchIds.contains(watchId)); - - String version = ObjectPath.eval("metadata.xpack.version_created", hit.getSourceAsMap()); - assertNotNull("Missing version from returned watch [" + watchId + "]", version); - assertTrue(Version.fromId(Integer.parseInt(version)).onOrAfter(Version.fromId(ClusterAlertsUtil.LAST_UPDATED_VERSION))); - - String uuid = ObjectPath.eval("metadata.xpack.cluster_uuid", hit.getSourceAsMap()); - assertNotNull("Missing cluster uuid", uuid); - assertEquals(clusterUUID, uuid); - } + assertResponse(prepareSearch(".watches").setSource(searchSource), response -> { + for (SearchHit hit : response.getHits().getHits()) { + String watchId = ObjectPath.eval("metadata.xpack.watch", hit.getSourceAsMap()); + assertNotNull("Missing watch ID", watchId); + assertTrue("found unexpected watch id", watchIds.contains(watchId)); + + String version = ObjectPath.eval("metadata.xpack.version_created", hit.getSourceAsMap()); + assertNotNull("Missing version from returned watch [" + watchId + "]", version); + assertTrue(Version.fromId(Integer.parseInt(version)).onOrAfter(Version.fromId(ClusterAlertsUtil.LAST_UPDATED_VERSION))); + + String uuid = ObjectPath.eval("metadata.xpack.cluster_uuid", hit.getSourceAsMap()); + assertNotNull("Missing cluster uuid", uuid); + assertEquals(clusterUUID, uuid); + } + }); } private void assertNoWatchesExist() { diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java index 
923269646d4d1..6919932a7823c 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java @@ -14,8 +14,8 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; @@ -422,9 +422,8 @@ private ActionResponse verifyIndexUpgraded( assertThat(request, instanceOf(CreateIndexRequest.class)); assertNotNull(listener); return new CreateIndexResponse(true, true, ((CreateIndexRequest) request).index()); - } else if (action instanceof DeleteIndexAction) { + } else if (action == TransportDeleteIndexAction.TYPE) { indicesDeleted.incrementAndGet(); - assertThat(action, instanceOf(DeleteIndexAction.class)); assertThat(request, instanceOf(DeleteIndexRequest.class)); assertNotNull(listener); return AcknowledgedResponse.TRUE; diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/NumericUtils.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/NumericUtils.java index 76e9891a71765..d2710a980a6ee 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/NumericUtils.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/NumericUtils.java @@ -133,6 +133,19 @@ private static long twosComplement(long l) { return l ^ TWOS_COMPLEMENT_BITMASK; } + /** + * Check if the provided double is both finite and a number (i.e. not Double.NaN). + * @param dbl The double to verify. + * @return The input value. + * @throws ArithmeticException if the provided double is either infinite or not a number. + */ + public static double asFiniteNumber(double dbl) { + if (Double.isNaN(dbl) || Double.isInfinite(dbl)) { + throw new ArithmeticException("not a finite double number: " + dbl); + } + return dbl; + } + /** * Converts a number to an integer, saturating that integer if the number doesn't fit naturally. 
That is to say, values * greater than Integer.MAX_VALUE yield Integer.MAX_VALUE and values less than Integer.MIN_VALUE yield Integer.MIN_VALUE diff --git a/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java index f12ae0bc86571..9c2fd118d59d9 100644 --- a/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java +++ b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java @@ -508,6 +508,7 @@ public void testAuthenticationFailureIfDelegatedAuthorizationFails() throws Exce } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103308") public void testReloadClientSecret() throws Exception { final String principal = SERVICE_SUBJECT.get(); final String username = getUsernameFromPrincipal(principal); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java index 7aeaccf63bab4..b1a76a4559812 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.internal.Client; @@ -25,6 +24,7 @@ import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.NONE; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.containsString; @@ -77,8 +77,7 @@ public void testDateMathExpressionsCanBeAuthorized() throws Exception { if (refeshOnOperation == false) { client.admin().indices().prepareRefresh(expression).get(); } - SearchResponse searchResponse = client.prepareSearch(expression).setQuery(QueryBuilders.matchAllQuery()).get(); - assertThat(searchResponse.getHits().getTotalHits().value, is(1L)); + assertHitCount(client.prepareSearch(expression).setQuery(QueryBuilders.matchAllQuery()), 1); assertResponse( client.prepareMultiSearch().add(client.prepareSearch(expression).setQuery(QueryBuilders.matchAllQuery()).request()), diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java index 0e8cb486ffb2d..1e1d8a7f0654c 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java +++ 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; -import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.SecureString; @@ -191,31 +191,31 @@ public void testRequestCacheForDLS() { final Client limitedClient = limitedClient(); // Search first with power client, it should see all docs - assertSearchResponse(powerClient.prepareSearch(DLS_INDEX).setRequestCache(true).get(), Set.of("101", "102")); + assertSearchResponse(powerClient.prepareSearch(DLS_INDEX).setRequestCache(true), Set.of("101", "102")); assertCacheState(DLS_INDEX, 0, 1); // Search with the limited client and it should see only one doc (i.e. it won't use cache entry for power client) - assertSearchResponse(limitedClient.prepareSearch(DLS_INDEX).setRequestCache(true).get(), Set.of("101")); + assertSearchResponse(limitedClient.prepareSearch(DLS_INDEX).setRequestCache(true), Set.of("101")); assertCacheState(DLS_INDEX, 0, 2); // Execute the above search again and it should use the cache entry for limited client - assertSearchResponse(limitedClient.prepareSearch(DLS_INDEX).setRequestCache(true).get(), Set.of("101")); + assertSearchResponse(limitedClient.prepareSearch(DLS_INDEX).setRequestCache(true), Set.of("101")); assertCacheState(DLS_INDEX, 1, 2); // Execute the search with power client again and it should still see all docs - assertSearchResponse(powerClient.prepareSearch(DLS_INDEX).setRequestCache(true).get(), Set.of("101", "102")); + assertSearchResponse(powerClient.prepareSearch(DLS_INDEX).setRequestCache(true), Set.of("101", "102")); assertCacheState(DLS_INDEX, 2, 2); // The limited client has a different DLS query for dls-alias compared to the underlying dls-index - assertSearchResponse(limitedClient.prepareSearch(DLS_ALIAS).setRequestCache(true).get(), Set.of("102")); + assertSearchResponse(limitedClient.prepareSearch(DLS_ALIAS).setRequestCache(true), Set.of("102")); assertCacheState(DLS_INDEX, 2, 3); - assertSearchResponse(limitedClient.prepareSearch(DLS_ALIAS).setRequestCache(true).get(), Set.of("102")); + assertSearchResponse(limitedClient.prepareSearch(DLS_ALIAS).setRequestCache(true), Set.of("102")); assertCacheState(DLS_INDEX, 3, 3); // Search with limited client for dls-alias and dls-index returns all docs. The cache entry is however different // from the power client, i.e. still no sharing even if the end results are the same. This is because the // search with limited client still have DLS queries attached to it. 
- assertSearchResponse(limitedClient.prepareSearch(DLS_ALIAS, DLS_INDEX).setRequestCache(true).get(), Set.of("101", "102")); + assertSearchResponse(limitedClient.prepareSearch(DLS_ALIAS, DLS_INDEX).setRequestCache(true), Set.of("101", "102")); assertCacheState(DLS_INDEX, 3, 4); } @@ -224,37 +224,29 @@ public void testRequestCacheForFLS() { final Client limitedClient = limitedClient(); // Search first with power client, it should see all fields - assertSearchResponse( - powerClient.prepareSearch(FLS_INDEX).setRequestCache(true).get(), - Set.of("201", "202"), - Set.of("public", "private") - ); + assertSearchResponse(powerClient.prepareSearch(FLS_INDEX).setRequestCache(true), Set.of("201", "202"), Set.of("public", "private")); assertCacheState(FLS_INDEX, 0, 1); // Search with limited client and it should see only public field - assertSearchResponse(limitedClient.prepareSearch(FLS_INDEX).setRequestCache(true).get(), Set.of("201", "202"), Set.of("public")); + assertSearchResponse(limitedClient.prepareSearch(FLS_INDEX).setRequestCache(true), Set.of("201", "202"), Set.of("public")); assertCacheState(FLS_INDEX, 0, 2); // Search with limited client again and it should use the cache - assertSearchResponse(limitedClient.prepareSearch(FLS_INDEX).setRequestCache(true).get(), Set.of("201", "202"), Set.of("public")); + assertSearchResponse(limitedClient.prepareSearch(FLS_INDEX).setRequestCache(true), Set.of("201", "202"), Set.of("public")); assertCacheState(FLS_INDEX, 1, 2); // Search again with power client, it should use its own cache entry - assertSearchResponse( - powerClient.prepareSearch(FLS_INDEX).setRequestCache(true).get(), - Set.of("201", "202"), - Set.of("public", "private") - ); + assertSearchResponse(powerClient.prepareSearch(FLS_INDEX).setRequestCache(true), Set.of("201", "202"), Set.of("public", "private")); assertCacheState(FLS_INDEX, 2, 2); // The fls-alias has a different FLS definition compared to its underlying fls-index. - assertSearchResponse(limitedClient.prepareSearch(FLS_ALIAS).setRequestCache(true).get(), Set.of("201", "202"), Set.of("private")); + assertSearchResponse(limitedClient.prepareSearch(FLS_ALIAS).setRequestCache(true), Set.of("201", "202"), Set.of("private")); assertCacheState(FLS_INDEX, 2, 3); // Search with the limited client for both fls-alias and fls-index and all docs and fields are also returned. 
// But request cache is not shared with the power client because it still has a different indexAccessControl assertSearchResponse( - limitedClient.prepareSearch(FLS_ALIAS, FLS_INDEX).setRequestCache(true).get(), + limitedClient.prepareSearch(FLS_ALIAS, FLS_INDEX).setRequestCache(true), Set.of("201", "202"), Set.of("public", "private") ); @@ -267,7 +259,7 @@ public void testRequestCacheForBothDLSandFLS() throws ExecutionException, Interr // Search first with power client, it should see all fields assertSearchResponse( - powerClient.prepareSearch(INDEX).setRequestCache(true).get(), + powerClient.prepareSearch(INDEX).setRequestCache(true), Set.of("1", "2"), Set.of("number", "letter", "public", "private") ); @@ -278,25 +270,17 @@ public void testRequestCacheForBothDLSandFLS() throws ExecutionException, Interr expectThrows(ElasticsearchSecurityException.class, () -> limitedClient.prepareSearch(INDEX).setRequestCache(true).get()); // Search for alias1 that points to index and has DLS/FLS - assertSearchResponse( - limitedClient.prepareSearch(ALIAS1).setRequestCache(true).get(), - Set.of("1"), - Set.of("number", "letter", "public") - ); + assertSearchResponse(limitedClient.prepareSearch(ALIAS1).setRequestCache(true), Set.of("1"), Set.of("number", "letter", "public")); assertCacheState(INDEX, 0, 2); // Search for alias2 that also points to index but has a different set of DLS/FLS - assertSearchResponse( - limitedClient.prepareSearch(ALIAS2).setRequestCache(true).get(), - Set.of("2"), - Set.of("number", "letter", "private") - ); + assertSearchResponse(limitedClient.prepareSearch(ALIAS2).setRequestCache(true), Set.of("2"), Set.of("number", "letter", "private")); assertCacheState(INDEX, 0, 3); // Search for all-alias that has full read access to the underlying index // This makes it share the cache entry of the power client assertSearchResponse( - limitedClient.prepareSearch(ALL_ALIAS).setRequestCache(true).get(), + limitedClient.prepareSearch(ALL_ALIAS).setRequestCache(true), Set.of("1", "2"), Set.of("number", "letter", "public", "private") ); @@ -305,7 +289,7 @@ public void testRequestCacheForBothDLSandFLS() throws ExecutionException, Interr // Similarly, search for alias1 and all-alias results in full read access to the index // and again reuse the cache entry of the power client assertSearchResponse( - limitedClient.prepareSearch(ALIAS1, ALL_ALIAS).setRequestCache(true).get(), + limitedClient.prepareSearch(ALIAS1, ALL_ALIAS).setRequestCache(true), Set.of("1", "2"), Set.of("number", "letter", "public", "private") ); @@ -314,7 +298,7 @@ public void testRequestCacheForBothDLSandFLS() throws ExecutionException, Interr // Though search for both alias1 and alias2 is effectively full read access to index, // it does not share the cache entry of the power client because role queries still exist. 
assertSearchResponse( - limitedClient.prepareSearch(ALIAS1, ALIAS2).setRequestCache(true).get(), + limitedClient.prepareSearch(ALIAS1, ALIAS2).setRequestCache(true), Set.of("1", "2"), Set.of("number", "letter", "public", "private") ); @@ -325,7 +309,7 @@ public void testRequestCacheForBothDLSandFLS() throws ExecutionException, Interr // It should not reuse any entries from the cache assertSearchResponse( - limitedClientApiKey.prepareSearch(ALL_ALIAS).setRequestCache(true).get(), + limitedClientApiKey.prepareSearch(ALL_ALIAS).setRequestCache(true), Set.of("1"), Set.of("letter", "public", "private") ); @@ -341,43 +325,23 @@ public void testRequestCacheWithTemplateRoleQuery() { ); // Search first with user1 and only one document will be return with the corresponding username - assertSearchResponse( - client1.prepareSearch(DLS_TEMPLATE_ROLE_QUERY_INDEX).setRequestCache(true).get(), - Set.of("1"), - Set.of("username") - ); + assertSearchResponse(client1.prepareSearch(DLS_TEMPLATE_ROLE_QUERY_INDEX).setRequestCache(true), Set.of("1"), Set.of("username")); assertCacheState(DLS_TEMPLATE_ROLE_QUERY_INDEX, 0, 1); // Search with user2 will not use user1's cache because template query is resolved differently for them - assertSearchResponse( - client2.prepareSearch(DLS_TEMPLATE_ROLE_QUERY_INDEX).setRequestCache(true).get(), - Set.of("2"), - Set.of("username") - ); + assertSearchResponse(client2.prepareSearch(DLS_TEMPLATE_ROLE_QUERY_INDEX).setRequestCache(true), Set.of("2"), Set.of("username")); assertCacheState(DLS_TEMPLATE_ROLE_QUERY_INDEX, 0, 2); // Search with user1 again will use user1's cache - assertSearchResponse( - client1.prepareSearch(DLS_TEMPLATE_ROLE_QUERY_INDEX).setRequestCache(true).get(), - Set.of("1"), - Set.of("username") - ); + assertSearchResponse(client1.prepareSearch(DLS_TEMPLATE_ROLE_QUERY_INDEX).setRequestCache(true), Set.of("1"), Set.of("username")); assertCacheState(DLS_TEMPLATE_ROLE_QUERY_INDEX, 1, 2); // Search with user2 again will use user2's cache - assertSearchResponse( - client2.prepareSearch(DLS_TEMPLATE_ROLE_QUERY_INDEX).setRequestCache(true).get(), - Set.of("2"), - Set.of("username") - ); + assertSearchResponse(client2.prepareSearch(DLS_TEMPLATE_ROLE_QUERY_INDEX).setRequestCache(true), Set.of("2"), Set.of("username")); assertCacheState(DLS_TEMPLATE_ROLE_QUERY_INDEX, 2, 2); // Since the DLS for the alias uses a stored script, this should cause the request cached to be disabled - assertSearchResponse( - client1.prepareSearch(DLS_TEMPLATE_ROLE_QUERY_ALIAS).setRequestCache(true).get(), - Set.of("1"), - Set.of("username") - ); + assertSearchResponse(client1.prepareSearch(DLS_TEMPLATE_ROLE_QUERY_ALIAS).setRequestCache(true), Set.of("1"), Set.of("username")); // No cache should be used assertCacheState(DLS_TEMPLATE_ROLE_QUERY_INDEX, 2, 2); } @@ -455,19 +419,24 @@ private Client limitedClientApiKey() throws ExecutionException, InterruptedExcep return client().filterWithHeader(Map.of("Authorization", "ApiKey " + base64ApiKey)); } - private void assertSearchResponse(SearchResponse searchResponse, Set docIds) { - assertSearchResponse(searchResponse, docIds, null); + private void assertSearchResponse(SearchRequestBuilder requestBuilder, Set docIds) { + assertSearchResponse(requestBuilder, docIds, null); } - private void assertSearchResponse(SearchResponse searchResponse, Set docIds, Set fieldNames) { - assertThat(searchResponse.getFailedShards(), equalTo(0)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) docIds.size())); - final SearchHit[] hits = 
searchResponse.getHits().getHits(); - assertThat(Arrays.stream(hits).map(SearchHit::getId).collect(Collectors.toUnmodifiableSet()), equalTo(docIds)); - if (fieldNames != null) { - for (SearchHit hit : hits) { - assertThat(hit.getSourceAsMap().keySet(), equalTo(fieldNames)); + private void assertSearchResponse(SearchRequestBuilder requestBuilder, Set docIds, Set fieldNames) { + var searchResponse = requestBuilder.get(); + try { + assertThat(searchResponse.getFailedShards(), equalTo(0)); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) docIds.size())); + final SearchHit[] hits = searchResponse.getHits().getHits(); + assertThat(Arrays.stream(hits).map(SearchHit::getId).collect(Collectors.toUnmodifiableSet()), equalTo(docIds)); + if (fieldNames != null) { + for (SearchHit hit : hits) { + assertThat(hit.getSourceAsMap().keySet(), equalTo(fieldNames)); + } } + } finally { + searchResponse.decRef(); } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentAndFieldLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentAndFieldLevelSecurityTests.java index 164d28216ea93..57d18abaf1a92 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentAndFieldLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentAndFieldLevelSecurityTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilities; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.internal.Requests; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.common.settings.SecureString; @@ -35,6 +34,7 @@ import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.BASIC_AUTH_HEADER; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; @@ -113,32 +113,41 @@ public void testSimpleQuery() { prepareIndex("test").setId("1").setSource("id", "1", "field1", "value1").setRefreshPolicy(IMMEDIATE).get(); prepareIndex("test").setId("2").setSource("id", "2", "field2", "value2").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ).prepareSearch("test").get(); - assertHitCount(response, 1); - assertSearchHits(response, "1"); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("id").toString(), equalTo("1")); - - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - .get(); - 
assertHitCount(response, 1); - assertSearchHits(response, "2"); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("id").toString(), equalTo("2")); - - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user4", USERS_PASSWD))) - .prepareSearch("test") - .addSort("id", SortOrder.ASC) - .get(); - assertHitCount(response, 2); - assertSearchHits(response, "1", "2"); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(1).getSourceAsMap().get("field2").toString(), equalTo("value2")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test"), + response -> { + assertHitCount(response, 1); + assertSearchHits(response, "1"); + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("id").toString(), equalTo("1")); + } + ); + + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test"), + response -> { + assertHitCount(response, 1); + assertSearchHits(response, "2"); + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("id").toString(), equalTo("2")); + } + ); + + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user4", USERS_PASSWD))) + .prepareSearch("test") + .addSort("id", SortOrder.ASC), + response -> { + assertHitCount(response, 2); + assertSearchHits(response, "1", "2"); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); + assertThat(response.getHits().getAt(1).getSourceAsMap().get("field2").toString(), equalTo("value2")); + } + ); } public void testUpdatesAreRejected() { @@ -181,13 +190,17 @@ public void testDLSIsAppliedBeforeFLS() { prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value1").setRefreshPolicy(IMMEDIATE).get(); prepareIndex("test").setId("2").setSource("field1", "value2", "field2", "value2").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user5", USERS_PASSWD)) - ).prepareSearch("test").setQuery(QueryBuilders.termQuery("field1", "value2")).get(); - assertHitCount(response, 1); - assertSearchHits(response, "2"); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value2")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user5", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(QueryBuilders.termQuery("field1", "value2")), + response -> { + assertHitCount(response, 1); + assertSearchHits(response, "2"); + 
assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value2")); + } + ); assertHitCount( client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user5", USERS_PASSWD))) @@ -209,48 +222,60 @@ public void testQueryCache() { // Both users have the same role query, but user3 has access to field2 and not field1, which should result in zero hits: int max = scaledRandomIntBetween(4, 32); for (int i = 0; i < max; i++) { - SearchResponse response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ).prepareSearch("test").get(); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("id"), equalTo("1")); - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - .get(); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getId(), equalTo("2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2"), equalTo("value2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("id"), equalTo("2")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test"), + response -> { + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value1")); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("id"), equalTo("1")); + } + ); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test"), + response -> { + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getId(), equalTo("2")); + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2"), equalTo("value2")); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("id"), equalTo("2")); + } + ); // this is a bit weird the document level permission (all docs with field2:value2) don't match with the field level // permissions (field1), // this results in document 2 being returned but no fields are visible: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) - .prepareSearch("test") - .get(); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getId(), equalTo("2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("id"), equalTo("2")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) + .prepareSearch("test"), + response -> { + assertHitCount(response, 1); + 
assertThat(response.getHits().getAt(0).getId(), equalTo("2")); + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("id"), equalTo("2")); + } + ); // user4 has all roles - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user4", USERS_PASSWD))) - .prepareSearch("test") - .addSort("id", SortOrder.ASC) - .get(); - assertHitCount(response, 2); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("id"), equalTo("1")); - assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(1).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(1).getSourceAsMap().get("field2"), equalTo("value2")); - assertThat(response.getHits().getAt(1).getSourceAsMap().get("id"), equalTo("2")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user4", USERS_PASSWD))) + .prepareSearch("test") + .addSort("id", SortOrder.ASC), + response -> { + assertHitCount(response, 2); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value1")); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("id"), equalTo("1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + assertThat(response.getHits().getAt(1).getSourceAsMap().size(), equalTo(2)); + assertThat(response.getHits().getAt(1).getSourceAsMap().get("field2"), equalTo("value2")); + assertThat(response.getHits().getAt(1).getSourceAsMap().get("id"), equalTo("2")); + } + ); } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityFeatureUsageTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityFeatureUsageTests.java index 116e94cafcadf..258c4acd6c7f2 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityFeatureUsageTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityFeatureUsageTests.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.integration; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; @@ -20,6 +19,7 @@ import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.BASIC_AUTH_HEADER; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; @@ -89,13 +89,16 @@ public void 
testDlsFeatureUsageTracking() throws Exception { prepareIndex("test").setId("2").setSource("field2", "value2").setRefreshPolicy(IMMEDIATE).get(); prepareIndex("test").setId("3").setSource("field3", "value3").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse response = internalCluster().coordOnlyNodeClient() - .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) - .prepareSearch("test") - .setQuery(randomBoolean() ? QueryBuilders.termQuery("field1", "value1") : QueryBuilders.matchAllQuery()) - .get(); - assertHitCount(response, 1); - assertSearchHits(response, "1"); + assertResponse( + internalCluster().coordOnlyNodeClient() + .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(randomBoolean() ? QueryBuilders.termQuery("field1", "value1") : QueryBuilders.matchAllQuery()), + response -> { + assertHitCount(response, 1); + assertSearchHits(response, "1"); + } + ); // coordinating only node should not tack DLS/FLS feature usage assertDlsFlsNotTrackedOnCoordOnlyNode(); @@ -109,13 +112,15 @@ public void testDlsFlsFeatureUsageNotTracked() { prepareIndex("test").setId("2").setSource("id", "2", "field2", "value2").setRefreshPolicy(IMMEDIATE).get(); // Running a search with user2 (which has role3 without DLS/FLS) should not trigger feature tracking. - SearchResponse response = internalCluster().coordOnlyNodeClient() - .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - .get(); - assertHitCount(response, 2); - assertSearchHits(response, "1", "2"); - + assertResponse( + internalCluster().coordOnlyNodeClient() + .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test"), + response -> { + assertHitCount(response, 2); + assertSearchHits(response, "1", "2"); + } + ); assertDlsFlsNotTrackedAcrossAllNodes(); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityRandomTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityRandomTests.java index 61126810e3df1..73897fc38633a 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityRandomTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityRandomTests.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilders; @@ -21,6 +20,7 @@ import java.util.List; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.BASIC_AUTH_HEADER; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.equalTo; @@ -105,14 +105,17 @@ public void testDuelWithAliasFilters() throws Exception { builder.get(); for (int roleI = 1; roleI <= 
numberOfRoles; roleI++) { - SearchResponse searchResponse1 = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user" + roleI, USERS_PASSWD)) - ).prepareSearch("test").get(); - SearchResponse searchResponse2 = prepareSearch("alias" + roleI).get(); - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(searchResponse2.getHits().getTotalHits().value)); - for (int hitI = 0; hitI < searchResponse1.getHits().getHits().length; hitI++) { - assertThat(searchResponse1.getHits().getAt(hitI).getId(), equalTo(searchResponse2.getHits().getAt(hitI).getId())); - } + final int role = roleI; + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user" + roleI, USERS_PASSWD))) + .prepareSearch("test"), + searchResponse1 -> assertResponse(prepareSearch("alias" + role), searchResponse2 -> { + assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(searchResponse2.getHits().getTotalHits().value)); + for (int hitI = 0; hitI < searchResponse1.getHits().getHits().length; hitI++) { + assertThat(searchResponse1.getHits().getAt(hitI).getId(), equalTo(searchResponse2.getHits().getAt(hitI).getId())); + } + }) + ); } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java index e42fab4708b8a..c10dc7f1da25c 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java @@ -92,7 +92,9 @@ import static org.elasticsearch.join.query.JoinQueryBuilders.hasParentQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures; @@ -542,18 +544,19 @@ public void testPercolateQueryWithIndexedDocWithDLS() { .setRefreshPolicy(IMMEDIATE) .get(); // user1 can preform the percolate search for doc#1 in the doc_index because user1 has access to the doc - SearchResponse result = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ).prepareSearch("query_index").setQuery(new PercolateQueryBuilder("query", "doc_index", "1", null, null, null)).get(); - assertNoFailures(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("query_index") + .setQuery(new PercolateQueryBuilder("query", "doc_index", "1", null, null, null)), + 1 + ); // user2 can access the query_index itself (without performing percolate search) - result = 
client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("query_index") - .setQuery(QueryBuilders.matchAllQuery()) - .get(); - assertNoFailures(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("query_index") + .setQuery(QueryBuilders.matchAllQuery()), + 1 + ); // user2 cannot access doc#1 of the doc_index so the percolate search fails because doc#1 cannot be found ResourceNotFoundException e = expectThrows( ResourceNotFoundException.class, @@ -587,7 +590,6 @@ public void testGeoQueryWithIndexedShapeWithDLS() { ShapeQueryBuilder shapeQuery = new ShapeQueryBuilder("search_field", "1").relation(ShapeRelation.WITHIN) .indexedShapeIndex("shape_index") .indexedShapePath("shape_field"); - SearchResponse result; // user1 has access to doc#1 of the shape_index so everything works SearchRequestBuilder requestBuilder = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) @@ -597,16 +599,14 @@ public void testGeoQueryWithIndexedShapeWithDLS() { } else { requestBuilder.setQuery(shapeQuery); } - result = requestBuilder.get(); - assertNoFailures(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(requestBuilder, 1); // user2 does not have access to doc#1 of the shape_index - result = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("search_index") - .setQuery(QueryBuilders.matchAllQuery()) - .get(); - assertNoFailures(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("search_index") + .setQuery(QueryBuilders.matchAllQuery()), + 1 + ); IllegalArgumentException e; if (randomBoolean()) { e = expectThrows( @@ -696,8 +696,7 @@ public void testTermsLookupOnIndexWithDLS() { assertHitCount( client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) .prepareSearch("search_index") - .setQuery(lookup) - .get(), + .setQuery(lookup), 0 ); assertSearchHitsWithoutFailures( @@ -877,36 +876,48 @@ public void testKnnSearch() throws Exception { } // user1 should only be able to see docs with field1: value1 - SearchResponse response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ).prepareSearch("test").setQuery(query).addFetchField("field1").setSize(10).get(); - assertEquals(5, response.getHits().getTotalHits().value); - assertEquals(5, response.getHits().getHits().length); - for (SearchHit hit : response.getHits().getHits()) { - assertNotNull(hit.field("field1")); - } + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(query) + .addFetchField("field1") + .setSize(10), + response -> { + assertEquals(5, response.getHits().getTotalHits().value); + assertEquals(5, response.getHits().getHits().length); + for (SearchHit hit : response.getHits().getHits()) { + assertNotNull(hit.field("field1")); + } + } + ); // user2 should only be able to see docs with field2: value2 - response = 
client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - .setQuery(query) - .addFetchField("field2") - .setSize(10) - .get(); - assertEquals(5, response.getHits().getTotalHits().value); - assertEquals(5, response.getHits().getHits().length); - for (SearchHit hit : response.getHits().getHits()) { - assertNotNull(hit.field("field2")); - } + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(query) + .addFetchField("field2") + .setSize(10), + response -> { + assertEquals(5, response.getHits().getTotalHits().value); + assertEquals(5, response.getHits().getHits().length); + for (SearchHit hit : response.getHits().getHits()) { + assertNotNull(hit.field("field2")); + } + } + ); // user3 can see all indexed docs - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) - .prepareSearch("test") - .setQuery(query) - .setSize(10) - .get(); - assertEquals(10, response.getHits().getTotalHits().value); - assertEquals(10, response.getHits().getHits().length); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(query) + .setSize(10), + response -> { + assertEquals(10, response.getHits().getTotalHits().value); + assertEquals(10, response.getHits().getHits().length); + } + ); } public void testGlobalAggregation() throws Exception { @@ -918,53 +929,63 @@ public void testGlobalAggregation() throws Exception { prepareIndex("test").setId("2").setSource("field2", "value2").setRefreshPolicy(IMMEDIATE).get(); prepareIndex("test").setId("3").setSource("field3", "value3").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse response = prepareSearch("test").addAggregation( - AggregationBuilders.global("global").subAggregation(AggregationBuilders.terms("field2").field("field2")) - ).get(); - assertHitCount(response, 3); - assertSearchHits(response, "1", "2", "3"); - - Global globalAgg = response.getAggregations().get("global"); - assertThat(globalAgg.getDocCount(), equalTo(3L)); - Terms termsAgg = globalAgg.getAggregations().get("field2"); - assertThat(termsAgg.getBuckets().get(0).getKeyAsString(), equalTo("value2")); - assertThat(termsAgg.getBuckets().get(0).getDocCount(), equalTo(1L)); - - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) - .prepareSearch("test") - .addAggregation(AggregationBuilders.global("global").subAggregation(AggregationBuilders.terms("field2").field("field2"))) - .get(); - assertHitCount(response, 1); - assertSearchHits(response, "1"); - - globalAgg = response.getAggregations().get("global"); - assertThat(globalAgg.getDocCount(), equalTo(1L)); - termsAgg = globalAgg.getAggregations().get("field2"); - assertThat(termsAgg.getBuckets().size(), equalTo(0)); - - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - .addAggregation(AggregationBuilders.global("global").subAggregation(AggregationBuilders.terms("field2").field("field2"))) - .get(); - assertHitCount(response, 1); - assertSearchHits(response, "2"); - - globalAgg = response.getAggregations().get("global"); - assertThat(globalAgg.getDocCount(), equalTo(1L)); - termsAgg = 
globalAgg.getAggregations().get("field2"); - assertThat(termsAgg.getBuckets().size(), equalTo(1)); - - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) - .prepareSearch("test") - .addAggregation(AggregationBuilders.global("global").subAggregation(AggregationBuilders.terms("field2").field("field2"))) - .get(); - assertHitCount(response, 2); - assertSearchHits(response, "1", "2"); - - globalAgg = response.getAggregations().get("global"); - assertThat(globalAgg.getDocCount(), equalTo(2L)); - termsAgg = globalAgg.getAggregations().get("field2"); - assertThat(termsAgg.getBuckets().size(), equalTo(1)); + assertResponse( + prepareSearch("test").addAggregation( + AggregationBuilders.global("global").subAggregation(AggregationBuilders.terms("field2").field("field2")) + ), + response -> { + assertHitCount(response, 3); + assertSearchHits(response, "1", "2", "3"); + + Global globalAgg = response.getAggregations().get("global"); + assertThat(globalAgg.getDocCount(), equalTo(3L)); + Terms termsAgg = globalAgg.getAggregations().get("field2"); + assertThat(termsAgg.getBuckets().get(0).getKeyAsString(), equalTo("value2")); + assertThat(termsAgg.getBuckets().get(0).getDocCount(), equalTo(1L)); + } + ); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .addAggregation(AggregationBuilders.global("global").subAggregation(AggregationBuilders.terms("field2").field("field2"))), + response -> { + assertHitCount(response, 1); + assertSearchHits(response, "1"); + + Global globalAgg = response.getAggregations().get("global"); + assertThat(globalAgg.getDocCount(), equalTo(1L)); + Terms termsAgg = globalAgg.getAggregations().get("field2"); + assertThat(termsAgg.getBuckets().size(), equalTo(0)); + } + ); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .addAggregation(AggregationBuilders.global("global").subAggregation(AggregationBuilders.terms("field2").field("field2"))), + response -> { + assertHitCount(response, 1); + assertSearchHits(response, "2"); + + Global globalAgg = response.getAggregations().get("global"); + assertThat(globalAgg.getDocCount(), equalTo(1L)); + Terms termsAgg = globalAgg.getAggregations().get("field2"); + assertThat(termsAgg.getBuckets().size(), equalTo(1)); + } + ); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) + .prepareSearch("test") + .addAggregation(AggregationBuilders.global("global").subAggregation(AggregationBuilders.terms("field2").field("field2"))), + response -> { + assertHitCount(response, 2); + assertSearchHits(response, "1", "2"); + + Global globalAgg = response.getAggregations().get("global"); + assertThat(globalAgg.getDocCount(), equalTo(2L)); + Terms termsAgg = globalAgg.getAggregations().get("field2"); + assertThat(termsAgg.getBuckets().size(), equalTo(1)); + } + ); } public void testParentChild() throws Exception { @@ -1016,17 +1037,20 @@ public void testParentChild() throws Exception { } private void verifyParentChild() { - SearchResponse searchResponse = prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.None)).get(); - assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); - - searchResponse = 
prepareSearch("test").setQuery(hasParentQuery("parent", matchAllQuery(), false)) - .addSort("id", SortOrder.ASC) - .get(); - assertHitCount(searchResponse, 3L); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1")); - assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("c2")); - assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("c3")); + assertResponse(prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.None)), searchResponse -> { + assertHitCount(searchResponse, 1L); + assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); + }); + + assertResponse( + prepareSearch("test").setQuery(hasParentQuery("parent", matchAllQuery(), false)).addSort("id", SortOrder.ASC), + searchResponse -> { + assertHitCount(searchResponse, 3L); + assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1")); + assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("c2")); + assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("c3")); + } + ); // Both user1 and user2 can't see field1 and field2, no parent/child query should yield results: assertHitCount( @@ -1058,20 +1082,26 @@ private void verifyParentChild() { ); // user 3 can see them but not c3 - searchResponse = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) - .prepareSearch("test") - .setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.None)) - .get(); - assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.None)), + searchResponse -> { + assertHitCount(searchResponse, 1L); + assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); + } + ); - searchResponse = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) - .prepareSearch("test") - .setQuery(hasParentQuery("parent", matchAllQuery(), false)) - .get(); - assertHitCount(searchResponse, 2L); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1")); - assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("c2")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(hasParentQuery("parent", matchAllQuery(), false)), + searchResponse -> { + assertHitCount(searchResponse, 2L); + assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1")); + assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("c2")); + } + ); } public void testScroll() throws Exception { @@ -1111,6 +1141,7 @@ public void testScroll() throws Exception { break; } + response.decRef(); response = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) ).prepareSearchScroll(response.getScrollId()).setScroll(TimeValue.timeValueMinutes(1L)).get(); @@ -1118,6 +1149,7 @@ public void testScroll() throws Exception { } finally { if (response != null) { String scrollId = response.getScrollId(); + response.decRef(); if (scrollId != null) { client().prepareClearScroll().addScrollId(scrollId).get(); } @@ -1148,6 +1180,9 @@ public void testReaderId() throws Exception { SearchResponse response = null; try { for (int from = 
0; from < numVisible; from++) { + if (response != null) { + response.decRef(); + } response = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) ) @@ -1164,6 +1199,7 @@ public void testReaderId() throws Exception { } } finally { client().execute(TransportClosePointInTimeAction.TYPE, new ClosePointInTimeRequest(response.pointInTimeId())).actionGet(); + response.decRef(); } } @@ -1181,27 +1217,30 @@ public void testRequestCache() throws Exception { int max = scaledRandomIntBetween(4, 32); for (int i = 0; i < max; i++) { Boolean requestCache = randomFrom(true, null); - SearchResponse response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ).prepareSearch("test").setSize(0).setQuery(termQuery("field1", "value1")).setRequestCache(requestCache).get(); - assertNoFailures(response); - assertHitCount(response, 1); - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - .setSize(0) - .setQuery(termQuery("field1", "value1")) - .setRequestCache(requestCache) - .get(); - assertNoFailures(response); - assertHitCount(response, 0); - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) - .prepareSearch("test") - .setSize(0) - .setQuery(termQuery("field1", "value1")) - .setRequestCache(requestCache) - .get(); - assertNoFailures(response); - assertHitCount(response, 1); + assertHitCountAndNoFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .setSize(0) + .setQuery(termQuery("field1", "value1")) + .setRequestCache(requestCache), + 1 + ); + assertHitCountAndNoFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .setSize(0) + .setQuery(termQuery("field1", "value1")) + .setRequestCache(requestCache), + 0 + ); + assertHitCountAndNoFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) + .prepareSearch("test") + .setSize(0) + .setQuery(termQuery("field1", "value1")) + .setRequestCache(requestCache), + 1 + ); } } @@ -1278,27 +1317,34 @@ public void testNestedInnerHits() throws Exception { .get(); refresh("test"); - SearchResponse response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user4", USERS_PASSWD)) - ) - .prepareSearch("test") - .setQuery( - QueryBuilders.nestedQuery("nested_field", QueryBuilders.termQuery("nested_field.field2", "value2"), ScoreMode.None) - .innerHit(new InnerHitBuilder()) - ) - .get(); - assertHitCount(response, 1); - assertSearchHits(response, "1"); - assertThat(response.getHits().getAt(0).getInnerHits().get("nested_field").getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("nested_field").getAt(0).getNestedIdentity().getOffset(), equalTo(0)); - assertThat( - response.getHits().getAt(0).getInnerHits().get("nested_field").getAt(0).getSourceAsString(), - equalTo("{\"field2\":\"value2\"}") - ); - assertThat(response.getHits().getAt(0).getInnerHits().get("nested_field").getAt(1).getNestedIdentity().getOffset(), equalTo(1)); - assertThat( - 
response.getHits().getAt(0).getInnerHits().get("nested_field").getAt(1).getSourceAsString(), - equalTo("{\"field2\":[\"value2\",\"value3\"]}") + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user4", USERS_PASSWD))) + .prepareSearch("test") + .setQuery( + QueryBuilders.nestedQuery("nested_field", QueryBuilders.termQuery("nested_field.field2", "value2"), ScoreMode.None) + .innerHit(new InnerHitBuilder()) + ), + response -> { + assertHitCount(response, 1); + assertSearchHits(response, "1"); + assertThat(response.getHits().getAt(0).getInnerHits().get("nested_field").getAt(0).getId(), equalTo("1")); + assertThat( + response.getHits().getAt(0).getInnerHits().get("nested_field").getAt(0).getNestedIdentity().getOffset(), + equalTo(0) + ); + assertThat( + response.getHits().getAt(0).getInnerHits().get("nested_field").getAt(0).getSourceAsString(), + equalTo("{\"field2\":\"value2\"}") + ); + assertThat( + response.getHits().getAt(0).getInnerHits().get("nested_field").getAt(1).getNestedIdentity().getOffset(), + equalTo(1) + ); + assertThat( + response.getHits().getAt(0).getInnerHits().get("nested_field").getAt(1).getSourceAsString(), + equalTo("{\"field2\":[\"value2\",\"value3\"]}") + ); + } ); } @@ -1342,16 +1388,19 @@ public void testSuggesters() throws Exception { ); // Term suggester: - SearchResponse response = prepareSearch("test").suggest( - new SuggestBuilder().setGlobalText("valeu").addSuggestion("_name1", new TermSuggestionBuilder("suggest_field1")) - ).get(); - assertNoFailures(response); - - TermSuggestion termSuggestion = response.getSuggest().getSuggestion("_name1"); - assertThat(termSuggestion, notNullValue()); - assertThat(termSuggestion.getEntries().size(), equalTo(1)); - assertThat(termSuggestion.getEntries().get(0).getOptions().size(), equalTo(1)); - assertThat(termSuggestion.getEntries().get(0).getOptions().get(0).getText().string(), equalTo("value")); + assertNoFailuresAndResponse( + prepareSearch("test").suggest( + new SuggestBuilder().setGlobalText("valeu").addSuggestion("_name1", new TermSuggestionBuilder("suggest_field1")) + ), + response -> { + + TermSuggestion termSuggestion = response.getSuggest().getSuggestion("_name1"); + assertThat(termSuggestion, notNullValue()); + assertThat(termSuggestion.getEntries().size(), equalTo(1)); + assertThat(termSuggestion.getEntries().get(0).getOptions().size(), equalTo(1)); + assertThat(termSuggestion.getEntries().get(0).getOptions().get(0).getText().string(), equalTo("value")); + } + ); final String[] indices = randomFrom( List.of(new String[] { "test" }, new String[] { "fls-index", "test" }, new String[] { "test", "fls-index" }) @@ -1367,17 +1416,19 @@ public void testSuggesters() throws Exception { assertThat(e.getMessage(), equalTo("Suggest isn't supported if document level security is enabled")); // Phrase suggester: - response = prepareSearch("test").suggest( - new SuggestBuilder().setGlobalText("valeu").addSuggestion("_name1", new PhraseSuggestionBuilder("suggest_field1")) - ).get(); - assertNoFailures(response); - - PhraseSuggestion phraseSuggestion = response.getSuggest().getSuggestion("_name1"); - assertThat(phraseSuggestion, notNullValue()); - assertThat(phraseSuggestion.getEntries().size(), equalTo(1)); - assertThat(phraseSuggestion.getEntries().get(0).getOptions().size(), equalTo(1)); - assertThat(phraseSuggestion.getEntries().get(0).getOptions().get(0).getText().string(), equalTo("value")); - + assertNoFailuresAndResponse( + prepareSearch("test").suggest( + new 
SuggestBuilder().setGlobalText("valeu").addSuggestion("_name1", new PhraseSuggestionBuilder("suggest_field1")) + ), + response -> { + + PhraseSuggestion phraseSuggestion = response.getSuggest().getSuggestion("_name1"); + assertThat(phraseSuggestion, notNullValue()); + assertThat(phraseSuggestion.getEntries().size(), equalTo(1)); + assertThat(phraseSuggestion.getEntries().get(0).getOptions().size(), equalTo(1)); + assertThat(phraseSuggestion.getEntries().get(0).getOptions().get(0).getText().string(), equalTo("value")); + } + ); e = expectThrows( ElasticsearchSecurityException.class, () -> client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user5", USERS_PASSWD))) @@ -1388,16 +1439,18 @@ public void testSuggesters() throws Exception { assertThat(e.getMessage(), equalTo("Suggest isn't supported if document level security is enabled")); // Completion suggester: - response = prepareSearch("test").suggest( - new SuggestBuilder().setGlobalText("valu").addSuggestion("_name1", new CompletionSuggestionBuilder("suggest_field2")) - ).get(); - assertNoFailures(response); - - CompletionSuggestion completionSuggestion = response.getSuggest().getSuggestion("_name1"); - assertThat(completionSuggestion, notNullValue()); - assertThat(completionSuggestion.getEntries().size(), equalTo(1)); - assertThat(completionSuggestion.getEntries().get(0).getOptions().size(), equalTo(1)); - assertThat(completionSuggestion.getEntries().get(0).getOptions().get(0).getText().string(), equalTo("value")); + assertNoFailuresAndResponse( + prepareSearch("test").suggest( + new SuggestBuilder().setGlobalText("valu").addSuggestion("_name1", new CompletionSuggestionBuilder("suggest_field2")) + ), + response -> { + CompletionSuggestion completionSuggestion = response.getSuggest().getSuggestion("_name1"); + assertThat(completionSuggestion, notNullValue()); + assertThat(completionSuggestion.getEntries().size(), equalTo(1)); + assertThat(completionSuggestion.getEntries().get(0).getOptions().size(), equalTo(1)); + assertThat(completionSuggestion.getEntries().get(0).getOptions().get(0).getText().string(), equalTo("value")); + } + ); e = expectThrows( ElasticsearchSecurityException.class, @@ -1433,18 +1486,20 @@ public void testProfile() throws Exception { .setMapping("field1", "type=text", "other_field", "type=text", "yet_another", "type=text") ); - SearchResponse response = prepareSearch("test").setProfile(true).setQuery(new FuzzyQueryBuilder("other_field", "valeu")).get(); - assertNoFailures(response); - - assertThat(response.getProfileResults().size(), equalTo(1)); - SearchProfileShardResult shardResult = response.getProfileResults().get(response.getProfileResults().keySet().toArray()[0]); - assertThat(shardResult.getQueryProfileResults().size(), equalTo(1)); - QueryProfileShardResult queryProfileShardResult = shardResult.getQueryProfileResults().get(0); - assertThat(queryProfileShardResult.getQueryResults().size(), equalTo(1)); - logger.info("queryProfileShardResult=" + Strings.toString(queryProfileShardResult)); - assertThat( - queryProfileShardResult.getQueryResults().stream().map(ProfileResult::getLuceneDescription).sorted().collect(toList()), - equalTo(List.of("(other_field:value)^0.8")) + assertNoFailuresAndResponse( + prepareSearch("test").setProfile(true).setQuery(new FuzzyQueryBuilder("other_field", "valeu")), + response -> { + assertThat(response.getProfileResults().size(), equalTo(1)); + SearchProfileShardResult shardResult = 
response.getProfileResults().get(response.getProfileResults().keySet().toArray()[0]); + assertThat(shardResult.getQueryProfileResults().size(), equalTo(1)); + QueryProfileShardResult queryProfileShardResult = shardResult.getQueryProfileResults().get(0); + assertThat(queryProfileShardResult.getQueryResults().size(), equalTo(1)); + logger.info("queryProfileShardResult=" + Strings.toString(queryProfileShardResult)); + assertThat( + queryProfileShardResult.getQueryResults().stream().map(ProfileResult::getLuceneDescription).sorted().collect(toList()), + equalTo(List.of("(other_field:value)^0.8")) + ); + } ); final String[] indices = randomFrom( diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java index 40672bf597b8c..34eecd57b53d5 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.integration; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; @@ -29,6 +28,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.BASIC_AUTH_HEADER; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.equalTo; @@ -194,64 +194,74 @@ public void testDuel() throws Exception { } indexRandom(true, requests); - SearchResponse actual = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD)) - ) - .prepareSearch("test") - .addSort("id", SortOrder.ASC) - .setQuery( - QueryBuilders.boolQuery() - .should(QueryBuilders.termQuery("field1", "value")) - .should(QueryBuilders.termQuery("field2", "value")) - .should(QueryBuilders.termQuery("field3", "value")) + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .addSort("id", SortOrder.ASC) + .setQuery( + QueryBuilders.boolQuery() + .should(QueryBuilders.termQuery("field1", "value")) + .should(QueryBuilders.termQuery("field2", "value")) + .should(QueryBuilders.termQuery("field3", "value")) + ), + actual -> assertResponse( + prepareSearch("test").addSort("id", SortOrder.ASC) + .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field1", "value"))), + expected -> { + assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); + assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); + for (int i = 0; i < actual.getHits().getHits().length; i++) { + assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); 
+ } + } ) - .get(); - SearchResponse expected = prepareSearch("test").addSort("id", SortOrder.ASC) - .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field1", "value"))) - .get(); - assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); - assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); - for (int i = 0; i < actual.getHits().getHits().length; i++) { - assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); - } + ); - actual = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) - .prepareSearch("test") - .addSort("id", SortOrder.ASC) - .setQuery( - QueryBuilders.boolQuery() - .should(QueryBuilders.termQuery("field1", "value")) - .should(QueryBuilders.termQuery("field2", "value")) - .should(QueryBuilders.termQuery("field3", "value")) + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) + .prepareSearch("test") + .addSort("id", SortOrder.ASC) + .setQuery( + QueryBuilders.boolQuery() + .should(QueryBuilders.termQuery("field1", "value")) + .should(QueryBuilders.termQuery("field2", "value")) + .should(QueryBuilders.termQuery("field3", "value")) + ), + actual -> assertResponse( + prepareSearch("test").addSort("id", SortOrder.ASC) + .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field2", "value"))), + expected -> { + assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); + assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); + for (int i = 0; i < actual.getHits().getHits().length; i++) { + assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); + } + } ) - .get(); - expected = prepareSearch("test").addSort("id", SortOrder.ASC) - .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field2", "value"))) - .get(); - assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); - assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); - for (int i = 0; i < actual.getHits().getHits().length; i++) { - assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); - } + ); - actual = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user4", USERS_PASSWD))) - .prepareSearch("test") - .addSort("id", SortOrder.ASC) - .setQuery( - QueryBuilders.boolQuery() - .should(QueryBuilders.termQuery("field1", "value")) - .should(QueryBuilders.termQuery("field2", "value")) - .should(QueryBuilders.termQuery("field3", "value")) + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user4", USERS_PASSWD))) + .prepareSearch("test") + .addSort("id", SortOrder.ASC) + .setQuery( + QueryBuilders.boolQuery() + .should(QueryBuilders.termQuery("field1", "value")) + .should(QueryBuilders.termQuery("field2", "value")) + .should(QueryBuilders.termQuery("field3", "value")) + ), + actual -> assertResponse( + prepareSearch("test").addSort("id", SortOrder.ASC) + .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field3", "value"))), + expected -> { + assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); + 
assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); + for (int i = 0; i < actual.getHits().getHits().length; i++) { + assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); + } + } ) - .get(); - expected = prepareSearch("test").addSort("id", SortOrder.ASC) - .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field3", "value"))) - .get(); - assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); - assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); - for (int i = 0; i < actual.getHits().getHits().length; i++) { - assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); - } + ); } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java index 9c962095b3229..83be62beab4ec 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java @@ -79,7 +79,7 @@ import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.BASIC_AUTH_HEADER; @@ -424,11 +424,16 @@ public void testKnnSearch() throws IOException { KnnVectorQueryBuilder query = new KnnVectorQueryBuilder("vector", queryVector, 10, null); // user1 has access to vector field, so the query should match with the document: - SearchResponse response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ).prepareSearch("test").setQuery(query).addFetchField("vector").get(); - assertHitCount(response, 1); - assertNotNull(response.getHits().getAt(0).field("vector")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(query) + .addFetchField("vector"), + response -> { + assertHitCount(response, 1); + assertNotNull(response.getHits().getAt(0).field("vector")); + } + ); // user2 has no access to vector field, so the query should not match with the document: assertHitCount( @@ -440,13 +445,15 @@ public void testKnnSearch() throws IOException { ); // check user2 cannot see the vector field, even when their search matches the document - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - .addFetchField("vector") - .get(); - assertHitCount(response, 1); - assertNull(response.getHits().getAt(0).field("vector")); - + assertResponse( + 
client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .addFetchField("vector"), + response -> { + assertHitCount(response, 1); + assertNull(response.getHits().getAt(0).field("vector")); + } + ); // user1 can access field1, so the filtered query should match with the document: KnnVectorQueryBuilder filterQuery1 = new KnnVectorQueryBuilder("vector", queryVector, 10, null).addFilterQuery( QueryBuilders.matchQuery("field1", "value1") @@ -479,37 +486,38 @@ public void testPercolateQueryWithIndexedDocWithFLS() { {"field1": "value1", "field2": "A new bonsai tree in the office"}""", XContentType.JSON).setRefreshPolicy(IMMEDIATE).get(); QueryBuilder percolateQuery = new PercolateQueryBuilder("query", "doc_index", "1", null, null, null); // user7 sees everything - SearchResponse result = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user7", USERS_PASSWD)) - ).prepareSearch("query_index").setQuery(percolateQuery).get(); - assertNoFailures(result); - assertHitCount(result, 1); - result = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) - .prepareSearch("query_index") - .setQuery(QueryBuilders.matchAllQuery()) - .get(); - assertNoFailures(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user7", USERS_PASSWD))) + .prepareSearch("query_index") + .setQuery(percolateQuery), + 1 + ); + assertHitCountAndNoFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) + .prepareSearch("query_index") + .setQuery(QueryBuilders.matchAllQuery()), + 1 + ); // user 3 can see the fields of the percolated document, but not the "query" field of the indexed query - result = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) - .prepareSearch("query_index") - .setQuery(percolateQuery) - .get(); - assertNoFailures(result); - assertHitCount(result, 0); - result = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user9", USERS_PASSWD))) - .prepareSearch("query_index") - .setQuery(QueryBuilders.matchAllQuery()) - .get(); - assertNoFailures(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) + .prepareSearch("query_index") + .setQuery(percolateQuery), + 0 + ); + assertHitCountAndNoFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user9", USERS_PASSWD))) + .prepareSearch("query_index") + .setQuery(QueryBuilders.matchAllQuery()), + 1 + ); // user 9 can see the fields of the index query, but not the field of the indexed document to be percolated - result = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user9", USERS_PASSWD))) - .prepareSearch("query_index") - .setQuery(percolateQuery) - .get(); - assertNoFailures(result); - assertHitCount(result, 0); + assertHitCountAndNoFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user9", USERS_PASSWD))) + .prepareSearch("query_index") + .setQuery(percolateQuery), + 0 + ); } public void testGeoQueryWithIndexedShapeWithFLS() { 
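[Editor's note - illustrative aside, not part of the diff] Every hunk in this file applies the same refactoring: rather than materialising a SearchResponse with .get() and asserting on the stored reference, the request builder and the assertions are handed to a consumer-style helper (assertResponse, assertHitCountAndNoFailures) so the response is reliably released once the assertions have run. The sketch below shows the general shape of that pattern only. It is a minimal, self-contained stand-in: ReleasableResponse, the Supplier-based request, and both helper methods here are hypothetical and are not the actual SearchResponse or ElasticsearchAssertions APIs.

import java.util.function.Consumer;
import java.util.function.Supplier;

public class ConsumerAssertionSketch {

    /** Minimal stand-in for a ref-counted response such as a search response. */
    interface ReleasableResponse {
        long totalHits();

        void decRef(); // release any resources held by the response
    }

    /**
     * Runs the request, hands the response to the assertion block, and releases the
     * response even if an assertion throws - the shape of the assertResponse(...) calls above.
     */
    static <R extends ReleasableResponse> void assertResponse(Supplier<R> request, Consumer<R> assertions) {
        R response = request.get();
        try {
            assertions.accept(response);
        } finally {
            response.decRef();
        }
    }

    /** Wrapper in the spirit of assertHitCountAndNoFailures (failure checking omitted in this sketch). */
    static <R extends ReleasableResponse> void assertHitCount(Supplier<R> request, long expectedHits) {
        assertResponse(request, response -> {
            if (response.totalHits() != expectedHits) {
                throw new AssertionError("expected " + expectedHits + " hits but got " + response.totalHits());
            }
        });
    }

    public static void main(String[] args) {
        // Toy "request" returning a fixed single-hit response, standing in for prepareSearch(...).
        Supplier<ReleasableResponse> oneHit = () -> new ReleasableResponse() {
            @Override
            public long totalHits() {
                return 1L;
            }

            @Override
            public void decRef() {
                System.out.println("response released");
            }
        };
        assertHitCount(oneHit, 1L);
    }
}

The explicit response.decRef() additions in the scroll and query-cache tests further down follow the same idea applied by hand: release the previous response before reassigning the variable or leaving the try/finally block.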
@@ -556,7 +564,6 @@ public void testGeoQueryWithIndexedShapeWithFLS() { ] } }""", XContentType.JSON).setRefreshPolicy(IMMEDIATE).get(); - SearchResponse result; // user sees both the querying shape and the queried point SearchRequestBuilder requestBuilder = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD)) @@ -569,9 +576,7 @@ public void testGeoQueryWithIndexedShapeWithFLS() { } else { requestBuilder.setQuery(shapeQuery1); } - result = requestBuilder.get(); - assertNoFailures(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(requestBuilder, 1); // user sees the queried point but not the querying shape final ShapeQueryBuilder shapeQuery2 = new ShapeQueryBuilder("field", "2").relation(ShapeRelation.WITHIN) .indexedShapeIndex("shape_index") @@ -607,9 +612,7 @@ public void testGeoQueryWithIndexedShapeWithFLS() { } else { requestBuilder.setQuery(shapeQuery3); } - result = requestBuilder.get(); - assertNoFailures(result); - assertHitCount(result, 0); + assertHitCountAndNoFailures(requestBuilder, 0); } public void testTermsLookupOnIndexWithFLS() { @@ -1118,6 +1121,7 @@ public void testScroll() throws Exception { break; } + response.decRef(); response = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) ).prepareSearchScroll(response.getScrollId()).setScroll(TimeValue.timeValueMinutes(1L)).get(); @@ -1126,6 +1130,7 @@ public void testScroll() throws Exception { } finally { if (response != null) { String scrollId = response.getScrollId(); + response.decRef(); if (scrollId != null) { client().prepareClearScroll().addScrollId(scrollId).get(); } @@ -1155,23 +1160,23 @@ public void testPointInTimeId() throws Exception { refresh("test"); String pitId = openPointInTime("user1", TimeValue.timeValueMinutes(1), "test"); - SearchResponse response = null; try { for (int from = 0; from < numDocs; from++) { - response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ) - .prepareSearch() - .setPointInTime(new PointInTimeBuilder(pitId)) - .setSize(1) - .setFrom(from) - .setQuery(constantScoreQuery(termQuery("field1", "value1"))) - .setFetchSource(true) - .get(); - assertThat(response.getHits().getTotalHits().value, is((long) numDocs)); - assertThat(response.getHits().getHits().length, is(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch() + .setPointInTime(new PointInTimeBuilder(pitId)) + .setSize(1) + .setFrom(from) + .setQuery(constantScoreQuery(termQuery("field1", "value1"))) + .setFetchSource(true), + response -> { + assertThat(response.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(response.getHits().getHits().length, is(1)); + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + } + ); } } finally { client().execute(TransportClosePointInTimeAction.TYPE, new ClosePointInTimeRequest(pitId)).actionGet(); @@ -1191,26 +1196,35 @@ public void testQueryCache() throws Exception { int max = scaledRandomIntBetween(4, 32); for (int i = 0; i < max; i++) { - SearchResponse response = client().filterWithHeader( - 
Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ).prepareSearch("test").setQuery(constantScoreQuery(termQuery("field1", "value1"))).get(); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - .setQuery(constantScoreQuery(termQuery("field1", "value1"))) - .get(); - assertHitCount(response, 0); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(constantScoreQuery(termQuery("field1", "value1"))), + response -> { + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + } + ); + assertHitCountAndNoFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(constantScoreQuery(termQuery("field1", "value1"))), + 0 + ); String multipleFieldsUser = randomFrom("user5", "user6", "user7"); - response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue(multipleFieldsUser, USERS_PASSWD)) - ).prepareSearch("test").setQuery(constantScoreQuery(termQuery("field1", "value1"))).get(); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(3)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); + assertResponse( + client().filterWithHeader( + Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue(multipleFieldsUser, USERS_PASSWD)) + ).prepareSearch("test").setQuery(constantScoreQuery(termQuery("field1", "value1"))), + response -> { + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(3)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); + } + ); } } @@ -1250,6 +1264,7 @@ public void testScrollWithQueryCache() { assertThat(user2SearchResponse.getHits().getTotalHits().value, is((long) 0)); assertThat(user2SearchResponse.getHits().getHits().length, is(0)); } else { + user2SearchResponse.decRef(); // make sure scroll is empty user2SearchResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD)) @@ -1259,6 +1274,7 @@ public void testScrollWithQueryCache() { if (randomBoolean()) { // maybe reuse the scroll even if empty client().prepareClearScroll().addScrollId(user2SearchResponse.getScrollId()).get(); + user2SearchResponse.decRef(); user2SearchResponse = null; } } @@ -1279,6 +1295,7 @@ public void testScrollWithQueryCache() { assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); scrolledDocsUser1++; } else { + user1SearchResponse.decRef(); 
user1SearchResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) ).prepareSearchScroll(user1SearchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(10L)).get(); @@ -1295,6 +1312,7 @@ public void testScrollWithQueryCache() { if (user1SearchResponse.getScrollId() != null) { client().prepareClearScroll().addScrollId(user1SearchResponse.getScrollId()).get(); } + user1SearchResponse.decRef(); user1SearchResponse = null; scrolledDocsUser1 = 0; } @@ -1305,12 +1323,14 @@ public void testScrollWithQueryCache() { } finally { if (user1SearchResponse != null) { String scrollId = user1SearchResponse.getScrollId(); + user1SearchResponse.decRef(); if (scrollId != null) { client().prepareClearScroll().addScrollId(scrollId).get(); } } if (user2SearchResponse != null) { String scrollId = user2SearchResponse.getScrollId(); + user2SearchResponse.decRef(); if (scrollId != null) { client().prepareClearScroll().addScrollId(scrollId).get(); } @@ -1329,25 +1349,29 @@ public void testRequestCache() throws Exception { int max = scaledRandomIntBetween(4, 32); for (int i = 0; i < max; i++) { Boolean requestCache = randomFrom(true, null); - SearchResponse response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ).prepareSearch("test").setSize(0).setQuery(termQuery("field1", "value1")).setRequestCache(requestCache).get(); - assertNoFailures(response); - assertHitCount(response, 1); - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - .setSize(0) - .setQuery(termQuery("field1", "value1")) - .setRequestCache(requestCache) - .get(); - assertNoFailures(response); - assertHitCount(response, 0); + assertHitCountAndNoFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .setSize(0) + .setQuery(termQuery("field1", "value1")) + .setRequestCache(requestCache), + 1 + ); + assertHitCountAndNoFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .setSize(0) + .setQuery(termQuery("field1", "value1")) + .setRequestCache(requestCache), + 0 + ); String multipleFieldsUser = randomFrom("user5", "user6", "user7"); - response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue(multipleFieldsUser, USERS_PASSWD)) - ).prepareSearch("test").setSize(0).setQuery(termQuery("field1", "value1")).setRequestCache(requestCache).get(); - assertNoFailures(response); - assertHitCount(response, 1); + assertHitCountAndNoFailures( + client().filterWithHeader( + Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue(multipleFieldsUser, USERS_PASSWD)) + ).prepareSearch("test").setSize(0).setQuery(termQuery("field1", "value1")).setRequestCache(requestCache), + 1 + ); } } @@ -1371,103 +1395,132 @@ public void testFields() throws Exception { .get(); // user1 is granted access to field1 only: - SearchResponse response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ).prepareSearch("test").addStoredField("field1").addStoredField("field2").addStoredField("field3").get(); - assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); - 
assertThat(response.getHits().getAt(0).getFields().get("field1").getValue(), equalTo("value1")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .addStoredField("field1") + .addStoredField("field2") + .addStoredField("field3"), + response -> { + assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); + assertThat(response.getHits().getAt(0).getFields().get("field1").getValue(), equalTo("value1")); + } + ); // user2 is granted access to field2 only: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - .addStoredField("field1") - .addStoredField("field2") - .addStoredField("field3") - .get(); - assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getFields().get("field2").getValue(), equalTo("value2")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .addStoredField("field1") + .addStoredField("field2") + .addStoredField("field3"), + response -> { + assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); + assertThat(response.getHits().getAt(0).getFields().get("field2").getValue(), equalTo("value2")); + } + ); // user3 is granted access to field1 and field2: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) - .prepareSearch("test") - .addStoredField("field1") - .addStoredField("field2") - .addStoredField("field3") - .get(); - assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getFields().get("field1").getValue(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getFields().get("field2").getValue(), equalTo("value2")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) + .prepareSearch("test") + .addStoredField("field1") + .addStoredField("field2") + .addStoredField("field3"), + response -> { + assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); + assertThat(response.getHits().getAt(0).getFields().get("field1").getValue(), equalTo("value1")); + assertThat(response.getHits().getAt(0).getFields().get("field2").getValue(), equalTo("value2")); + } + ); // user4 is granted access to no fields: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user4", USERS_PASSWD))) - .prepareSearch("test") - .addStoredField("field1") - .addStoredField("field2") - .addStoredField("field3") - .get(); - assertThat(response.getHits().getAt(0).getFields().size(), equalTo(0)); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user4", USERS_PASSWD))) + .prepareSearch("test") + .addStoredField("field1") + .addStoredField("field2") + .addStoredField("field3"), + response -> assertThat(response.getHits().getAt(0).getFields().size(), equalTo(0)) + ); // user5 has no field level security configured: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user5", USERS_PASSWD))) - .prepareSearch("test") - .addStoredField("field1") - .addStoredField("field2") - .addStoredField("field3") - 
.get(); - assertThat(response.getHits().getAt(0).getFields().size(), equalTo(3)); - assertThat(response.getHits().getAt(0).getFields().get("field1").getValue(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getFields().get("field2").getValue(), equalTo("value2")); - assertThat(response.getHits().getAt(0).getFields().get("field3").getValue(), equalTo("value3")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user5", USERS_PASSWD))) + .prepareSearch("test") + .addStoredField("field1") + .addStoredField("field2") + .addStoredField("field3"), + response -> { + assertThat(response.getHits().getAt(0).getFields().size(), equalTo(3)); + assertThat(response.getHits().getAt(0).getFields().get("field1").getValue(), equalTo("value1")); + assertThat(response.getHits().getAt(0).getFields().get("field2").getValue(), equalTo("value2")); + assertThat(response.getHits().getAt(0).getFields().get("field3").getValue(), equalTo("value3")); + } + ); // user6 has field level security configured with access to field*: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD))) - .prepareSearch("test") - .addStoredField("field1") - .addStoredField("field2") - .addStoredField("field3") - .get(); - assertThat(response.getHits().getAt(0).getFields().size(), equalTo(3)); - assertThat(response.getHits().getAt(0).getFields().get("field1").getValue(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getFields().get("field2").getValue(), equalTo("value2")); - assertThat(response.getHits().getAt(0).getFields().get("field3").getValue(), equalTo("value3")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD))) + .prepareSearch("test") + .addStoredField("field1") + .addStoredField("field2") + .addStoredField("field3"), + response -> { + assertThat(response.getHits().getAt(0).getFields().size(), equalTo(3)); + assertThat(response.getHits().getAt(0).getFields().get("field1").getValue(), equalTo("value1")); + assertThat(response.getHits().getAt(0).getFields().get("field2").getValue(), equalTo("value2")); + assertThat(response.getHits().getAt(0).getFields().get("field3").getValue(), equalTo("value3")); + } + ); // user7 has access to all fields due to a mix of roles without field level security and with: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user7", USERS_PASSWD))) - .prepareSearch("test") - .addStoredField("field1") - .addStoredField("field2") - .addStoredField("field3") - .get(); - assertThat(response.getHits().getAt(0).getFields().size(), equalTo(3)); - assertThat(response.getHits().getAt(0).getFields().get("field1").getValue(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getFields().get("field2").getValue(), equalTo("value2")); - assertThat(response.getHits().getAt(0).getFields().get("field3").getValue(), equalTo("value3")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user7", USERS_PASSWD))) + .prepareSearch("test") + .addStoredField("field1") + .addStoredField("field2") + .addStoredField("field3"), + response -> { + assertThat(response.getHits().getAt(0).getFields().size(), equalTo(3)); + assertThat(response.getHits().getAt(0).getFields().get("field1").getValue(), equalTo("value1")); + 
assertThat(response.getHits().getAt(0).getFields().get("field2").getValue(), equalTo("value2")); + assertThat(response.getHits().getAt(0).getFields().get("field3").getValue(), equalTo("value3")); + } + ); // user8 has field level security configured with access to field1 and field2: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user8", USERS_PASSWD))) - .prepareSearch("test") - .addStoredField("field1") - .addStoredField("field2") - .addStoredField("field3") - .get(); - assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getFields().get("field1").getValue(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getFields().get("field2").getValue(), equalTo("value2")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user8", USERS_PASSWD))) + .prepareSearch("test") + .addStoredField("field1") + .addStoredField("field2") + .addStoredField("field3"), + response -> { + assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); + assertThat(response.getHits().getAt(0).getFields().get("field1").getValue(), equalTo("value1")); + assertThat(response.getHits().getAt(0).getFields().get("field2").getValue(), equalTo("value2")); + } + ); // user1 is granted access to field1 only, and so should be able to load it by alias: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) - .prepareSearch("test") - .addStoredField("alias") - .get(); - assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getFields().get("alias").getValue(), equalTo("value1")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .addStoredField("alias"), + response -> { + assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); + assertThat(response.getHits().getAt(0).getFields().get("alias").getValue(), equalTo("value1")); + } + ); // user2 is not granted access to field1, and so should not be able to load it by alias: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - .addStoredField("alias") - .get(); - assertThat(response.getHits().getAt(0).getFields().size(), equalTo(0)); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .addStoredField("alias"), + response -> assertThat(response.getHits().getAt(0).getFields().size(), equalTo(0)) + ); } public void testSource() throws Exception { @@ -1478,67 +1531,89 @@ public void testSource() throws Exception { .get(); // user1 is granted access to field1 only: - SearchResponse response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ).prepareSearch("test").get(); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test"), + response -> { + 
assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); + } + ); // user2 is granted access to field2 only: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - .get(); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test"), + response -> { + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + } + ); // user3 is granted access to field1 and field2: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) - .prepareSearch("test") - .get(); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) + .prepareSearch("test"), + response -> { + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + } + ); // user4 is granted access to no fields: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user4", USERS_PASSWD))) - .prepareSearch("test") - .get(); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(0)); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user4", USERS_PASSWD))) + .prepareSearch("test"), + response -> assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(0)) + ); // user5 has no field level security configured: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user5", USERS_PASSWD))) - .prepareSearch("test") - .get(); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(3)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field3").toString(), equalTo("value3")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user5", USERS_PASSWD))) + .prepareSearch("test"), + response -> { + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(3)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + 
assertThat(response.getHits().getAt(0).getSourceAsMap().get("field3").toString(), equalTo("value3")); + } + ); // user6 has field level security configured with access to field*: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD))) - .prepareSearch("test") - .get(); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(3)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field3").toString(), equalTo("value3")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD))) + .prepareSearch("test"), + response -> { + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(3)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field3").toString(), equalTo("value3")); + } + ); // user7 has access to all fields - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user7", USERS_PASSWD))) - .prepareSearch("test") - .get(); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(3)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field3").toString(), equalTo("value3")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user7", USERS_PASSWD))) + .prepareSearch("test"), + response -> { + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(3)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field3").toString(), equalTo("value3")); + } + ); // user8 has field level security configured with access to field1 and field2: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user8", USERS_PASSWD))) - .prepareSearch("test") - .get(); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user8", USERS_PASSWD))) + .prepareSearch("test"), + response -> { + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + } + ); } public void testSort() { @@ -1548,45 +1623,52 @@ public void 
testSort() { prepareIndex("test").setId("1").setSource("field1", 1d, "field2", 2d).setRefreshPolicy(IMMEDIATE).get(); // user1 is granted to use field1, so it is included in the sort_values - SearchResponse response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ).prepareSearch("test").addSort("field1", SortOrder.ASC).get(); - assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo(1L)); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .addSort("field1", SortOrder.ASC), + response -> assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo(1L)) + ); // user2 is not granted to use field1, so the default missing sort value is included - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - .addSort("field1", SortOrder.ASC) - .get(); - assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo(Long.MAX_VALUE)); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .addSort("field1", SortOrder.ASC), + response -> assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo(Long.MAX_VALUE)) + ); // user1 is not granted to use field2, so the default missing sort value is included - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) - .prepareSearch("test") - .addSort("field2", SortOrder.ASC) - .get(); - assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo(Long.MAX_VALUE)); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .addSort("field2", SortOrder.ASC), + response -> assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo(Long.MAX_VALUE)) + ); // user2 is granted to use field2, so it is included in the sort_values - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - .addSort("field2", SortOrder.ASC) - .get(); - assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo(2L)); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .addSort("field2", SortOrder.ASC), + response -> assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo(2L)) + ); // user1 is granted to use field1, so it is included in the sort_values when using its alias: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) - .prepareSearch("test") - .addSort("alias", SortOrder.ASC) - .get(); - assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo(1L)); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .addSort("alias", SortOrder.ASC), + response -> assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo(1L)) + ); // user2 is not granted to use field1, so the default missing sort value is included when using its alias: - response = 
client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - .addSort("alias", SortOrder.ASC) - .get(); - assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo(Long.MAX_VALUE)); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .addSort("alias", SortOrder.ASC), + response -> assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo(Long.MAX_VALUE)) + ); } public void testHighlighting() { @@ -1600,42 +1682,56 @@ public void testHighlighting() { .get(); // user1 has access to field1, so the highlight should be visible: - SearchResponse response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ).prepareSearch("test").setQuery(matchQuery("field1", "value1")).highlighter(new HighlightBuilder().field("field1")).get(); - assertHitCount(response, 1); - SearchHit hit = response.getHits().iterator().next(); - assertEquals(hit.getHighlightFields().size(), 1); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(matchQuery("field1", "value1")) + .highlighter(new HighlightBuilder().field("field1")), + response -> { + assertHitCount(response, 1); + SearchHit hit = response.getHits().iterator().next(); + assertEquals(hit.getHighlightFields().size(), 1); + } + ); // user2 has no access to field1, so the highlight should not be visible: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - .setQuery(matchQuery("field2", "value2")) - .highlighter(new HighlightBuilder().field("field1")) - .get(); - assertHitCount(response, 1); - hit = response.getHits().iterator().next(); - assertEquals(hit.getHighlightFields().size(), 0); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(matchQuery("field2", "value2")) + .highlighter(new HighlightBuilder().field("field1")), + response -> { + assertHitCount(response, 1); + var hit = response.getHits().iterator().next(); + assertEquals(hit.getHighlightFields().size(), 0); + } + ); // user1 has access to field1, so the highlight on its alias should be visible: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) - .prepareSearch("test") - .setQuery(matchQuery("field1", "value1")) - .highlighter(new HighlightBuilder().field("alias")) - .get(); - assertHitCount(response, 1); - hit = response.getHits().iterator().next(); - assertEquals(hit.getHighlightFields().size(), 1); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(matchQuery("field1", "value1")) + .highlighter(new HighlightBuilder().field("alias")), + response -> { + assertHitCount(response, 1); + var hit = response.getHits().iterator().next(); + assertEquals(hit.getHighlightFields().size(), 1); + } + ); // user2 has no access to field1, so the highlight on its alias should not be visible: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", 
USERS_PASSWD))) - .prepareSearch("test") - .setQuery(matchQuery("field2", "value2")) - .highlighter(new HighlightBuilder().field("alias")) - .get(); - assertHitCount(response, 1); - hit = response.getHits().iterator().next(); - assertEquals(hit.getHighlightFields().size(), 0); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(matchQuery("field2", "value2")) + .highlighter(new HighlightBuilder().field("alias")), + response -> { + assertHitCount(response, 1); + var hit = response.getHits().iterator().next(); + assertEquals(hit.getHighlightFields().size(), 0); + } + ); } public void testAggs() { @@ -1646,45 +1742,52 @@ public void testAggs() { prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").setRefreshPolicy(IMMEDIATE).get(); // user1 is authorized to use field1, so buckets are include for a term agg on field1 - SearchResponse response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ).prepareSearch("test").addAggregation(AggregationBuilders.terms("_name").field("field1")).get(); - assertThat(((Terms) response.getAggregations().get("_name")).getBucketByKey("value1").getDocCount(), equalTo(1L)); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .addAggregation(AggregationBuilders.terms("_name").field("field1")), + response -> assertThat(((Terms) response.getAggregations().get("_name")).getBucketByKey("value1").getDocCount(), equalTo(1L)) + ); // user2 is not authorized to use field1, so no buckets are include for a term agg on field1 - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - .addAggregation(AggregationBuilders.terms("_name").field("field1")) - .get(); - assertThat(((Terms) response.getAggregations().get("_name")).getBucketByKey("value1"), nullValue()); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .addAggregation(AggregationBuilders.terms("_name").field("field1")), + response -> assertThat(((Terms) response.getAggregations().get("_name")).getBucketByKey("value1"), nullValue()) + ); // user1 is not authorized to use field2, so no buckets are include for a term agg on field2 - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) - .prepareSearch("test") - .addAggregation(AggregationBuilders.terms("_name").field("field2")) - .get(); - assertThat(((Terms) response.getAggregations().get("_name")).getBucketByKey("value2"), nullValue()); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .addAggregation(AggregationBuilders.terms("_name").field("field2")), + response -> assertThat(((Terms) response.getAggregations().get("_name")).getBucketByKey("value2"), nullValue()) + ); // user2 is authorized to use field2, so buckets are include for a term agg on field2 - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - 
.addAggregation(AggregationBuilders.terms("_name").field("field2")) - .get(); - assertThat(((Terms) response.getAggregations().get("_name")).getBucketByKey("value2").getDocCount(), equalTo(1L)); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .addAggregation(AggregationBuilders.terms("_name").field("field2")), + response -> assertThat(((Terms) response.getAggregations().get("_name")).getBucketByKey("value2").getDocCount(), equalTo(1L)) + ); // user1 is authorized to use field1, so buckets are include for a term agg on its alias: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) - .prepareSearch("test") - .addAggregation(AggregationBuilders.terms("_name").field("alias")) - .get(); - assertThat(((Terms) response.getAggregations().get("_name")).getBucketByKey("value1").getDocCount(), equalTo(1L)); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .addAggregation(AggregationBuilders.terms("_name").field("alias")), + response -> assertThat(((Terms) response.getAggregations().get("_name")).getBucketByKey("value1").getDocCount(), equalTo(1L)) + ); // user2 is not authorized to use field1, so no buckets are include for a term agg on its alias: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - .addAggregation(AggregationBuilders.terms("_name").field("alias")) - .get(); - assertThat(((Terms) response.getAggregations().get("_name")).getBucketByKey("value1"), nullValue()); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .addAggregation(AggregationBuilders.terms("_name").field("alias")), + response -> assertThat(((Terms) response.getAggregations().get("_name")).getBucketByKey("value1"), nullValue()) + ); } public void testTVApi() throws Exception { @@ -1913,12 +2016,16 @@ public void testParentChild() throws Exception { } private void verifyParentChild() { - SearchResponse searchResponse = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ).prepareSearch("test").setQuery(hasChildQuery("child", termQuery("field1", "yellow"), ScoreMode.None)).get(); - assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(hasChildQuery("child", termQuery("field1", "yellow"), ScoreMode.None)), + searchResponse -> { + assertHitCount(searchResponse, 1L); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); + } + ); assertHitCount( client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) @@ -1928,13 +2035,16 @@ private void verifyParentChild() { ); // Perform the same checks, but using an alias for field1. 
- searchResponse = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) - .prepareSearch("test") - .setQuery(hasChildQuery("child", termQuery("alias", "yellow"), ScoreMode.None)) - .get(); - assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(hasChildQuery("child", termQuery("alias", "yellow"), ScoreMode.None)), + searchResponse -> { + assertHitCount(searchResponse, 1L); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); + } + ); assertHitCount( client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) @@ -1991,22 +2101,29 @@ public void testQuery_withRoleWithFieldWildcards() { prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").setRefreshPolicy(IMMEDIATE).get(); // user6 has access to all fields, so the query should match with the document: - SearchResponse response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD)) - ).prepareSearch("test").setQuery(matchQuery("field1", "value1")).get(); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(matchQuery("field1", "value1")), + response -> { + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + } + ); - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD))) - .prepareSearch("test") - .setQuery(matchQuery("field2", "value2")) - .get(); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(matchQuery("field2", "value2")), + response -> { + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + } + ); } public void testExistQuery() { @@ -2140,76 +2257,83 @@ public void 
testLookupRuntimeFields() throws Exception { .sort("field1") .runtimeMappings(Map.of("host", lookupField)) ); - SearchResponse response; // user1 has access to field1 - response = client().filterWithHeader(Map.of(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) - .search(request) - .actionGet(); - assertHitCount(response, 2); - { - SearchHit hit0 = response.getHits().getHits()[0]; - assertThat(hit0.getDocumentFields().keySet(), equalTo(Set.of("field1", "host"))); - assertThat(hit0.field("field1").getValues(), equalTo(List.of("192.168.1.1"))); - assertThat(hit0.field("host").getValues(), equalTo(List.of(Map.of("field1", List.of("192.168.1.1"))))); - } - { - SearchHit hit1 = response.getHits().getHits()[1]; - assertThat(hit1.getDocumentFields().keySet(), equalTo(Set.of("field1", "host"))); - assertThat(hit1.field("field1").getValues(), equalTo(List.of("192.168.1.2"))); - assertThat(hit1.field("host").getValues(), equalTo(List.of(Map.of("field1", List.of("192.168.1.2"))))); - } + assertResponse( + client().filterWithHeader(Map.of(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))).search(request), + response -> { + assertHitCount(response, 2); + { + SearchHit hit0 = response.getHits().getHits()[0]; + assertThat(hit0.getDocumentFields().keySet(), equalTo(Set.of("field1", "host"))); + assertThat(hit0.field("field1").getValues(), equalTo(List.of("192.168.1.1"))); + assertThat(hit0.field("host").getValues(), equalTo(List.of(Map.of("field1", List.of("192.168.1.1"))))); + } + { + SearchHit hit1 = response.getHits().getHits()[1]; + assertThat(hit1.getDocumentFields().keySet(), equalTo(Set.of("field1", "host"))); + assertThat(hit1.field("field1").getValues(), equalTo(List.of("192.168.1.2"))); + assertThat(hit1.field("host").getValues(), equalTo(List.of(Map.of("field1", List.of("192.168.1.2"))))); + } + } + ); // user3 has access to field1, field2 - response = client().filterWithHeader(Map.of(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) - .search(request) - .actionGet(); - assertHitCount(response, 2); - { - SearchHit hit0 = response.getHits().getHits()[0]; - assertThat(hit0.getDocumentFields().keySet(), equalTo(Set.of("field1", "field2", "host"))); - assertThat(hit0.field("field1").getValues(), equalTo(List.of("192.168.1.1"))); - assertThat(hit0.field("field2").getValues(), equalTo(List.of("out of memory"))); - assertThat( - hit0.field("host").getValues(), - equalTo(List.of(Map.of("field1", List.of("192.168.1.1"), "field2", List.of("windows")))) - ); - } - { - SearchHit hit1 = response.getHits().getHits()[1]; - assertThat(hit1.getDocumentFields().keySet(), equalTo(Set.of("field1", "field2", "host"))); - assertThat(hit1.field("field1").getValues(), equalTo(List.of("192.168.1.2"))); - assertThat(hit1.field("field2").getValues(), equalTo(List.of("authentication fails"))); - assertThat( - hit1.field("host").getValues(), - equalTo(List.of(Map.of("field1", List.of("192.168.1.2"), "field2", List.of("macos")))) - ); - } + assertResponse( + client().filterWithHeader(Map.of(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))).search(request), + response -> { + assertHitCount(response, 2); + { + SearchHit hit0 = response.getHits().getHits()[0]; + assertThat(hit0.getDocumentFields().keySet(), equalTo(Set.of("field1", "field2", "host"))); + assertThat(hit0.field("field1").getValues(), equalTo(List.of("192.168.1.1"))); + assertThat(hit0.field("field2").getValues(), equalTo(List.of("out of memory"))); + assertThat( + hit0.field("host").getValues(), + 
equalTo(List.of(Map.of("field1", List.of("192.168.1.1"), "field2", List.of("windows")))) + ); + } + { + SearchHit hit1 = response.getHits().getHits()[1]; + assertThat(hit1.getDocumentFields().keySet(), equalTo(Set.of("field1", "field2", "host"))); + assertThat(hit1.field("field1").getValues(), equalTo(List.of("192.168.1.2"))); + assertThat(hit1.field("field2").getValues(), equalTo(List.of("authentication fails"))); + assertThat( + hit1.field("host").getValues(), + equalTo(List.of(Map.of("field1", List.of("192.168.1.2"), "field2", List.of("macos")))) + ); + } + } + ); // user6 has access to field1, field2, and field3 - response = client().filterWithHeader(Map.of(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD))) - .search(request) - .actionGet(); - assertHitCount(response, 2); - { - SearchHit hit0 = response.getHits().getHits()[0]; - assertThat(hit0.getDocumentFields().keySet(), equalTo(Set.of("field1", "field2", "field3", "host"))); - assertThat(hit0.field("field1").getValues(), equalTo(List.of("192.168.1.1"))); - assertThat(hit0.field("field2").getValues(), equalTo(List.of("out of memory"))); - assertThat(hit0.field("field3").getValues(), equalTo(List.of("2021-01-20"))); - assertThat( - hit0.field("host").getValues(), - equalTo(List.of(Map.of("field1", List.of("192.168.1.1"), "field2", List.of("windows"), "field3", List.of("canada")))) - ); - } - { - SearchHit hit1 = response.getHits().getHits()[1]; - assertThat(hit1.getDocumentFields().keySet(), equalTo(Set.of("field1", "field2", "field3", "host"))); - assertThat(hit1.field("field1").getValues(), equalTo(List.of("192.168.1.2"))); - assertThat(hit1.field("field2").getValues(), equalTo(List.of("authentication fails"))); - assertThat(hit1.field("field3").getValues(), equalTo(List.of("2021-01-21"))); - assertThat( - hit1.field("host").getValues(), - equalTo(List.of(Map.of("field1", List.of("192.168.1.2"), "field2", List.of("macos"), "field3", List.of("us")))) - ); - } + assertResponse( + client().filterWithHeader(Map.of(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD))).search(request), + response -> { + assertHitCount(response, 2); + { + SearchHit hit0 = response.getHits().getHits()[0]; + assertThat(hit0.getDocumentFields().keySet(), equalTo(Set.of("field1", "field2", "field3", "host"))); + assertThat(hit0.field("field1").getValues(), equalTo(List.of("192.168.1.1"))); + assertThat(hit0.field("field2").getValues(), equalTo(List.of("out of memory"))); + assertThat(hit0.field("field3").getValues(), equalTo(List.of("2021-01-20"))); + assertThat( + hit0.field("host").getValues(), + equalTo( + List.of(Map.of("field1", List.of("192.168.1.1"), "field2", List.of("windows"), "field3", List.of("canada"))) + ) + ); + } + { + SearchHit hit1 = response.getHits().getHits()[1]; + assertThat(hit1.getDocumentFields().keySet(), equalTo(Set.of("field1", "field2", "field3", "host"))); + assertThat(hit1.field("field1").getValues(), equalTo(List.of("192.168.1.2"))); + assertThat(hit1.field("field2").getValues(), equalTo(List.of("authentication fails"))); + assertThat(hit1.field("field3").getValues(), equalTo(List.of("2021-01-21"))); + assertThat( + hit1.field("host").getValues(), + equalTo(List.of(Map.of("field1", List.of("192.168.1.2"), "field2", List.of("macos"), "field3", List.of("us")))) + ); + } + } + ); } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/IndicesPermissionsWithAliasesWildcardsAndRegexsTests.java 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/IndicesPermissionsWithAliasesWildcardsAndRegexsTests.java index 0e799589409f8..0566784e28153 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/IndicesPermissionsWithAliasesWildcardsAndRegexsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/IndicesPermissionsWithAliasesWildcardsAndRegexsTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.admin.indices.template.put.PutComposableIndexTemplateAction; import org.elasticsearch.action.datastreams.CreateDataStreamAction; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.settings.SecureString; @@ -35,6 +34,7 @@ import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.DEFAULT_TIMESTAMP_FIELD; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.BASIC_AUTH_HEADER; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.equalTo; @@ -134,52 +134,67 @@ public void testSearchResolveWildcardsRegexs() throws Exception { .setRefreshPolicy(IMMEDIATE) .get(); - SearchResponse response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ).prepareSearch("test").setQuery(QueryBuilders.termQuery("_id", "1")).get(); - assertThat(response.getHits().getHits().length, equalTo(1)); - Map source = response.getHits().getHits()[0].getSourceAsMap(); - assertThat(source.size(), equalTo(1)); - assertThat((String) source.get("field1"), equalTo("value1")); - - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) - .prepareSearch("my_alias") - .setQuery(QueryBuilders.termQuery("_id", "1")) - .get(); - assertThat(response.getHits().getHits().length, equalTo(1)); - source = response.getHits().getHits()[0].getSourceAsMap(); - assertThat(source.size(), equalTo(1)); - assertThat((String) source.get("field2"), equalTo("value2")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(QueryBuilders.termQuery("_id", "1")), + response -> { + assertThat(response.getHits().getHits().length, equalTo(1)); + Map source = response.getHits().getHits()[0].getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat((String) source.get("field1"), equalTo("value1")); + } + ); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("my_alias") + .setQuery(QueryBuilders.termQuery("_id", "1")), + response -> { + assertThat(response.getHits().getHits().length, equalTo(1)); + var source = response.getHits().getHits()[0].getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat((String) source.get("field2"), equalTo("value2")); + } + ); - 
response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) - .prepareSearch("an_alias") - .setQuery(QueryBuilders.termQuery("_id", "1")) - .get(); - assertThat(response.getHits().getHits().length, equalTo(1)); - source = response.getHits().getHits()[0].getSourceAsMap(); - assertThat(source.size(), equalTo(1)); - assertThat((String) source.get("field3"), equalTo("value3")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("an_alias") + .setQuery(QueryBuilders.termQuery("_id", "1")), + response -> { + assertThat(response.getHits().getHits().length, equalTo(1)); + var source = response.getHits().getHits()[0].getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat((String) source.get("field3"), equalTo("value3")); + } + ); - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) - .prepareSearch("*_alias") - .setQuery(QueryBuilders.termQuery("_id", "1")) - .get(); - assertThat(response.getHits().getHits().length, equalTo(1)); - source = response.getHits().getHits()[0].getSourceAsMap(); - assertThat(source.size(), equalTo(2)); - assertThat((String) source.get("field2"), equalTo("value2")); - assertThat((String) source.get("field3"), equalTo("value3")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("*_alias") + .setQuery(QueryBuilders.termQuery("_id", "1")), + response -> { + assertThat(response.getHits().getHits().length, equalTo(1)); + var source = response.getHits().getHits()[0].getSourceAsMap(); + assertThat(source.size(), equalTo(2)); + assertThat((String) source.get("field2"), equalTo("value2")); + assertThat((String) source.get("field3"), equalTo("value3")); + } + ); - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) - .prepareSearch("*_alias", "t*") - .setQuery(QueryBuilders.termQuery("_id", "1")) - .get(); - assertThat(response.getHits().getHits().length, equalTo(1)); - source = response.getHits().getHits()[0].getSourceAsMap(); - assertThat(source.size(), equalTo(3)); - assertThat((String) source.get("field1"), equalTo("value1")); - assertThat((String) source.get("field2"), equalTo("value2")); - assertThat((String) source.get("field3"), equalTo("value3")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("*_alias", "t*") + .setQuery(QueryBuilders.termQuery("_id", "1")), + response -> { + assertThat(response.getHits().getHits().length, equalTo(1)); + var source = response.getHits().getHits()[0].getSourceAsMap(); + assertThat(source.size(), equalTo(3)); + assertThat((String) source.get("field1"), equalTo("value1")); + assertThat((String) source.get("field2"), equalTo("value2")); + assertThat((String) source.get("field3"), equalTo("value3")); + } + ); } public void testSearchResolveDataStreams() throws Exception { @@ -201,52 +216,68 @@ public void testSearchResolveDataStreams() throws Exception { .setRefreshPolicy(IMMEDIATE) .get(); - SearchResponse response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ).prepareSearch("test").setQuery(QueryBuilders.termQuery("_id", 
"1")).get(); - assertThat(response.getHits().getHits().length, equalTo(1)); - Map source = response.getHits().getHits()[0].getSourceAsMap(); - assertThat(source.size(), equalTo(1)); - assertThat((String) source.get("field1"), equalTo("value1")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(QueryBuilders.termQuery("_id", "1")), + response -> { + assertThat(response.getHits().getHits().length, equalTo(1)); + Map source = response.getHits().getHits()[0].getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat((String) source.get("field1"), equalTo("value1")); + } + ); - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) - .prepareSearch("my_alias") - .setQuery(QueryBuilders.termQuery("_id", "1")) - .get(); - assertThat(response.getHits().getHits().length, equalTo(1)); - source = response.getHits().getHits()[0].getSourceAsMap(); - assertThat(source.size(), equalTo(1)); - assertThat((String) source.get("field2"), equalTo("value2")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("my_alias") + .setQuery(QueryBuilders.termQuery("_id", "1")), + response -> { + assertThat(response.getHits().getHits().length, equalTo(1)); + var source = response.getHits().getHits()[0].getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat((String) source.get("field2"), equalTo("value2")); + } + ); - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) - .prepareSearch("an_alias") - .setQuery(QueryBuilders.termQuery("_id", "1")) - .get(); - assertThat(response.getHits().getHits().length, equalTo(1)); - source = response.getHits().getHits()[0].getSourceAsMap(); - assertThat(source.size(), equalTo(1)); - assertThat((String) source.get("field3"), equalTo("value3")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("an_alias") + .setQuery(QueryBuilders.termQuery("_id", "1")), + response -> { + assertThat(response.getHits().getHits().length, equalTo(1)); + var source = response.getHits().getHits()[0].getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat((String) source.get("field3"), equalTo("value3")); + } + ); - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) - .prepareSearch("*_alias") - .setQuery(QueryBuilders.termQuery("_id", "1")) - .get(); - assertThat(response.getHits().getHits().length, equalTo(1)); - source = response.getHits().getHits()[0].getSourceAsMap(); - assertThat(source.size(), equalTo(2)); - assertThat((String) source.get("field2"), equalTo("value2")); - assertThat((String) source.get("field3"), equalTo("value3")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("*_alias") + .setQuery(QueryBuilders.termQuery("_id", "1")), + response -> { + assertThat(response.getHits().getHits().length, equalTo(1)); + var source = response.getHits().getHits()[0].getSourceAsMap(); + assertThat(source.size(), equalTo(2)); + assertThat((String) source.get("field2"), equalTo("value2")); + assertThat((String) 
source.get("field3"), equalTo("value3")); + } + ); - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) - .prepareSearch("*_alias", "t*") - .setQuery(QueryBuilders.termQuery("_id", "1")) - .get(); - assertThat(response.getHits().getHits().length, equalTo(1)); - source = response.getHits().getHits()[0].getSourceAsMap(); - assertThat(source.size(), equalTo(3)); - assertThat((String) source.get("field1"), equalTo("value1")); - assertThat((String) source.get("field2"), equalTo("value2")); - assertThat((String) source.get("field3"), equalTo("value3")); + assertResponse( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("*_alias", "t*") + .setQuery(QueryBuilders.termQuery("_id", "1")), + response -> { + assertThat(response.getHits().getHits().length, equalTo(1)); + var source = response.getHits().getHits()[0].getSourceAsMap(); + assertThat(source.size(), equalTo(3)); + assertThat((String) source.get("field1"), equalTo("value1")); + assertThat((String) source.get("field2"), equalTo("value2")); + assertThat((String) source.get("field3"), equalTo("value3")); + } + ); } private void putComposableIndexTemplate(String id, List patterns) throws IOException { diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java index afe9e68716579..d4375d15e6a6d 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; import org.elasticsearch.action.search.MultiSearchResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.core.Strings; @@ -24,6 +23,7 @@ import java.util.Map; import static java.util.Collections.singletonMap; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -102,32 +102,34 @@ public void testSearchAndMSearch() throws Exception { final String field = "foo"; indexRandom(true, prepareIndex(index).setSource(field, "bar")); - SearchResponse response = prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()).get(); - final long hits = response.getHits().getTotalHits().value; - assertThat(hits, greaterThan(0L)); - response = client().filterWithHeader( - singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)) - ).prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()).get(); - assertEquals(response.getHits().getTotalHits().value, hits); - - final long multiHits; - MultiSearchResponse multiSearchResponse = client().prepareMultiSearch() - .add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())) - .get(); - try { - multiHits = multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value; + 
assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), response -> { + final long hits = response.getHits().getTotalHits().value; assertThat(hits, greaterThan(0L)); - } finally { - multiSearchResponse.decRef(); - } - multiSearchResponse = client().filterWithHeader( - singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)) - ).prepareMultiSearch().add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())).get(); - try { - assertEquals(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value, multiHits); - } finally { - multiSearchResponse.decRef(); - } + assertResponse( + client().filterWithHeader( + singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)) + ).prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), + response2 -> assertEquals(response2.getHits().getTotalHits().value, hits) + ); + final long multiHits; + MultiSearchResponse multiSearchResponse = client().prepareMultiSearch() + .add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())) + .get(); + try { + multiHits = multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value; + assertThat(hits, greaterThan(0L)); + } finally { + multiSearchResponse.decRef(); + } + multiSearchResponse = client().filterWithHeader( + singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)) + ).prepareMultiSearch().add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())).get(); + try { + assertEquals(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value, multiHits); + } finally { + multiSearchResponse.decRef(); + } + }); } public void testGetIndex() throws Exception { diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java index 2e5d92839d3f7..08fb0c79a076c 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.search.MultiSearchResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -39,7 +38,9 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.BASIC_AUTH_HEADER; import static 
org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; @@ -144,19 +145,13 @@ public void testSingleRole() throws Exception { Client client = client(); // no specifying an index, should replace indices with the permitted ones (test & test1) - SearchResponse searchResponse = client.prepareSearch().setQuery(matchAllQuery()).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 2); + assertHitCountAndNoFailures(prepareSearch().setQuery(matchAllQuery()), 2); // _all should expand to all the permitted indices - searchResponse = client.prepareSearch("_all").setQuery(matchAllQuery()).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 2); + assertHitCountAndNoFailures(client.prepareSearch("_all").setQuery(matchAllQuery()), 2); // wildcards should expand to all the permitted indices - searchResponse = client.prepareSearch("test*").setQuery(matchAllQuery()).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 2); + assertHitCountAndNoFailures(client.prepareSearch("test*").setQuery(matchAllQuery()), 2); try { client.prepareSearch("test", "test2").setQuery(matchAllQuery()).get(); @@ -174,7 +169,7 @@ public void testSingleRole() throws Exception { MultiSearchResponse.Item[] items = msearchResponse.getResponses(); assertThat(items.length, is(2)); assertThat(items[0].isFailure(), is(false)); - searchResponse = items[0].getResponse(); + var searchResponse = items[0].getResponse(); assertNoFailures(searchResponse); assertHitCount(searchResponse, 1); assertThat(items[1].isFailure(), is(false)); @@ -252,18 +247,18 @@ public void testMultipleRoles() throws Exception { Client client = client(); - SearchResponse response = client.filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user_a", USERS_PASSWD)) - ).prepareSearch("a").get(); - assertNoFailures(response); - assertHitCount(response, 1); + assertHitCountAndNoFailures( + client.filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user_a", USERS_PASSWD))) + .prepareSearch("a"), + 1 + ); String[] indices = randomDouble() < 0.3 ? new String[] { "_all" } : randomBoolean() ? new String[] { "*" } : new String[] {}; - response = client.filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user_a", USERS_PASSWD))) - .prepareSearch(indices) - .get(); - assertNoFailures(response); - assertHitCount(response, 1); + assertHitCountAndNoFailures( + client.filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user_a", USERS_PASSWD))) + .prepareSearch(indices), + 1 + ); try { indices = randomBoolean() ? new String[] { "a", "b" } : new String[] { "b", "a" }; @@ -279,25 +274,25 @@ public void testMultipleRoles() throws Exception { assertThat(e.status(), is(RestStatus.FORBIDDEN)); } - response = client.filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user_ab", USERS_PASSWD))) - .prepareSearch("b") - .get(); - assertNoFailures(response); - assertHitCount(response, 1); + assertHitCountAndNoFailures( + client.filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user_ab", USERS_PASSWD))) + .prepareSearch("b"), + 1 + ); indices = randomBoolean() ? 
new String[] { "a", "b" } : new String[] { "b", "a" }; - response = client.filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user_ab", USERS_PASSWD))) - .prepareSearch(indices) - .get(); - assertNoFailures(response); - assertHitCount(response, 2); + assertHitCountAndNoFailures( + client.filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user_ab", USERS_PASSWD))) + .prepareSearch(indices), + 2 + ); indices = randomDouble() < 0.3 ? new String[] { "_all" } : randomBoolean() ? new String[] { "*" } : new String[] {}; - response = client.filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user_ab", USERS_PASSWD))) - .prepareSearch(indices) - .get(); - assertNoFailures(response); - assertHitCount(response, 2); + assertHitCountAndNoFailures( + client.filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user_ab", USERS_PASSWD))) + .prepareSearch(indices), + 2 + ); } public void testMultiNamesWorkCorrectly() { @@ -313,8 +308,10 @@ public void testMultiNamesWorkCorrectly() { Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user_a", USERS_PASSWD)) ); - final SearchResponse searchResponse = userAClient.prepareSearch("alias1").setSize(0).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertResponse( + userAClient.prepareSearch("alias1").setSize(0), + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)) + ); final ElasticsearchSecurityException e1 = expectThrows( ElasticsearchSecurityException.class, diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/SecurityCachePermissionTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/SecurityCachePermissionTests.java index 30f8507325a7e..82622b03d8d52 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/SecurityCachePermissionTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/SecurityCachePermissionTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.integration; import org.elasticsearch.ElasticsearchSecurityException; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.TermsLookup; import org.elasticsearch.test.SecurityIntegTestCase; @@ -16,6 +15,7 @@ import org.junit.Before; import static java.util.Collections.singletonMap; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; @@ -47,15 +47,19 @@ public void loadData() { } public void testThatTermsFilterQueryDoesntLeakData() { - SearchResponse response = prepareSearch("data").setQuery( - QueryBuilders.constantScoreQuery(QueryBuilders.termsLookupQuery("token", new TermsLookup("tokens", "1", "tokens"))) - ).get(); - assertThat(response.isTimedOut(), is(false)); - assertThat(response.getHits().getHits().length, is(1)); + assertResponse( + prepareSearch("data").setQuery( + QueryBuilders.constantScoreQuery(QueryBuilders.termsLookupQuery("token", new TermsLookup("tokens", "1", "tokens"))) + ), + response -> { + assertThat(response.isTimedOut(), is(false)); + assertThat(response.getHits().getHits().length, is(1)); + 
} + ); // Repeat with unauthorized user!!!! try { - response = client().filterWithHeader( + var response = client().filterWithHeader( singletonMap( "Authorization", basicAuthHeaderValue(READ_ONE_IDX_USER, SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING) diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java index c1925b71608c1..7fc4c1520f9c6 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java @@ -92,21 +92,14 @@ public void testFetchAllByEntityWithBrokenScroll() { false, 1 ); - SearchResponse response = new SearchResponse( - internalResponse, - scrollId, - 1, - 1, - 0, - 0, - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ); Answer returnResponse = invocation -> { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocation.getArguments()[1]; - listener.onResponse(response); + ActionListener.respondAndRelease( + listener, + new SearchResponse(internalResponse, scrollId, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY) + ); return null; }; doAnswer(returnResponse).when(client).search(eq(request), any()); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java index cb07cd76a5faa..9d56528a060c3 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.Strings; @@ -44,13 +43,11 @@ import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction; import org.elasticsearch.xpack.core.security.action.user.AuthenticateRequest; import org.elasticsearch.xpack.core.security.action.user.AuthenticateResponse; -import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequestBuilder; import org.elasticsearch.xpack.core.security.action.user.DeleteUserRequestBuilder; import org.elasticsearch.xpack.core.security.action.user.DeleteUserResponse; import org.elasticsearch.xpack.core.security.action.user.GetUsersRequestBuilder; import org.elasticsearch.xpack.core.security.action.user.GetUsersResponse; import org.elasticsearch.xpack.core.security.action.user.PutUserRequestBuilder; -import org.elasticsearch.xpack.core.security.action.user.SetEnabledRequestBuilder; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; import org.elasticsearch.xpack.core.security.authc.support.Hasher; @@ -66,6 
+63,8 @@ import org.elasticsearch.xpack.core.security.user.KibanaUser; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.LocalStateSecurity; +import org.elasticsearch.xpack.security.action.user.ChangePasswordRequestBuilder; +import org.elasticsearch.xpack.security.action.user.SetEnabledRequestBuilder; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; import org.junit.Before; import org.junit.BeforeClass; @@ -81,6 +80,7 @@ import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; @@ -319,9 +319,10 @@ private void testAddUserAndRoleThenAuth(String username, String roleName) { prepareIndex("idx").setId("1").setSource("body", "foo").setRefreshPolicy(IMMEDIATE).get(); String token = basicAuthHeaderValue(username, new SecureString("s3krit-password")); - SearchResponse searchResp = client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx").get(); - - assertEquals(1L, searchResp.getHits().getTotalHits().value); + assertResponse( + client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + ); assertClusterHealthOnlyAuthorizesWhenAnonymousRoleActive(token); } @@ -341,9 +342,10 @@ public void testUpdatingUserAndAuthentication() throws Exception { // Index a document with the default test user prepareIndex("idx").setId("1").setSource("body", "foo").setRefreshPolicy(IMMEDIATE).get(); String token = basicAuthHeaderValue("joe", new SecureString("s3krit-password")); - SearchResponse searchResp = client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx").get(); - - assertEquals(1L, searchResp.getHits().getTotalHits().value); + assertResponse( + client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + ); preparePutUser("joe", "s3krit-password2", hasher, SecuritySettingsSource.TEST_ROLE).get(); @@ -356,8 +358,10 @@ public void testUpdatingUserAndAuthentication() throws Exception { } token = basicAuthHeaderValue("joe", new SecureString("s3krit-password2")); - searchResp = client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx").get(); - assertEquals(1L, searchResp.getHits().getTotalHits().value); + assertResponse( + client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + ); } public void testCreateDeleteAuthenticate() { @@ -375,9 +379,10 @@ public void testCreateDeleteAuthenticate() { // Index a document with the default test user prepareIndex("idx").setId("1").setSource("body", "foo").setRefreshPolicy(IMMEDIATE).get(); String token = basicAuthHeaderValue("joe", new 
SecureString("s3krit-password")); - SearchResponse searchResp = client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx").get(); - - assertEquals(1L, searchResp.getHits().getTotalHits().value); + assertResponse( + client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + ); DeleteUserResponse response = new DeleteUserRequestBuilder(client()).username("joe").get(); assertThat(response.found(), is(true)); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java index 7e5fd3a8717e2..f34983f7f125c 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java @@ -442,12 +442,7 @@ private static T expectThrows(Class expectedType, Searc } private static void assertReturnedIndices(SearchRequestBuilder searchRequestBuilder, String... indices) { - var searchResponse = searchRequestBuilder.get(); - try { - assertReturnedIndices(searchResponse, indices); - } finally { - searchResponse.decRef(); - } + assertResponse(searchRequestBuilder, searchResponse -> assertReturnedIndices(searchResponse, indices)); } private static void assertReturnedIndices(SearchResponse searchResponse, String... indices) { diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java index c5da26deaf03d..1b62c79236a9c 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java @@ -24,6 +24,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -46,29 +47,32 @@ public void testScrollIsPerUser() throws Exception { } indexRandom(true, docs); - SearchResponse response = prepareSearch("foo").setScroll(TimeValue.timeValueSeconds(5L)).setQuery(matchAllQuery()).setSize(1).get(); - assertEquals(numDocs, response.getHits().getTotalHits().value); - assertEquals(1, response.getHits().getHits().length); - - if (randomBoolean()) { - response = client().prepareSearchScroll(response.getScrollId()).setScroll(TimeValue.timeValueSeconds(5L)).get(); + assertResponse(prepareSearch("foo").setScroll(TimeValue.timeValueSeconds(5L)).setQuery(matchAllQuery()).setSize(1), response -> { assertEquals(numDocs, response.getHits().getTotalHits().value); assertEquals(1, response.getHits().getHits().length); - } - - final String scrollId = response.getScrollId(); - SearchPhaseExecutionException e = expectThrows( - SearchPhaseExecutionException.class, - () -> client().filterWithHeader( - Collections.singletonMap( - "Authorization", - UsernamePasswordToken.basicAuthHeaderValue("other", 
SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING) - ) - ).prepareSearchScroll(scrollId).get() - ); - for (ShardSearchFailure failure : e.shardFailures()) { - assertThat(ExceptionsHelper.unwrapCause(failure.getCause()), instanceOf(SearchContextMissingException.class)); - } + if (randomBoolean()) { + assertResponse( + client().prepareSearchScroll(response.getScrollId()).setScroll(TimeValue.timeValueSeconds(5L)), + response2 -> { + assertEquals(numDocs, response2.getHits().getTotalHits().value); + assertEquals(1, response2.getHits().getHits().length); + } + ); + } + final String scrollId = response.getScrollId(); + SearchPhaseExecutionException e = expectThrows( + SearchPhaseExecutionException.class, + () -> client().filterWithHeader( + Collections.singletonMap( + "Authorization", + UsernamePasswordToken.basicAuthHeaderValue("other", SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING) + ) + ).prepareSearchScroll(scrollId).get() + ); + for (ShardSearchFailure failure : e.shardFailures()) { + assertThat(ExceptionsHelper.unwrapCause(failure.getCause()), instanceOf(SearchContextMissingException.class)); + } + }); } public void testSearchAndClearScroll() throws Exception { @@ -87,12 +91,14 @@ public void testSearchAndClearScroll() throws Exception { do { assertHitCount(response, docs.length); hits += response.getHits().getHits().length; + response.decRef(); response = client().prepareSearchScroll(response.getScrollId()).setScroll(TimeValue.timeValueSeconds(5L)).get(); } while (response.getHits().getHits().length != 0); assertThat(hits, equalTo(docs.length)); } finally { clearScroll(response.getScrollId()); + response.decRef(); } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/operator/OperatorPrivilegesSingleNodeTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/operator/OperatorPrivilegesSingleNodeTests.java index 5e22641dd8baa..9593bfa5ab723 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/operator/OperatorPrivilegesSingleNodeTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/operator/OperatorPrivilegesSingleNodeTests.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.security.operator; import org.elasticsearch.ElasticsearchSecurityException; -import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsRequest; +import org.elasticsearch.action.admin.cluster.configuration.TransportClearVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.client.internal.Client; @@ -69,7 +69,7 @@ public void testNormalSuperuserWillFailToCallOperatorOnlyAction() { final ClearVotingConfigExclusionsRequest clearVotingConfigExclusionsRequest = new ClearVotingConfigExclusionsRequest(); final ElasticsearchSecurityException e = expectThrows( ElasticsearchSecurityException.class, - () -> client().execute(ClearVotingConfigExclusionsAction.INSTANCE, clearVotingConfigExclusionsRequest).actionGet() + () -> client().execute(TransportClearVotingConfigExclusionsAction.TYPE, clearVotingConfigExclusionsRequest).actionGet() ); assertThat(e.getCause().getMessage(), containsString("Operator privileges are required for action")); } @@ 
-92,7 +92,7 @@ public void testNormalSuperuserWillFailToSetOperatorOnlySettings() { public void testOperatorUserWillSucceedToCallOperatorOnlyAction() { final Client client = createOperatorClient(); final ClearVotingConfigExclusionsRequest clearVotingConfigExclusionsRequest = new ClearVotingConfigExclusionsRequest(); - client.execute(ClearVotingConfigExclusionsAction.INSTANCE, clearVotingConfigExclusionsRequest).actionGet(); + client.execute(TransportClearVotingConfigExclusionsAction.TYPE, clearVotingConfigExclusionsRequest).actionGet(); } public void testOperatorUserWillSucceedToSetOperatorOnlySettings() { diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java index f2268a76221e3..963c42c55aa60 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java @@ -42,7 +42,6 @@ import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataRequest; import org.elasticsearch.xpack.core.security.action.role.PutRoleAction; import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest; -import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequestBuilder; import org.elasticsearch.xpack.core.security.action.user.GetUsersAction; import org.elasticsearch.xpack.core.security.action.user.GetUsersRequest; import org.elasticsearch.xpack.core.security.action.user.GetUsersResponse; @@ -65,6 +64,7 @@ import org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.security.user.ElasticUser; import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.action.user.ChangePasswordRequestBuilder; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/SecurityDomainIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/SecurityDomainIntegTests.java index 66c5b9fa02ab4..f43275c2d8b70 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/SecurityDomainIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/SecurityDomainIntegTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.security.profile; import org.elasticsearch.ElasticsearchSecurityException; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; @@ -47,6 +46,7 @@ import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.NONE; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.WAIT_UNTIL; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; import static 
org.hamcrest.Matchers.containsString; @@ -368,26 +368,31 @@ public void testDomainCaptureForServiceToken() throws IOException { .get(); } - private void assertAccessToken(CreateTokenResponse createTokenResponse) throws IOException { + private void assertAccessToken(CreateTokenResponse createTokenResponse) { client().filterWithHeader(Map.of("Authorization", "Bearer " + createTokenResponse.getTokenString())) .admin() .cluster() .prepareHealth() .get(); - final SearchResponse searchResponse = prepareSearch(SecuritySystemIndices.SECURITY_TOKENS_ALIAS).get(); - - final String encodedAuthentication = createTokenResponse.getAuthentication().encode(); - for (SearchHit searchHit : searchResponse.getHits().getHits()) { - final XContentTestUtils.JsonMapView responseView = XContentTestUtils.createJsonMapView( - new ByteArrayInputStream(searchHit.getSourceAsString().getBytes(StandardCharsets.UTF_8)) - ); - if (encodedAuthentication.equals(responseView.get("access_token.user_token.authentication"))) { - if (isOtherDomain) { - assertThat(responseView.get("access_token.realm_domain"), equalTo(OTHER_DOMAIN_REALM_MAP)); - } else { - assertThat(responseView.get("access_token.realm_domain"), nullValue()); + assertResponse(prepareSearch(SecuritySystemIndices.SECURITY_TOKENS_ALIAS), searchResponse -> { + final String encodedAuthentication; + try { + encodedAuthentication = createTokenResponse.getAuthentication().encode(); + } catch (IOException e) { + throw new AssertionError(e); + } + for (SearchHit searchHit : searchResponse.getHits().getHits()) { + final XContentTestUtils.JsonMapView responseView = XContentTestUtils.createJsonMapView( + new ByteArrayInputStream(searchHit.getSourceAsString().getBytes(StandardCharsets.UTF_8)) + ); + if (encodedAuthentication.equals(responseView.get("access_token.user_token.authentication"))) { + if (isOtherDomain) { + assertThat(responseView.get("access_token.realm_domain"), equalTo(OTHER_DOMAIN_REALM_MAP)); + } else { + assertThat(responseView.get("access_token.realm_domain"), nullValue()); + } } } - } + }); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 6d7f6fcd3822b..51a902d7e12c0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -145,7 +145,6 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsAction; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; import org.elasticsearch.xpack.core.security.action.saml.SamlAuthenticateAction; -import org.elasticsearch.xpack.core.security.action.saml.SamlCompleteLogoutAction; import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionAction; import org.elasticsearch.xpack.core.security.action.saml.SamlLogoutAction; import org.elasticsearch.xpack.core.security.action.saml.SamlPrepareAuthenticationAction; @@ -161,14 +160,12 @@ import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenAction; import org.elasticsearch.xpack.core.security.action.token.RefreshTokenAction; import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction; -import org.elasticsearch.xpack.core.security.action.user.ChangePasswordAction; import org.elasticsearch.xpack.core.security.action.user.DeleteUserAction; import 
org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesAction; import org.elasticsearch.xpack.core.security.action.user.GetUsersAction; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; import org.elasticsearch.xpack.core.security.action.user.ProfileHasPrivilegesAction; import org.elasticsearch.xpack.core.security.action.user.PutUserAction; -import org.elasticsearch.xpack.core.security.action.user.SetEnabledAction; import org.elasticsearch.xpack.core.security.authc.AuthenticationFailureHandler; import org.elasticsearch.xpack.core.security.authc.AuthenticationServiceField; import org.elasticsearch.xpack.core.security.authc.DefaultAuthenticationFailureHandler; @@ -1301,9 +1298,9 @@ public void onIndexModule(IndexModule module) { new ActionHandler<>(GetRolesAction.INSTANCE, TransportGetRolesAction.class), new ActionHandler<>(PutRoleAction.INSTANCE, TransportPutRoleAction.class), new ActionHandler<>(DeleteRoleAction.INSTANCE, TransportDeleteRoleAction.class), - new ActionHandler<>(ChangePasswordAction.INSTANCE, TransportChangePasswordAction.class), + new ActionHandler<>(TransportChangePasswordAction.TYPE, TransportChangePasswordAction.class), new ActionHandler<>(AuthenticateAction.INSTANCE, TransportAuthenticateAction.class), - new ActionHandler<>(SetEnabledAction.INSTANCE, TransportSetEnabledAction.class), + new ActionHandler<>(TransportSetEnabledAction.TYPE, TransportSetEnabledAction.class), new ActionHandler<>(HasPrivilegesAction.INSTANCE, TransportHasPrivilegesAction.class), new ActionHandler<>(GetUserPrivilegesAction.INSTANCE, TransportGetUserPrivilegesAction.class), new ActionHandler<>(GetRoleMappingsAction.INSTANCE, TransportGetRoleMappingsAction.class), @@ -1317,7 +1314,7 @@ public void onIndexModule(IndexModule module) { new ActionHandler<>(SamlAuthenticateAction.INSTANCE, TransportSamlAuthenticateAction.class), new ActionHandler<>(SamlLogoutAction.INSTANCE, TransportSamlLogoutAction.class), new ActionHandler<>(SamlInvalidateSessionAction.INSTANCE, TransportSamlInvalidateSessionAction.class), - new ActionHandler<>(SamlCompleteLogoutAction.INSTANCE, TransportSamlCompleteLogoutAction.class), + new ActionHandler<>(TransportSamlCompleteLogoutAction.TYPE, TransportSamlCompleteLogoutAction.class), new ActionHandler<>(SamlSpMetadataAction.INSTANCE, TransportSamlSpMetadataAction.class), new ActionHandler<>(OpenIdConnectPrepareAuthenticationAction.INSTANCE, TransportOpenIdConnectPrepareAuthenticationAction.class), new ActionHandler<>(OpenIdConnectAuthenticateAction.INSTANCE, TransportOpenIdConnectAuthenticateAction.class), diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java index 997f58a01c0e1..08544d316e87a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java @@ -14,7 +14,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.open.OpenIndexAction; import 
org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilterChain; @@ -131,7 +131,9 @@ private void ap Request request, ActionListener listener ) { - if (TransportCloseIndexAction.NAME.equals(action) || OpenIndexAction.NAME.equals(action) || DeleteIndexAction.NAME.equals(action)) { + if (TransportCloseIndexAction.NAME.equals(action) + || OpenIndexAction.NAME.equals(action) + || TransportDeleteIndexAction.TYPE.name().equals(action)) { IndicesRequest indicesRequest = (IndicesRequest) request; try { destructiveOperations.failDestructive(indicesRequest.indices()); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlCompleteLogoutAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlCompleteLogoutAction.java index 0b76af3cf542e..7b45313b0e24f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlCompleteLogoutAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlCompleteLogoutAction.java @@ -8,13 +8,13 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.security.action.saml.SamlCompleteLogoutAction; import org.elasticsearch.xpack.core.security.action.saml.SamlCompleteLogoutRequest; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.saml.SamlLogoutResponseHandler; @@ -30,17 +30,14 @@ */ public final class TransportSamlCompleteLogoutAction extends HandledTransportAction { + public static final ActionType TYPE = ActionType.emptyResponse( + "cluster:admin/xpack/security/saml/complete_logout" + ); private final Realms realms; @Inject public TransportSamlCompleteLogoutAction(TransportService transportService, ActionFilters actionFilters, Realms realms) { - super( - SamlCompleteLogoutAction.NAME, - transportService, - actionFilters, - SamlCompleteLogoutRequest::new, - EsExecutors.DIRECT_EXECUTOR_SERVICE - ); + super(TYPE.name(), transportService, actionFilters, SamlCompleteLogoutRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.realms = realms; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ChangePasswordRequestBuilder.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilder.java similarity index 96% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ChangePasswordRequestBuilder.java rename to x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilder.java index 927f839f267b4..c5fbd7ca3c397 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ChangePasswordRequestBuilder.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilder.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.core.security.action.user; +package org.elasticsearch.xpack.security.action.user; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionRequestBuilder; @@ -18,6 +18,7 @@ import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequest; import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.support.Validation; import org.elasticsearch.xpack.core.security.user.User; @@ -36,7 +37,7 @@ public class ChangePasswordRequestBuilder extends ActionRequestBuilder { public ChangePasswordRequestBuilder(ElasticsearchClient client) { - super(client, ChangePasswordAction.INSTANCE, new ChangePasswordRequest()); + super(client, TransportChangePasswordAction.TYPE, new ChangePasswordRequest()); } public ChangePasswordRequestBuilder username(String username) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/SetEnabledRequestBuilder.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledRequestBuilder.java similarity index 85% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/SetEnabledRequestBuilder.java rename to x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledRequestBuilder.java index 1a6d522f46e21..69ebf247ad8fa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/SetEnabledRequestBuilder.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/SetEnabledRequestBuilder.java @@ -4,12 +4,13 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.core.security.action.user; +package org.elasticsearch.xpack.security.action.user; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.WriteRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; +import org.elasticsearch.xpack.core.security.action.user.SetEnabledRequest; /** * Request builder for setting a user as enabled or disabled @@ -19,7 +20,7 @@ public class SetEnabledRequestBuilder extends ActionRequestBuilder { public SetEnabledRequestBuilder(ElasticsearchClient client) { - super(client, SetEnabledAction.INSTANCE, new SetEnabledRequest()); + super(client, TransportSetEnabledAction.TYPE, new SetEnabledRequest()); } /** diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java index 77c6fd6882bc2..fc8f931612907 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java @@ -8,6 +8,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; @@ -16,7 +17,6 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackSettings; -import org.elasticsearch.xpack.core.security.action.user.ChangePasswordAction; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequest; import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.user.AnonymousUser; @@ -26,6 +26,9 @@ public class TransportChangePasswordAction extends HandledTransportAction { + public static final ActionType TYPE = ActionType.emptyResponse( + "cluster:admin/xpack/security/user/change_password" + ); private final Settings settings; private final NativeUsersStore nativeUsersStore; @@ -36,7 +39,7 @@ public TransportChangePasswordAction( ActionFilters actionFilters, NativeUsersStore nativeUsersStore ) { - super(ChangePasswordAction.NAME, transportService, actionFilters, ChangePasswordRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); + super(TYPE.name(), transportService, actionFilters, ChangePasswordRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.settings = settings; this.nativeUsersStore = nativeUsersStore; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java index 10f58499dd92f..4647ac0cf5f66 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java @@ -8,6 +8,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; import 
org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; @@ -16,7 +17,6 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.SecurityContext; -import org.elasticsearch.xpack.core.security.action.user.SetEnabledAction; import org.elasticsearch.xpack.core.security.action.user.SetEnabledRequest; import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.core.security.user.AnonymousUser; @@ -28,6 +28,7 @@ */ public class TransportSetEnabledAction extends HandledTransportAction { + public static final ActionType TYPE = ActionType.emptyResponse("cluster:admin/xpack/security/user/set_enabled"); private final Settings settings; private final SecurityContext securityContext; private final NativeUsersStore usersStore; @@ -40,7 +41,7 @@ public TransportSetEnabledAction( SecurityContext securityContext, NativeUsersStore usersStore ) { - super(SetEnabledAction.NAME, transportService, actionFilters, SetEnabledRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); + super(TYPE.name(), transportService, actionFilters, SetEnabledRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.settings = settings; this.securityContext = securityContext; this.usersStore = usersStore; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java index b28680f35e083..e2b9c36c1d0ee 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java @@ -83,13 +83,11 @@ import org.elasticsearch.xpack.core.security.action.service.CreateServiceAccountTokenRequest; import org.elasticsearch.xpack.core.security.action.service.DeleteServiceAccountTokenAction; import org.elasticsearch.xpack.core.security.action.service.DeleteServiceAccountTokenRequest; -import org.elasticsearch.xpack.core.security.action.user.ChangePasswordAction; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequest; import org.elasticsearch.xpack.core.security.action.user.DeleteUserAction; import org.elasticsearch.xpack.core.security.action.user.DeleteUserRequest; import org.elasticsearch.xpack.core.security.action.user.PutUserAction; import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; -import org.elasticsearch.xpack.core.security.action.user.SetEnabledAction; import org.elasticsearch.xpack.core.security.action.user.SetEnabledRequest; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationField; @@ -102,6 +100,8 @@ import org.elasticsearch.xpack.core.security.user.InternalUser; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.Security; +import org.elasticsearch.xpack.security.action.user.TransportChangePasswordAction; +import org.elasticsearch.xpack.security.action.user.TransportSetEnabledAction; import org.elasticsearch.xpack.security.audit.AuditLevel; import org.elasticsearch.xpack.security.audit.AuditTrail; import org.elasticsearch.xpack.security.audit.AuditUtil; @@ -290,8 +290,8 @@ public class LoggingAuditTrail implements 
AuditTrail, ClusterStateListener { PutUserAction.NAME, PutRoleAction.NAME, PutRoleMappingAction.NAME, - SetEnabledAction.NAME, - ChangePasswordAction.NAME, + TransportSetEnabledAction.TYPE.name(), + TransportChangePasswordAction.TYPE.name(), CreateApiKeyAction.NAME, GrantApiKeyAction.NAME, PutPrivilegesAction.NAME, @@ -734,10 +734,10 @@ public void accessGranted( assert PutRoleMappingAction.NAME.equals(action); securityChangeLogEntryBuilder(requestId).withRequestBody((PutRoleMappingRequest) msg).build(); } else if (msg instanceof SetEnabledRequest) { - assert SetEnabledAction.NAME.equals(action); + assert TransportSetEnabledAction.TYPE.name().equals(action); securityChangeLogEntryBuilder(requestId).withRequestBody((SetEnabledRequest) msg).build(); } else if (msg instanceof ChangePasswordRequest) { - assert ChangePasswordAction.NAME.equals(action); + assert TransportChangePasswordAction.TYPE.name().equals(action); securityChangeLogEntryBuilder(requestId).withRequestBody((ChangePasswordRequest) msg).build(); } else if (msg instanceof CreateApiKeyRequest) { assert CreateApiKeyAction.NAME.equals(action); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java index 1c1fed5540248..b4c154e99b466 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java @@ -49,7 +49,6 @@ import org.elasticsearch.xpack.core.security.action.apikey.GetApiKeyRequest; import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction; import org.elasticsearch.xpack.core.security.action.user.AuthenticateRequest; -import org.elasticsearch.xpack.core.security.action.user.ChangePasswordAction; import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesAction; import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; @@ -84,6 +83,7 @@ import org.elasticsearch.xpack.core.security.authz.privilege.Privilege; import org.elasticsearch.xpack.core.security.support.StringMatcher; import org.elasticsearch.xpack.core.sql.SqlAsyncActionNames; +import org.elasticsearch.xpack.security.action.user.TransportChangePasswordAction; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; @@ -112,7 +112,7 @@ public class RBACEngine implements AuthorizationEngine { private static final Predicate SAME_USER_PRIVILEGE = StringMatcher.of( - ChangePasswordAction.NAME, + TransportChangePasswordAction.TYPE.name(), AuthenticateAction.NAME, HasPrivilegesAction.NAME, GetUserPrivilegesAction.NAME, @@ -219,7 +219,7 @@ static boolean checkSameUserPermissions(String action, TransportRequest request, } final boolean sameUsername = authentication.getEffectiveSubject().getUser().principal().equals(username); - if (sameUsername && ChangePasswordAction.NAME.equals(action)) { + if (sameUsername && TransportChangePasswordAction.TYPE.name().equals(action)) { return checkChangePasswordAction(authentication); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java index 
2f5f809702ccd..e335bd8583a88 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java @@ -7,12 +7,12 @@ package org.elasticsearch.xpack.security.operator; -import org.elasticsearch.action.admin.cluster.allocation.DeleteDesiredBalanceAction; -import org.elasticsearch.action.admin.cluster.allocation.GetDesiredBalanceAction; -import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsAction; -import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsAction; -import org.elasticsearch.action.admin.cluster.desirednodes.DeleteDesiredNodesAction; +import org.elasticsearch.action.admin.cluster.allocation.TransportDeleteDesiredBalanceAction; +import org.elasticsearch.action.admin.cluster.allocation.TransportGetDesiredBalanceAction; +import org.elasticsearch.action.admin.cluster.configuration.TransportAddVotingConfigExclusionsAction; +import org.elasticsearch.action.admin.cluster.configuration.TransportClearVotingConfigExclusionsAction; import org.elasticsearch.action.admin.cluster.desirednodes.GetDesiredNodesAction; +import org.elasticsearch.action.admin.cluster.desirednodes.TransportDeleteDesiredNodesAction; import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesAction; import org.elasticsearch.action.admin.cluster.node.shutdown.PrevalidateNodeRemovalAction; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; @@ -34,8 +34,8 @@ public class DefaultOperatorOnlyRegistry implements OperatorOnlyRegistry { public static final Set SIMPLE_ACTIONS = Set.of( - AddVotingConfigExclusionsAction.NAME, - ClearVotingConfigExclusionsAction.NAME, + TransportAddVotingConfigExclusionsAction.TYPE.name(), + TransportClearVotingConfigExclusionsAction.TYPE.name(), PutLicenseAction.NAME, DeleteLicenseAction.NAME, // Autoscaling does not publish its actions to core, literal strings are needed. 
@@ -50,11 +50,11 @@ public class DefaultOperatorOnlyRegistry implements OperatorOnlyRegistry { // Node removal prevalidation API PrevalidateNodeRemovalAction.NAME, // Desired Nodes API - DeleteDesiredNodesAction.NAME, + TransportDeleteDesiredNodesAction.TYPE.name(), GetDesiredNodesAction.NAME, UpdateDesiredNodesAction.NAME, - GetDesiredBalanceAction.NAME, - DeleteDesiredBalanceAction.NAME + TransportGetDesiredBalanceAction.TYPE.name(), + TransportDeleteDesiredBalanceAction.TYPE.name() ); private final ClusterSettings clusterSettings; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlCompleteLogoutAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlCompleteLogoutAction.java index 6a4bcac251461..9c222c7cc88b3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlCompleteLogoutAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlCompleteLogoutAction.java @@ -24,8 +24,8 @@ import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.core.security.action.saml.SamlCompleteLogoutAction; import org.elasticsearch.xpack.core.security.action.saml.SamlCompleteLogoutRequest; +import org.elasticsearch.xpack.security.action.saml.TransportSamlCompleteLogoutAction; import java.io.IOException; import java.util.List; @@ -82,7 +82,7 @@ protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClien samlCompleteLogoutRequest.getValidRequestIds() ); return channel -> client.execute( - SamlCompleteLogoutAction.INSTANCE, + TransportSamlCompleteLogoutAction.TYPE, samlCompleteLogoutRequest, new RestBuilderListener<>(channel) { @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java index b76d2b7cf7c42..68500c4d07e26 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestChangePasswordAction.java @@ -22,9 +22,9 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.SecurityContext; -import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequestBuilder; import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.action.user.ChangePasswordRequestBuilder; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestSetEnabledAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestSetEnabledAction.java index bfa78b60c8b66..f34450cbbe1ef 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestSetEnabledAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestSetEnabledAction.java @@ -18,7 +18,7 @@ import org.elasticsearch.rest.ServerlessScope; 
import org.elasticsearch.rest.action.RestBuilderListener; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.core.security.action.user.SetEnabledRequestBuilder; +import org.elasticsearch.xpack.security.action.user.SetEnabledRequestBuilder; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java index a7a0efbbf4aac..9eac5512520b2 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java @@ -12,7 +12,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.open.OpenIndexAction; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -67,7 +67,9 @@ class ServerTransportFilter { * be sent back to the sender. */ void inbound(String action, TransportRequest request, TransportChannel transportChannel, ActionListener listener) { - if (TransportCloseIndexAction.NAME.equals(action) || OpenIndexAction.NAME.equals(action) || DeleteIndexAction.NAME.equals(action)) { + if (TransportCloseIndexAction.NAME.equals(action) + || OpenIndexAction.NAME.equals(action) + || TransportDeleteIndexAction.TYPE.name().equals(action)) { IndicesRequest indicesRequest = (IndicesRequest) request; try { destructiveOperations.failDestructive(indicesRequest.indices()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java index 721df8867c96a..a2ab6c1864783 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.MockIndicesRequest; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.open.OpenIndexAction; import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.action.support.DestructiveOperations; @@ -229,7 +229,7 @@ public void testApplyDestructiveOperations() throws Exception { IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()), randomFrom("*", "_all", "test*") ); - String action = randomFrom(TransportCloseIndexAction.NAME, OpenIndexAction.NAME, DeleteIndexAction.NAME); + String action = randomFrom(TransportCloseIndexAction.NAME, OpenIndexAction.NAME, TransportDeleteIndexAction.TYPE.name()); ActionListener listener = mock(ActionListener.class); 
Task task = mock(Task.class); User user = new User("username", "r1", "r2"); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index eb702ed281014..8743453d33a35 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -198,47 +198,51 @@ protected void SearchRequest searchRequest = (SearchRequest) request; searchRequests.add(searchRequest); final SearchHit[] hits = searchFunction.apply(searchRequest); - final SearchResponse response = new SearchResponse( - new SearchResponseSections( - new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), + ActionListener.respondAndRelease( + listener, + (Response) new SearchResponse( + new SearchResponseSections( + new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), + null, + null, + false, + false, + null, + 1 + ), + "_scrollId1", + 1, + 1, + 0, + 1, null, - null, - false, - false, - null, - 1 - ), - "_scrollId1", - 1, - 1, - 0, - 1, - null, - null + null + ) ); - listener.onResponse((Response) response); } else if (TransportSearchScrollAction.TYPE.name().equals(action.name())) { assertThat(request, instanceOf(SearchScrollRequest.class)); final SearchHit[] hits = new SearchHit[0]; - final SearchResponse response = new SearchResponse( - new SearchResponseSections( - new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), + ActionListener.respondAndRelease( + listener, + (Response) new SearchResponse( + new SearchResponseSections( + new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), + null, + null, + false, + false, + null, + 1 + ), + "_scrollId1", + 1, + 1, + 0, + 1, null, - null, - false, - false, - null, - 1 - ), - "_scrollId1", - 1, - 1, - 0, - 1, - null, - null + null + ) ); - listener.onResponse((Response) response); } else if (TransportClearScrollAction.NAME.equals(action.name())) { assertThat(request, instanceOf(ClearScrollRequest.class)); ClearScrollRequest scrollRequest = (ClearScrollRequest) request; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilderTests.java index a3021453ee028..df5cebdf735ac 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilderTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequest; -import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequestBuilder; import org.elasticsearch.xpack.core.security.authc.support.Hasher; import java.io.IOException; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java index 47811fe8a3e7e..3385b02147890 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java @@ -83,13 +83,11 @@ import org.elasticsearch.xpack.core.security.action.service.CreateServiceAccountTokenRequest; import org.elasticsearch.xpack.core.security.action.service.DeleteServiceAccountTokenAction; import org.elasticsearch.xpack.core.security.action.service.DeleteServiceAccountTokenRequest; -import org.elasticsearch.xpack.core.security.action.user.ChangePasswordAction; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequest; import org.elasticsearch.xpack.core.security.action.user.DeleteUserAction; import org.elasticsearch.xpack.core.security.action.user.DeleteUserRequest; import org.elasticsearch.xpack.core.security.action.user.PutUserAction; import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; -import org.elasticsearch.xpack.core.security.action.user.SetEnabledAction; import org.elasticsearch.xpack.core.security.action.user.SetEnabledRequest; import org.elasticsearch.xpack.core.security.audit.logfile.CapturingLogger; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -115,6 +113,8 @@ import org.elasticsearch.xpack.core.security.user.InternalUsers; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.security.user.UsernamesField; +import org.elasticsearch.xpack.security.action.user.TransportChangePasswordAction; +import org.elasticsearch.xpack.security.action.user.TransportSetEnabledAction; import org.elasticsearch.xpack.security.audit.AuditLevel; import org.elasticsearch.xpack.security.audit.AuditTrail; import org.elasticsearch.xpack.security.audit.AuditUtil; @@ -1337,7 +1337,7 @@ public void testSecurityConfigChangeEventFormattingForUsers() throws IOException // enable user setEnabledRequest.enabled(true); setEnabledRequest.username(username); - auditTrail.accessGranted(requestId, authentication, SetEnabledAction.NAME, setEnabledRequest, authorizationInfo); + auditTrail.accessGranted(requestId, authentication, TransportSetEnabledAction.TYPE.name(), setEnabledRequest, authorizationInfo); output = CapturingLogger.output(logger.getName(), Level.INFO); assertThat(output.size(), is(2)); String generatedEnableUserAuditEventString = output.get(1); @@ -1362,7 +1362,7 @@ public void testSecurityConfigChangeEventFormattingForUsers() throws IOException // disable user setEnabledRequest.enabled(false); setEnabledRequest.username(username); - auditTrail.accessGranted(requestId, authentication, SetEnabledAction.NAME, setEnabledRequest, authorizationInfo); + auditTrail.accessGranted(requestId, authentication, TransportSetEnabledAction.TYPE.name(), setEnabledRequest, authorizationInfo); output = CapturingLogger.output(logger.getName(), Level.INFO); assertThat(output.size(), is(2)); String generatedDisableUserAuditEventString = output.get(1); @@ -1386,7 +1386,13 @@ public void testSecurityConfigChangeEventFormattingForUsers() throws IOException changePasswordRequest.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values())); changePasswordRequest.username(username); changePasswordRequest.passwordHash(randomFrom(randomAlphaOfLengthBetween(0, 8).toCharArray(), null)); - auditTrail.accessGranted(requestId, 
authentication, ChangePasswordAction.NAME, changePasswordRequest, authorizationInfo); + auditTrail.accessGranted( + requestId, + authentication, + TransportChangePasswordAction.TYPE.name(), + changePasswordRequest, + authorizationInfo + ); output = CapturingLogger.output(logger.getName(), Level.INFO); assertThat(output.size(), is(2)); String generatedChangePasswordAuditEventString = output.get(1); @@ -1960,8 +1966,8 @@ public void testSecurityConfigChangedEventSelection() { new Tuple<>(PutUserAction.NAME, new PutUserRequest()), new Tuple<>(PutRoleAction.NAME, new PutRoleRequest()), new Tuple<>(PutRoleMappingAction.NAME, new PutRoleMappingRequest()), - new Tuple<>(SetEnabledAction.NAME, new SetEnabledRequest()), - new Tuple<>(ChangePasswordAction.NAME, new ChangePasswordRequest()), + new Tuple<>(TransportSetEnabledAction.TYPE.name(), new SetEnabledRequest()), + new Tuple<>(TransportChangePasswordAction.TYPE.name(), new ChangePasswordRequest()), new Tuple<>(CreateApiKeyAction.NAME, new CreateApiKeyRequest()), new Tuple<>(GrantApiKeyAction.NAME, new GrantApiKeyRequest()), new Tuple<>(PutPrivilegesAction.NAME, new PutPrivilegesRequest()), diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index 2031cd4f7685b..25194ca1e0234 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -275,7 +275,7 @@ public void testGetApiKeys() throws Exception { doAnswer(invocationOnMock -> { searchRequest.set((SearchRequest) invocationOnMock.getArguments()[0]); ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; - listener.onResponse(SearchResponse.empty(() -> 1L, SearchResponse.Clusters.EMPTY)); + ActionListener.respondAndRelease(listener, SearchResponse.empty(() -> 1L, SearchResponse.Clusters.EMPTY)); return null; }).when(client).search(any(SearchRequest.class), anyActionListener()); String[] realmNames = generateRandomStringArray(4, 4, true, true); @@ -336,7 +336,7 @@ public void testInvalidateApiKeys() throws Exception { doAnswer(invocationOnMock -> { searchRequest.set((SearchRequest) invocationOnMock.getArguments()[0]); ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; - listener.onResponse(SearchResponse.empty(() -> 1L, SearchResponse.Clusters.EMPTY)); + ActionListener.respondAndRelease(listener, SearchResponse.empty(() -> 1L, SearchResponse.Clusters.EMPTY)); return null; }).when(client).search(any(SearchRequest.class), anyActionListener()); PlainActionFuture listener = new PlainActionFuture<>(); @@ -427,17 +427,10 @@ public void testInvalidateApiKeysWillSetInvalidatedFlagAndRecordTimestamp() { null, 0 ); - final var searchResponse = new SearchResponse( - internalSearchResponse, - randomAlphaOfLengthBetween(3, 8), - 1, - 1, - 0, - 10, - null, - null + ActionListener.respondAndRelease( + listener, + new SearchResponse(internalSearchResponse, randomAlphaOfLengthBetween(3, 8), 1, 1, 0, 10, null, null) ); - listener.onResponse(searchResponse); return null; }).when(client).search(any(SearchRequest.class), anyActionListener()); @@ -756,33 +749,35 @@ public void testCrossClusterApiKeyUsageStats() { final AtomicReference searchRequest = new AtomicReference<>(); doAnswer(invocationOnMock -> { 
searchRequest.set(invocationOnMock.getArgument(0)); - final var searchResponse = new SearchResponse( - new InternalSearchResponse( - new SearchHits( - searchHits.toArray(SearchHit[]::new), - new TotalHits(searchHits.size(), TotalHits.Relation.EQUAL_TO), - randomFloat(), + final ActionListener listener = invocationOnMock.getArgument(1); + ActionListener.respondAndRelease( + listener, + new SearchResponse( + new InternalSearchResponse( + new SearchHits( + searchHits.toArray(SearchHit[]::new), + new TotalHits(searchHits.size(), TotalHits.Relation.EQUAL_TO), + randomFloat(), + null, + null, + null + ), + null, null, null, - null + false, + null, + 0 ), + randomAlphaOfLengthBetween(3, 8), + 1, + 1, + 0, + 10, null, - null, - null, - false, - null, - 0 - ), - randomAlphaOfLengthBetween(3, 8), - 1, - 1, - 0, - 10, - null, - null + null + ) ); - final ActionListener listener = invocationOnMock.getArgument(1); - listener.onResponse(searchResponse); return null; }).when(client).search(any(SearchRequest.class), anyActionListener()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java index 71b91619c66b3..772512a7f69d0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; import org.elasticsearch.action.admin.indices.create.AutoCreateAction; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.mapping.put.AutoPutMappingAction; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; @@ -137,7 +137,7 @@ public void testElasticFleetServerPrivileges() { assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(index), is(true)); - assertThat(role.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(index), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(index), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiGetAction.NAME).test(index), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(index), is(false)); @@ -152,7 +152,7 @@ public void testElasticFleetServerPrivileges() { assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(profilingIndex), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(profilingIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(profilingIndex), is(true)); - 
assertThat(role.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(profilingIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(profilingIndex), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(profilingIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiGetAction.NAME).test(profilingIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(profilingIndex), is(true)); @@ -166,7 +166,7 @@ public void testElasticFleetServerPrivileges() { assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(index), is(true)); - assertThat(role.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(index), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiGetAction.NAME).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(index), is(true)); @@ -195,7 +195,7 @@ public void testElasticFleetServerPrivileges() { assertThat(role.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(dotFleetIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiSearchAction.TYPE.name()).test(dotFleetIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(IndicesStatsAction.NAME).test(dotFleetIndex), is(true)); - assertThat(role.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(dotFleetIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(dotFleetIndex), is(false)); assertThat(role.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(dotFleetIndex), is(false)); assertThat(role.indices().allowedIndicesMatcher("indices:foo").test(dotFleetIndex), is(false)); }); @@ -210,7 +210,7 @@ public void testElasticFleetServerPrivileges() { assertThat(role.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(dotFleetSecretsIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiSearchAction.TYPE.name()).test(dotFleetSecretsIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(IndicesStatsAction.NAME).test(dotFleetSecretsIndex), is(false)); - assertThat(role.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(dotFleetSecretsIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(dotFleetSecretsIndex), is(false)); assertThat(role.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(dotFleetSecretsIndex), is(false)); assertThat(role.indices().allowedIndicesMatcher("indices:foo").test(dotFleetSecretsIndex), is(false)); @@ -229,7 +229,7 @@ public void testElasticFleetServerPrivileges() { assertThat(role.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(apmSampledTracesIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiSearchAction.TYPE.name()).test(apmSampledTracesIndex), is(true)); 
assertThat(role.indices().allowedIndicesMatcher(IndicesStatsAction.NAME).test(apmSampledTracesIndex), is(true)); - assertThat(role.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(apmSampledTracesIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(apmSampledTracesIndex), is(false)); assertThat(role.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(apmSampledTracesIndex), is(false)); final String privilegeName = randomAlphaOfLengthBetween(3, 16); @@ -367,7 +367,7 @@ public void testElasticEnterpriseSearchServerAccount() { assertThat(role.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(enterpriseSearchIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(enterpriseSearchIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(enterpriseSearchIndex), is(true)); - assertThat(role.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(enterpriseSearchIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(enterpriseSearchIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(enterpriseSearchIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(enterpriseSearchIndex), is(true)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStoreTests.java index 2dec4eb8ea2b5..8d5d89b4c5054 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStoreTests.java @@ -280,18 +280,10 @@ public void testFindTokensFor() { null, 0 ); - - final SearchResponse searchResponse = new SearchResponse( - internalSearchResponse, - randomAlphaOfLengthBetween(3, 8), - 1, - 1, - 0, - 10, - null, - null + ActionListener.respondAndRelease( + l, + new SearchResponse(internalSearchResponse, randomAlphaOfLengthBetween(3, 8), 1, 1, 0, 10, null, null) ); - l.onResponse(searchResponse); } else if (r instanceof ClearScrollRequest) { l.onResponse(new ClearScrollResponse(true, 1)); } else { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java index bcb335c7cf9bc..3b52f86c00ba8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java @@ -371,17 +371,10 @@ private void doAnswerWithSearchResult(Client client, ExpressionRoleMapping mappi null, 0 ); - final var searchResponse = new SearchResponse( - internalSearchResponse, - randomAlphaOfLengthBetween(3, 8), - 1, - 1, - 0, - 10, - null, - null + ActionListener.respondAndRelease( + listener, + new SearchResponse(internalSearchResponse, 
randomAlphaOfLengthBetween(3, 8), 1, 1, 0, 10, null, null) ); - listener.onResponse(searchResponse); return null; }).when(client).search(any(SearchRequest.class), anyActionListener()); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index f5f700c8dc7c2..4cabe5a8ec3ba 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -21,8 +21,8 @@ import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; @@ -871,14 +871,14 @@ public void testRemoteIndicesOnlyWorkWithApplicableRequestTypes() { mockEmptyMetadata(); final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); assertThrowsAuthorizationException( - () -> authorize(authentication, DeleteIndexAction.NAME, request), - DeleteIndexAction.NAME, + () -> authorize(authentication, TransportDeleteIndexAction.TYPE.name(), request), + TransportDeleteIndexAction.TYPE.name(), "test user" ); verify(auditTrail).accessDenied( eq(requestId), eq(authentication), - eq(DeleteIndexAction.NAME), + eq(TransportDeleteIndexAction.TYPE.name()), eq(request), authzInfoRoles(Role.EMPTY.names()) ); @@ -2277,7 +2277,10 @@ public void testSuperusersCannotExecuteWriteOperationAgainstSecurityIndex() { new Tuple<>(PutMappingAction.NAME, new PutMappingRequest(randomFrom(SECURITY_MAIN_ALIAS, INTERNAL_SECURITY_MAIN_INDEX_7))) ); requests.add( - new Tuple<>(DeleteIndexAction.NAME, new DeleteIndexRequest(randomFrom(SECURITY_MAIN_ALIAS, INTERNAL_SECURITY_MAIN_INDEX_7))) + new Tuple<>( + TransportDeleteIndexAction.TYPE.name(), + new DeleteIndexRequest(randomFrom(SECURITY_MAIN_ALIAS, INTERNAL_SECURITY_MAIN_INDEX_7)) + ) ); for (final Tuple requestTuple : requests) { final String action = requestTuple.v1(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index 7defd0f11bfac..45838e75940b4 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -15,8 +15,8 @@ import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import 
org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; @@ -1541,7 +1541,10 @@ public void testNonRemotableRequestDoesNotAllowRemoteIndices() { new CloseIndexRequest("remote:foo").indicesOptions(options), TransportCloseIndexAction.NAME ), - new Tuple(new DeleteIndexRequest("remote:foo").indicesOptions(options), DeleteIndexAction.NAME), + new Tuple( + new DeleteIndexRequest("remote:foo").indicesOptions(options), + TransportDeleteIndexAction.TYPE.name() + ), new Tuple(new PutMappingRequest("remote:foo").indicesOptions(options), PutMappingAction.NAME) ); IndexNotFoundException e = expectThrows( @@ -1555,7 +1558,10 @@ public void testNonRemotableRequestDoesNotAllowRemoteWildcardIndices() { IndicesOptions options = IndicesOptions.fromOptions(randomBoolean(), true, true, true); Tuple tuple = randomFrom( new Tuple(new CloseIndexRequest("*:*").indicesOptions(options), TransportCloseIndexAction.NAME), - new Tuple(new DeleteIndexRequest("*:*").indicesOptions(options), DeleteIndexAction.NAME), + new Tuple( + new DeleteIndexRequest("*:*").indicesOptions(options), + TransportDeleteIndexAction.TYPE.name() + ), new Tuple(new PutMappingRequest("*:*").indicesOptions(options), PutMappingAction.NAME) ); final ResolvedIndices resolved = resolveIndices(tuple.v1(), buildAuthorizedIndices(user, tuple.v2())); @@ -1576,7 +1582,7 @@ public void testCompositeIndicesRequestIsNotSupported() { } public void testResolveAdminAction() { - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, DeleteIndexAction.NAME); + final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, TransportDeleteIndexAction.TYPE.name()); { RefreshRequest request = new RefreshRequest("*"); List indices = resolveIndices(request, authorizedIndices).getLocal(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java index 3540f0bd6a753..753f498e2fb90 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java @@ -41,9 +41,7 @@ import org.elasticsearch.xpack.core.security.action.apikey.GetApiKeyRequest; import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction; import org.elasticsearch.xpack.core.security.action.user.AuthenticateRequest; -import org.elasticsearch.xpack.core.security.action.user.ChangePasswordAction; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequest; -import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequestBuilder; import org.elasticsearch.xpack.core.security.action.user.DeleteUserAction; import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesAction; import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesRequest; @@ -98,6 +96,8 @@ import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.test.TestRestrictedIndices; import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.action.user.ChangePasswordRequestBuilder; +import 
org.elasticsearch.xpack.security.action.user.TransportChangePasswordAction; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.authz.RBACEngine.RBACAuthorizationInfo; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; @@ -247,7 +247,7 @@ public void testSameUserPermission() { final TransportRequest request = changePasswordRequest ? new ChangePasswordRequestBuilder(mock(Client.class)).username(user.principal()).request() : new HasPrivilegesRequestBuilder(mock(Client.class)).username(user.principal()).request(); - final String action = changePasswordRequest ? ChangePasswordAction.NAME : HasPrivilegesAction.NAME; + final String action = changePasswordRequest ? TransportChangePasswordAction.TYPE.name() : HasPrivilegesAction.NAME; final Authentication.RealmRef authenticatedBy = new Authentication.RealmRef( randomAlphaOfLengthBetween(3, 8), changePasswordRequest ? randomFrom(ReservedRealm.TYPE, NativeRealmSettings.TYPE) : randomAlphaOfLengthBetween(4, 12), @@ -267,7 +267,7 @@ public void testSameUserPermissionDoesNotAllowNonMatchingUsername() { final TransportRequest request = changePasswordRequest ? new ChangePasswordRequestBuilder(mock(Client.class)).username(username).request() : new HasPrivilegesRequestBuilder(mock(Client.class)).username(username).request(); - final String action = changePasswordRequest ? ChangePasswordAction.NAME : HasPrivilegesAction.NAME; + final String action = changePasswordRequest ? TransportChangePasswordAction.TYPE.name() : HasPrivilegesAction.NAME; final Authentication.RealmRef authenticatedBy = new Authentication.RealmRef( randomAlphaOfLengthBetween(3, 8), @@ -331,7 +331,7 @@ public void testSameUserPermissionRunAsChecksAuthenticatedBy() { final TransportRequest request = changePasswordRequest ? new ChangePasswordRequestBuilder(mock(Client.class)).username(username).request() : new HasPrivilegesRequestBuilder(mock(Client.class)).username(username).request(); - final String action = changePasswordRequest ? ChangePasswordAction.NAME : AuthenticateAction.NAME; + final String action = changePasswordRequest ? 
TransportChangePasswordAction.TYPE.name() : AuthenticateAction.NAME; final Authentication.RealmRef authenticatedBy = AuthenticationTestHelper.randomRealmRef(false); final Authentication.RealmRef lookedUpBy = new Authentication.RealmRef( @@ -367,7 +367,7 @@ public void testSameUserPermissionDoesNotAllowChangePasswordForOtherRealms() { final ChangePasswordRequest request = new ChangePasswordRequestBuilder(mock(Client.class)).username( authentication.getEffectiveSubject().getUser().principal() ).request(); - final String action = ChangePasswordAction.NAME; + final String action = TransportChangePasswordAction.TYPE.name(); assertThat(request, instanceOf(UserRequest.class)); assertFalse(RBACEngine.checkSameUserPermissions(action, request, authentication)); @@ -378,7 +378,7 @@ public void testSameUserPermissionDoesNotAllowChangePasswordForApiKey() { final ChangePasswordRequest request = new ChangePasswordRequestBuilder(mock(Client.class)).username( authentication.getEffectiveSubject().getUser().principal() ).request(); - final String action = ChangePasswordAction.NAME; + final String action = TransportChangePasswordAction.TYPE.name(); assertThat(request, instanceOf(UserRequest.class)); assertFalse(RBACEngine.checkSameUserPermissions(action, request, authentication)); @@ -389,7 +389,7 @@ public void testSameUserPermissionDoesNotAllowChangePasswordForAccessToken() { final ChangePasswordRequest request = new ChangePasswordRequestBuilder(mock(Client.class)).username( authentication.getEffectiveSubject().getUser().principal() ).request(); - final String action = ChangePasswordAction.NAME; + final String action = TransportChangePasswordAction.TYPE.name(); assertThat(request, instanceOf(UserRequest.class)); assertFalse(RBACEngine.checkSameUserPermissions(action, request, authentication)); @@ -416,7 +416,7 @@ public void testSameUserPermissionDoesNotAllowChangePasswordForLookedUpByOtherRe final ChangePasswordRequest request = new ChangePasswordRequestBuilder(mock(Client.class)).username( authentication.getEffectiveSubject().getUser().principal() ).request(); - final String action = ChangePasswordAction.NAME; + final String action = TransportChangePasswordAction.TYPE.name(); assertThat(request, instanceOf(UserRequest.class)); assertFalse(RBACEngine.checkSameUserPermissions(action, request, authentication)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java index 53df6e6157282..d229124419cb2 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java @@ -134,7 +134,7 @@ protected void @Override public void searchScroll(SearchScrollRequest request, ActionListener listener) { - listener.onResponse(SearchResponse.empty(() -> 1L, SearchResponse.Clusters.EMPTY)); + ActionListener.respondAndRelease(listener, SearchResponse.empty(() -> 1L, SearchResponse.Clusters.EMPTY)); } }; securityIndex = mock(SecurityIndexManager.class); @@ -189,7 +189,7 @@ public void testGetSinglePrivilegeByName() throws Exception { {"term":{"type":{"value":"application-privilege\"""")); final SearchHit[] hits = buildHits(sourcePrivileges); - listener.get().onResponse(buildSearchResponse(hits)); + ActionListener.respondAndRelease(listener.get(), 
buildSearchResponse(hits)); assertResult(sourcePrivileges, future); } @@ -198,7 +198,7 @@ public void testGetMissingPrivilege() throws InterruptedException, ExecutionExce final PlainActionFuture> future = new PlainActionFuture<>(); store.getPrivileges(List.of("myapp"), List.of("admin"), future); final SearchHit[] hits = new SearchHit[0]; - listener.get().onResponse(buildSearchResponse(hits)); + ActionListener.respondAndRelease(listener.get(), buildSearchResponse(hits)); final Collection applicationPrivilegeDescriptors = future.get(1, TimeUnit.SECONDS); assertThat(applicationPrivilegeDescriptors, empty()); @@ -225,7 +225,7 @@ public void testGetPrivilegesByApplicationName() throws Exception { {"term":{"type":{"value":"application-privilege\"""")); final SearchHit[] hits = buildHits(sourcePrivileges); - listener.get().onResponse(buildSearchResponse(hits)); + ActionListener.respondAndRelease(listener.get(), buildSearchResponse(hits)); assertResult(sourcePrivileges, future); } @@ -283,7 +283,7 @@ public void testGetPrivilegesByWildcardApplicationName() throws Exception { } final SearchHit[] hits = buildHits(allowExpensiveQueries ? sourcePrivileges.subList(1, 4) : sourcePrivileges); - listener.get().onResponse(buildSearchResponse(hits)); + ActionListener.respondAndRelease(listener.get(), buildSearchResponse(hits)); // The first and last privilege should not be retrieved assertResult(sourcePrivileges.subList(1, 4), future); } @@ -300,7 +300,7 @@ public void testGetPrivilegesByStarApplicationName() throws Exception { assertThat(query, containsString("{\"term\":{\"type\":{\"value\":\"application-privilege\"")); final SearchHit[] hits = new SearchHit[0]; - listener.get().onResponse(buildSearchResponse(hits)); + ActionListener.respondAndRelease(listener.get(), buildSearchResponse(hits)); } public void testGetAllPrivileges() throws Exception { @@ -321,7 +321,7 @@ public void testGetAllPrivileges() throws Exception { assertThat(query, not(containsString("{\"terms\""))); final SearchHit[] hits = buildHits(sourcePrivileges); - listener.get().onResponse(buildSearchResponse(hits)); + ActionListener.respondAndRelease(listener.get(), buildSearchResponse(hits)); assertResult(sourcePrivileges, future); } @@ -337,7 +337,7 @@ public void testGetPrivilegesCacheByApplicationNames() throws Exception { store.getPrivileges(List.of("myapp", "yourapp"), null, future); final SearchHit[] hits = buildHits(sourcePrivileges); - listener.get().onResponse(buildSearchResponse(hits)); + ActionListener.respondAndRelease(listener.get(), buildSearchResponse(hits)); assertEquals(Set.of("myapp"), store.getApplicationNamesCache().get(Set.of("myapp", "yourapp"))); assertEquals(Set.copyOf(sourcePrivileges), store.getDescriptorsCache().get("myapp")); @@ -369,7 +369,7 @@ public void testGetPrivilegesCacheWithApplicationAndPrivilegeName() throws Excep store.getPrivileges(Collections.singletonList("myapp"), singletonList("user"), future); final SearchHit[] hits = buildHits(sourcePrivileges); - listener.get().onResponse(buildSearchResponse(hits)); + ActionListener.respondAndRelease(listener.get(), buildSearchResponse(hits)); // Not caching names with no wildcard assertNull(store.getApplicationNamesCache().get(singleton("myapp"))); @@ -388,7 +388,7 @@ public void testGetPrivilegesCacheWithNonExistentApplicationName() throws Except final PlainActionFuture> future = new PlainActionFuture<>(); store.getPrivileges(Collections.singletonList("no-such-app"), null, future); final SearchHit[] hits = buildHits(emptyList()); - 
listener.get().onResponse(buildSearchResponse(hits)); + ActionListener.respondAndRelease(listener.get(), buildSearchResponse(hits)); assertEquals(emptySet(), store.getApplicationNamesCache().get(singleton("no-such-app"))); assertEquals(0, store.getDescriptorsCache().count()); @@ -405,7 +405,7 @@ public void testGetPrivilegesCacheWithDifferentMatchAllApplicationNames() throws final PlainActionFuture> future = new PlainActionFuture<>(); store.getPrivileges(emptyList(), null, future); final SearchHit[] hits = buildHits(emptyList()); - listener.get().onResponse(buildSearchResponse(hits)); + ActionListener.respondAndRelease(listener.get(), buildSearchResponse(hits)); assertEquals(emptySet(), store.getApplicationNamesCache().get(singleton("*"))); assertEquals(1, store.getApplicationNamesCache().count()); assertResult(emptyList(), future); @@ -442,7 +442,7 @@ public void testCacheIsClearedByApplicationNameWhenPrivilegesAreModified() throw new ApplicationPrivilegeDescriptor("app2", "priv2b", Set.of("action:2b"), Map.of()) ); final SearchHit[] hits = buildHits(sourcePrivileges); - listener.get().onResponse(buildSearchResponse(hits)); + ActionListener.respondAndRelease(listener.get(), buildSearchResponse(hits)); assertEquals(Set.of("app1", "app2"), store.getApplicationNamesCache().get(singleton("*"))); assertResult(sourcePrivileges, getFuture); @@ -505,7 +505,7 @@ public void testStaleResultsWillNotBeCached() { // Before the results can be cached, invalidate the cache to simulate stale search results store.getDescriptorsAndApplicationNamesCache().invalidateAll(); final SearchHit[] hits = buildHits(sourcePrivileges); - listener.get().onResponse(buildSearchResponse(hits)); + ActionListener.respondAndRelease(listener.get(), buildSearchResponse(hits)); // Nothing should be cached since the results are stale assertEquals(0, store.getApplicationNamesCache().count()); @@ -553,7 +553,7 @@ protected void cacheFetchedDescriptors( final PlainActionFuture> future = new PlainActionFuture<>(); store1.getPrivileges(null, null, future); final SearchHit[] hits = buildHits(sourcePrivileges); - listener.get().onResponse(buildSearchResponse(hits)); + ActionListener.respondAndRelease(listener.get(), buildSearchResponse(hits)); // Make sure the caching is about to happen getPrivilegeCountDown.await(5, TimeUnit.SECONDS); @@ -779,7 +779,7 @@ public void testGetPrivilegesWorkWithoutCache() throws Exception { final PlainActionFuture> future = new PlainActionFuture<>(); store1.getPrivileges(singletonList("myapp"), null, future); final SearchHit[] hits = buildHits(sourcePrivileges); - listener.get().onResponse(buildSearchResponse(hits)); + ActionListener.respondAndRelease(listener.get(), buildSearchResponse(hits)); assertResult(sourcePrivileges, future); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java index 9bd5d416940d3..57e48581d159c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java @@ -12,8 +12,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; 
-import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; @@ -568,7 +568,7 @@ public void testProfileSecuredRequestHandlerDecrementsRefCountOnFailure() throws final SecurityServerTransportInterceptor.ProfileSecuredRequestHandler requestHandler = new SecurityServerTransportInterceptor.ProfileSecuredRequestHandler<>( logger, - DeleteIndexAction.NAME, + TransportDeleteIndexAction.TYPE.name(), randomBoolean(), threadPool.executor(randomBoolean() ? ThreadPool.Names.SAME : ThreadPool.Names.GENERIC), (request, channel, task) -> fail("should fail at destructive operations check to trigger listener failure"), diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java index 1ecd85cadab46..f6e5601c75c6a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.MockIndicesRequest; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.open.OpenIndexAction; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.DestructiveOperations; @@ -188,7 +188,7 @@ public void testCrossClusterAccessInboundMissingHeadersFail() { } public void testInboundDestructiveOperations() { - String action = randomFrom(TransportCloseIndexAction.NAME, OpenIndexAction.NAME, DeleteIndexAction.NAME); + String action = randomFrom(TransportCloseIndexAction.NAME, OpenIndexAction.NAME, TransportDeleteIndexAction.TYPE.name()); TransportRequest request = new MockIndicesRequest( IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()), randomFrom("*", "_all", "test*") diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java index 684100b45a743..e8d06e6f8cbe2 100644 --- a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingNodesHelper; @@ -421,6 +422,21 @@ public void 
testNodeShutdownWithUnassignedShards() throws Exception { assertBusy(() -> assertNodeShutdownStatus(nodeAId, STALLED)); } + public void testRemoveNodeWaitsForAutoExpandReplicas() throws Exception { + final var nodes = internalCluster().startNodes(2); + final var indexName = randomIdentifier(); + createIndex(indexName, indexSettings(1, 0).put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1").build()); + ensureGreen(indexName); + + final var nodeToShutdownName = randomFrom(nodes); + final var nodeToShutdownId = getNodeId(nodeToShutdownName); + putNodeShutdown(nodeToShutdownId, SingleNodeShutdownMetadata.Type.REMOVE, null); + assertBusy(() -> assertNodeShutdownStatus(nodeToShutdownId, COMPLETE)); + internalCluster().stopNode(nodeToShutdownName); + + ensureGreen(indexName); + } + private void indexRandomData(String index) throws Exception { int numDocs = scaledRandomIntBetween(100, 1000); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java index 7946bb7e46627..fcd70d5c215f1 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.shutdown; -import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; @@ -20,12 +19,10 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.NodesShutdownMetadata; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; -import org.elasticsearch.cluster.routing.RerouteService; +import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.cluster.service.MasterServiceTaskQueue; import org.elasticsearch.common.Priority; import org.elasticsearch.common.inject.Inject; @@ -40,10 +37,12 @@ import java.util.Objects; import java.util.function.Predicate; +import static org.elasticsearch.cluster.routing.allocation.allocator.AllocationActionListener.rerouteCompletionIsNotRequired; + public class TransportPutShutdownNodeAction extends AcknowledgedTransportMasterNodeAction { private static final Logger logger = LogManager.getLogger(TransportPutShutdownNodeAction.class); - private final RerouteService rerouteService; + private final AllocationService allocationService; private final MasterServiceTaskQueue taskQueue; private final PutShutdownNodeExecutor executor = new PutShutdownNodeExecutor(); @@ -81,38 +80,6 @@ private static boolean putShutdownNodeState( return true; } - private static void ackAndMaybeReroute(Request request, ActionListener listener, RerouteService rerouteService) { - boolean shouldReroute = switch (request.getType()) { - case REMOVE, SIGTERM, REPLACE -> true; - default -> false; - }; - - if (shouldReroute) { - rerouteService.reroute("node registered for removal from cluster", Priority.URGENT, new 
ActionListener<>() { - @Override - public void onResponse(Void ignored) {} - - @Override - public void onFailure(Exception e) { - logger.log( - MasterService.isPublishFailureException(e) ? Level.DEBUG : Level.WARN, - () -> "failed to reroute after registering node [" + request.getNodeId() + "] for shutdown", - e - ); - } - }); - } else { - logger.trace( - () -> "not starting reroute after registering node [" - + request.getNodeId() - + "] for shutdown of type [" - + request.getType() - + "]" - ); - } - listener.onResponse(AcknowledgedResponse.TRUE); - } - // package private for tests record PutShutdownNodeTask(Request request, ActionListener listener) implements ClusterStateTaskListener { @Override @@ -130,6 +97,7 @@ public ClusterState execute(BatchExecutionContext batchExec var shutdownMetadata = new HashMap<>(initialState.metadata().nodeShutdowns().getAll()); Predicate nodeExistsPredicate = batchExecutionContext.initialState().getNodes()::nodeExists; boolean changed = false; + boolean needsReroute = false; for (final var taskContext : batchExecutionContext.taskContexts()) { var request = taskContext.getTask().request(); try (var ignored = taskContext.captureResponseHeaders()) { @@ -138,17 +106,34 @@ public ClusterState execute(BatchExecutionContext batchExec taskContext.onFailure(e); continue; } - taskContext.success(() -> ackAndMaybeReroute(request, taskContext.getTask().listener(), rerouteService)); + switch (request.getType()) { + case REMOVE, SIGTERM, REPLACE -> needsReroute = true; + } + taskContext.success(() -> { + logger.trace( + () -> "finished registering node [" + request.getNodeId() + "] for shutdown of type [" + request.getType() + "]" + ); + taskContext.getTask().listener.onResponse(AcknowledgedResponse.TRUE); + }); } if (changed == false) { - return batchExecutionContext.initialState(); + return initialState; } - return ClusterState.builder(batchExecutionContext.initialState()) - .metadata( - Metadata.builder(batchExecutionContext.initialState().metadata()) - .putCustom(NodesShutdownMetadata.TYPE, new NodesShutdownMetadata(shutdownMetadata)) - ) - .build(); + + final var updatedState = initialState.copyAndUpdateMetadata( + b -> b.putCustom(NodesShutdownMetadata.TYPE, new NodesShutdownMetadata(shutdownMetadata)) + ); + + if (needsReroute == false) { + return updatedState; + } + + // Reroute inline with the update, rather than using the RerouteService, in order to atomically update things like auto-expand + // replicas to account for the shutdown metadata. If the reroute were separate then the get-shutdown API might observe the + // intermediate state and report that nodes are ready to shut down prematurely. Even if the client were to wait for the + // put-shutdown API to complete there's a risk that it gets disconnected and retries, but the retry could well be a no-op which + // short-circuits past the cluster state update and therefore also doesn't wait for the background reroute. 
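The comment above ends the rationale for rerouting inline; the next added line returns allocationService.reroute(updatedState, ...). As a rough illustration of that control flow, here is a toy sketch of a batched executor that applies every task first and folds a single reroute into the same published state only when some task needs it. The types are deliberately simplified stand-ins (a String for the cluster state, a UnaryOperator for the allocation service), not the actual Elasticsearch APIs:

    import java.util.List;
    import java.util.function.UnaryOperator;

    final class InlineRerouteSketch {
        record ShutdownTask(String nodeId, boolean requiresReroute) {}

        static String execute(String initialState, List<ShutdownTask> tasks, UnaryOperator<String> reroute) {
            boolean changed = false;
            boolean needsReroute = false;
            String state = initialState;
            for (ShutdownTask task : tasks) {
                state = state + "|shutdown:" + task.nodeId();   // apply the task to the metadata
                changed = true;
                needsReroute |= task.requiresReroute();         // REMOVE/SIGTERM/REPLACE in the real code
            }
            if (changed == false) {
                return initialState;                            // nothing to publish
            }
            // reroute the updated state before publication, so the shutdown metadata and the
            // resulting allocation changes appear in a single cluster state update
            return needsReroute ? reroute.apply(state) : state;
        }
    }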
+ return allocationService.reroute(updatedState, "reroute after put-shutdown", rerouteCompletionIsNotRequired()); } } @@ -156,7 +141,7 @@ public ClusterState execute(BatchExecutionContext batchExec public TransportPutShutdownNodeAction( TransportService transportService, ClusterService clusterService, - RerouteService rerouteService, + AllocationService allocationService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver @@ -172,7 +157,7 @@ public TransportPutShutdownNodeAction( indexNameExpressionResolver, EsExecutors.DIRECT_EXECUTOR_SERVICE ); - this.rerouteService = rerouteService; + this.allocationService = allocationService; taskQueue = clusterService.createTaskQueue("put-shutdown", Priority.URGENT, new PutShutdownNodeExecutor()); } diff --git a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeActionTests.java b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeActionTests.java index 1ea85f4ef07cf..d3f13a343df3c 100644 --- a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeActionTests.java +++ b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeActionTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.cluster.ClusterStateTaskExecutor.TaskContext; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata.Type; -import org.elasticsearch.cluster.routing.RerouteService; +import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.MasterServiceTaskQueue; import org.elasticsearch.core.TimeValue; @@ -39,6 +39,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.clearInvocations; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -64,7 +65,8 @@ public void init() { var threadPool = mock(ThreadPool.class); var transportService = MockUtils.setupTransportServiceWithThreadpoolExecutor(threadPool); clusterService = mock(ClusterService.class); - var rerouteService = mock(RerouteService.class); + var allocationService = mock(AllocationService.class); + when(allocationService.reroute(any(ClusterState.class), anyString(), any())).then(invocation -> invocation.getArgument(0)); var actionFilters = mock(ActionFilters.class); var indexNameExpressionResolver = mock(IndexNameExpressionResolver.class); when(clusterService.createTaskQueue(any(), any(), Mockito.>any())).thenReturn( @@ -73,7 +75,7 @@ public void init() { action = new TransportPutShutdownNodeAction( transportService, clusterService, - rerouteService, + allocationService, threadPool, actionFilters, indexNameExpressionResolver diff --git a/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SnapshotLifecycleInitialisationTests.java b/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SnapshotLifecycleInitialisationTests.java index 65666c6dd7fdb..6d0cd2142fe6e 100644 --- a/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SnapshotLifecycleInitialisationTests.java +++ 
b/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SnapshotLifecycleInitialisationTests.java @@ -6,8 +6,8 @@ */ package org.elasticsearch.xpack.slm; -import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; +import org.elasticsearch.action.admin.cluster.repositories.put.TransportPutRepositoryAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; @@ -60,7 +60,7 @@ protected Collection> getPlugins() { public void testSLMIsInRunningModeWhenILMIsDisabled() throws Exception { client().execute( - PutRepositoryAction.INSTANCE, + TransportPutRepositoryAction.TYPE, new PutRepositoryRequest().name("repo") .type("fs") .settings(Settings.builder().put("repositories.fs.location", repositoryLocation).build()) diff --git a/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java b/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java index 25cea3b3f6e0a..75588baf6e6f4 100644 --- a/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java +++ b/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java @@ -182,6 +182,38 @@ public void testNestedFields() throws IOException { verifyEcsMappings(indexName); } + public void testNumericMessage() throws IOException { + String indexName = "test-numeric-message"; + createTestIndex(indexName); + Map fieldsMap = createTestDocument(false); + fieldsMap.put("message", 123); // Should be mapped as match_only_text + indexDocument(indexName, fieldsMap); + verifyEcsMappings(indexName); + } + + public void testUsage() throws IOException { + String indexName = "test-usage"; + createTestIndex(indexName); + Map fieldsMap = createTestDocument(false); + // Only non-root numeric (or coercable to numeric) "usage" fields should match + // ecs_usage_*_scaled_float; root fields and intermediate object fields should not match. 
+ fieldsMap.put("host.cpu.usage", 123); // should be mapped as scaled_float + fieldsMap.put("string.usage", "123"); // should also be mapped as scale_float + fieldsMap.put("usage", 123); + fieldsMap.put("root.usage.long", 123); + fieldsMap.put("root.usage.float", 123.456); + indexDocument(indexName, fieldsMap); + + final Map rawMappings = getMappings(indexName); + final Map flatFieldMappings = new HashMap<>(); + processRawMappingsSubtree(rawMappings, flatFieldMappings, new HashMap<>(), ""); + assertEquals("scaled_float", flatFieldMappings.get("host.cpu.usage")); + assertEquals("scaled_float", flatFieldMappings.get("string.usage")); + assertEquals("long", flatFieldMappings.get("usage")); + assertEquals("long", flatFieldMappings.get("root.usage.long")); + assertEquals("float", flatFieldMappings.get("root.usage.float")); + } + private static void indexDocument(String indexName, Map flattenedFieldsMap) throws IOException { try (XContentBuilder bodyBuilder = JsonXContent.contentBuilder()) { Request indexRequest = new Request("POST", "/" + indexName + "/_doc"); diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java index 8dc8238b8230b..eb7e3eb91fe0f 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java @@ -43,7 +43,7 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { // The stack template registry version. This number must be incremented when we make changes // to built-in templates. - public static final int REGISTRY_VERSION = 4; + public static final int REGISTRY_VERSION = 5; public static final String TEMPLATE_VERSION_VARIABLE = "xpack.stack.template.version"; public static final Setting STACK_TEMPLATES_ENABLED = Setting.boolSetting( diff --git a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java index d05acc7a7b368..dde0698056ab2 100644 --- a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java +++ b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCCSCanMatchIT.java @@ -13,7 +13,6 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; @@ -71,9 +70,11 @@ import java.util.Optional; import java.util.Set; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import static java.util.Collections.emptyList; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xpack.core.ClientHelper.TRANSFORM_ORIGIN; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -169,12 +170,12 @@ private int createIndexAndIndexDocs(String cluster, String index, int numberOfSh return numDocs; } - 
public void testSearchAction_MatchAllQuery() { + public void testSearchAction_MatchAllQuery() throws ExecutionException, InterruptedException { testSearchAction(QueryBuilders.matchAllQuery(), true, localOldDocs + localNewDocs + remoteOldDocs + remoteNewDocs, 0); testSearchAction(QueryBuilders.matchAllQuery(), false, localOldDocs + localNewDocs + remoteOldDocs + remoteNewDocs, 0); } - public void testSearchAction_RangeQuery() { + public void testSearchAction_RangeQuery() throws ExecutionException, InterruptedException { testSearchAction( QueryBuilders.rangeQuery("@timestamp").from(timestamp), // This query only matches new documents true, @@ -189,7 +190,7 @@ public void testSearchAction_RangeQuery() { ); } - public void testSearchAction_RangeQueryThatMatchesNoShards() { + public void testSearchAction_RangeQueryThatMatchesNoShards() throws ExecutionException, InterruptedException { testSearchAction( QueryBuilders.rangeQuery("@timestamp").from(100_000_000), // This query matches no documents true, @@ -206,17 +207,19 @@ public void testSearchAction_RangeQueryThatMatchesNoShards() { ); } - private void testSearchAction(QueryBuilder query, boolean ccsMinimizeRoundtrips, long expectedHitCount, int expectedSkippedShards) { + private void testSearchAction(QueryBuilder query, boolean ccsMinimizeRoundtrips, long expectedHitCount, int expectedSkippedShards) + throws ExecutionException, InterruptedException { SearchSourceBuilder source = new SearchSourceBuilder().query(query); SearchRequest request = new SearchRequest("local_*", "*:remote_*"); request.source(source).setCcsMinimizeRoundtrips(ccsMinimizeRoundtrips); - SearchResponse response = client().search(request).actionGet(); - ElasticsearchAssertions.assertHitCount(response, expectedHitCount); - int expectedTotalShards = oldLocalNumShards + newLocalNumShards + oldRemoteNumShards + newRemoteNumShards; - assertThat("Response was: " + response, response.getTotalShards(), is(equalTo(expectedTotalShards))); - assertThat("Response was: " + response, response.getSuccessfulShards(), is(equalTo(expectedTotalShards))); - assertThat("Response was: " + response, response.getFailedShards(), is(equalTo(0))); - assertThat("Response was: " + response, response.getSkippedShards(), is(equalTo(expectedSkippedShards))); + assertResponse(client().search(request), response -> { + ElasticsearchAssertions.assertHitCount(response, expectedHitCount); + int expectedTotalShards = oldLocalNumShards + newLocalNumShards + oldRemoteNumShards + newRemoteNumShards; + assertThat("Response was: " + response, response.getTotalShards(), is(equalTo(expectedTotalShards))); + assertThat("Response was: " + response, response.getSuccessfulShards(), is(equalTo(expectedTotalShards))); + assertThat("Response was: " + response, response.getFailedShards(), is(equalTo(0))); + assertThat("Response was: " + response, response.getSkippedShards(), is(equalTo(expectedSkippedShards))); + }); } public void testGetCheckpointAction_MatchAllQuery() throws InterruptedException { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java index 1c2f83c38a38e..d96ba88faff9a 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java @@ -10,8 
+10,8 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; @@ -158,7 +158,7 @@ private void deleteDestinationIndex( config.getHeaders(), TRANSFORM_ORIGIN, client, - DeleteIndexAction.INSTANCE, + TransportDeleteIndexAction.TYPE, deleteDestIndexRequest, deleteDestIndexListener ); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java index e597254bfe713..ee394c7a128b4 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java @@ -12,8 +12,8 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; @@ -179,7 +179,7 @@ private void deleteDestinationIndexIfCreatedByTheTransform( } String destIndex = transformConfigAndVersionHolder.get().v1().getDestination().getIndex(); DeleteIndexRequest deleteDestIndexRequest = new DeleteIndexRequest(destIndex); - executeAsyncWithOrigin(client, TRANSFORM_ORIGIN, DeleteIndexAction.INSTANCE, deleteDestIndexRequest, finalListener); + executeAsyncWithOrigin(client, TRANSFORM_ORIGIN, TransportDeleteIndexAction.TYPE, deleteDestIndexRequest, finalListener); }, listener::onFailure); // <2> Check if the destination index was created by transform diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java index e87cf1ca7fc8d..b3cdea8ee80d8 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java @@ -15,8 +15,8 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import 
org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkItemResponse; @@ -304,7 +304,7 @@ public void deleteOldIndices(ActionListener listener) { IndicesOptions.LENIENT_EXPAND_OPEN ); - executeAsyncWithOrigin(client, TRANSFORM_ORIGIN, DeleteIndexAction.INSTANCE, deleteRequest, ActionListener.wrap(response -> { + executeAsyncWithOrigin(client, TRANSFORM_ORIGIN, TransportDeleteIndexAction.TYPE, deleteRequest, ActionListener.wrap(response -> { if (response.isAcknowledged() == false) { listener.onFailure(new ElasticsearchStatusException("Failed to delete internal indices", RestStatus.INTERNAL_SERVER_ERROR)); return; diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java index 84f608b91dc95..41e23b54b0375 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java @@ -34,6 +34,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockMustacheScriptEngine; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -312,8 +313,7 @@ protected long watchRecordCount(QueryBuilder query) { } protected long docCount(String index, SearchSourceBuilder source) { - SearchRequestBuilder builder = prepareSearch(index).setSource(source).setSize(0); - return builder.get().getHits().getTotalHits().value; + return SearchResponseUtils.getTotalHitsValue(prepareSearch(index).setSource(source).setSize(0)); } protected SearchResponse searchHistory(SearchSourceBuilder builder) { @@ -416,16 +416,10 @@ protected SearchResponse searchWatchRecords(Consumer reque protected long findNumberOfPerformedActions(String watchName) { refresh(); - SearchResponse searchResponse = prepareSearch(HistoryStoreField.DATA_STREAM + "*").setIndicesOptions( - IndicesOptions.lenientExpandOpen() - ).setQuery(boolQuery().must(matchQuery("watch_id", watchName)).must(matchQuery("state", ExecutionState.EXECUTED.id()))).get(); - long totalHistsValue; - try { - totalHistsValue = searchResponse.getHits().getTotalHits().value; - } finally { - searchResponse.decRef(); - } - return totalHistsValue; + return SearchResponseUtils.getTotalHitsValue( + prepareSearch(HistoryStoreField.DATA_STREAM + "*").setIndicesOptions(IndicesOptions.lenientExpandOpen()) + .setQuery(boolQuery().must(matchQuery("watch_id", watchName)).must(matchQuery("state", ExecutionState.EXECUTED.id()))) + ); } protected void assertWatchWithNoActionNeeded(final String watchName, final long expectedWatchActionsWithNoActionNeeded) diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java index 49fd7218ed066..1308597b7bcf9 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java +++ 
b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.core.TimeValue; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.watcher.actions.ActionStatus; import org.elasticsearch.xpack.core.watcher.execution.ExecutionState; @@ -234,7 +235,7 @@ public void testAckWithRestart() throws Exception { assertThat(ackResponse.getStatus().actionStatus("_id").ackStatus().state(), is(ActionStatus.AckStatus.State.ACKED)); refresh("actions"); - long countAfterAck = prepareSearch("actions").setQuery(matchAllQuery()).get().getHits().getTotalHits().value; + long countAfterAck = SearchResponseUtils.getTotalHitsValue(prepareSearch("actions").setQuery(matchAllQuery())); assertThat(countAfterAck, greaterThanOrEqualTo(1L)); restartWatcherRandomly(); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java index 14579fd50b8e2..fc1d200c91b82 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java @@ -68,6 +68,7 @@ import org.elasticsearch.xpack.core.watcher.execution.TriggeredWatchStoreField; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; import org.elasticsearch.xpack.core.watcher.input.none.NoneInput; +import org.elasticsearch.xpack.core.watcher.support.WatcherDateTimeUtils; import org.elasticsearch.xpack.core.watcher.transform.TransformRegistry; import org.elasticsearch.xpack.core.watcher.transport.actions.QueryWatchesAction; import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchAction; @@ -533,6 +534,7 @@ public void afterBulk(long executionId, BulkRequest request, Exception failure) listener = new WatcherIndexingListener(watchParser, getClock(), triggerService, watcherLifeCycleService.getState()); clusterService.addListener(listener); + logger.info("Watcher initialized components at {}", WatcherDateTimeUtils.dateTimeFormatter.formatMillis(getClock().millis())); // note: clock is needed here until actions can be constructed directly instead of by guice return Arrays.asList( new ClockHolder(getClock()), diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java index 0472722bd80a2..ea9295600fe41 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java @@ -385,12 +385,14 @@ private Collection loadWatches(ClusterState clusterState) { } SearchScrollRequest request = new SearchScrollRequest(response.getScrollId()); request.scroll(scrollTimeout); + response.decRef(); response = client.searchScroll(request).actionGet(defaultSearchTimeout); } } finally { if (response != null) { ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); clearScrollRequest.addScrollId(response.getScrollId()); + response.decRef(); client.clearScroll(clearScrollRequest).actionGet(scrollTimeout); } } diff --git 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java index d6531fb8f145a..ba07c3137340d 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.xpack.core.watcher.support.WatcherDateTimeUtils; import org.elasticsearch.xpack.core.watcher.trigger.TriggerEvent; import org.elasticsearch.xpack.core.watcher.watch.Watch; import org.elasticsearch.xpack.watcher.trigger.schedule.Schedule; @@ -59,6 +60,7 @@ public TickerScheduleTriggerEngine(Settings settings, ScheduleRegistry scheduleR @Override public synchronized void start(Collection jobs) { long startTime = clock.millis(); + logger.info("Watcher starting watches at {}", WatcherDateTimeUtils.dateTimeFormatter.formatMillis(startTime)); Map startingSchedules = Maps.newMapWithExpectedSize(jobs.size()); for (Watch job : jobs) { if (job.trigger() instanceof ScheduleTrigger trigger) { @@ -154,6 +156,11 @@ static class ActiveSchedule { this.schedule = schedule; this.startTime = startTime; this.scheduledTime = schedule.nextScheduledTimeAfter(startTime, startTime); + logger.debug( + "Watcher: activating schedule for watch '{}', first run at {}", + name, + WatcherDateTimeUtils.dateTimeFormatter.formatMillis(scheduledTime) + ); } /**
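One more pattern worth calling out from the watcher hunks above: docCount, findNumberOfPerformedActions and WatchAckTests now read the hit count through SearchResponseUtils.getTotalHitsValue instead of dereferencing getHits().getTotalHits().value on a response the caller would then have to release. A sketch of what such a helper plausibly does, again with hypothetical stand-in types rather than the real utility:

    import java.util.function.Supplier;

    // Assumed shape of the response: the helper only needs to read the total-hits
    // value and release the response afterwards.
    interface ReleasableSearchResponse {
        long totalHitsValue();
        void decRef();
    }

    final class TotalHitsSketch {
        static long getTotalHitsValue(Supplier<ReleasableSearchResponse> search) {
            ReleasableSearchResponse response = search.get();
            try {
                return response.totalHitsValue();
            } finally {
                response.decRef(); // the caller never holds the response, so it cannot leak it
            }
        }
    }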