diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/FormattingPrecommitPlugin.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/FormattingPrecommitPlugin.java index 09d32d79a508c..7ea9b41ca32f9 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/FormattingPrecommitPlugin.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/FormattingPrecommitPlugin.java @@ -89,7 +89,6 @@ private Object[] getTargets(String projectPath) { return new String[] { "src/*/java/org/elasticsearch/action/admin/cluster/repositories/**/*.java", "src/*/java/org/elasticsearch/action/admin/cluster/snapshots/**/*.java", - "src/test/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java", "src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java", "src/*/java/org/elasticsearch/index/IndexMode.java", "src/*/java/org/elasticsearch/index/IndexRouting.java", @@ -98,6 +97,10 @@ private Object[] getTargets(String projectPath) { "src/*/java/org/elasticsearch/repositories/**/*.java", "src/*/java/org/elasticsearch/search/aggregations/**/*.java", "src/*/java/org/elasticsearch/snapshots/**/*.java" }; + } else if (projectPath.equals(":test:framework")) { + return new String[] { + "src/test/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java", + }; } else { // Normally this isn"t necessary, but we have Java sources in // non-standard places @@ -203,7 +206,6 @@ private Object[] getTargets(String projectPath) { ":test:fixtures:geoip-fixture", ":test:fixtures:krb5kdc-fixture", ":test:fixtures:old-elasticsearch", - ":test:framework", ":test:logger-usage", ":x-pack:docs", ":x-pack:license-tools", diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java 
b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java index 4f07187f7fc38..45b6b1d142963 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java @@ -36,7 +36,7 @@ import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; - +import org.gradle.api.model.ObjectFactory; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import java.io.BufferedWriter; @@ -51,6 +51,8 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import javax.inject.Inject; +import java.io.Serializable; /** * Checks files for license headers.. @@ -95,10 +97,6 @@ public List getExcludes() { return excludes; } - public Map getAdditionalLicenses() { - return additionalLicenses; - } - public void setExcludes(List excludes) { this.excludes = excludes; } @@ -106,6 +104,11 @@ public void setExcludes(List excludes) { @OutputFile private File reportFile = new File(getProject().getBuildDir(), "reports/licenseHeaders/rat.xml"); + private static List conventionalLicenses = Arrays.asList( + // Dual SSPLv1 and Elastic + new License("DUAL", "SSPL+Elastic License", "the Elastic License 2.0 or the Server") + ); + /** * Allowed license families for this project. */ @@ -118,13 +121,17 @@ public void setExcludes(List excludes) { */ @Input private List excludes = new ArrayList(); + + private ListProperty additionalLicenses; + /** * Additional license families that may be found. The key is the license category name (5 characters), * followed by the family name and the value list of patterns to search for. 
*/ @Input - protected Map additionalLicenses = new HashMap(); - + public ListProperty getAdditionalLicenses() { + return additionalLicenses; + } /** * Add a new license type. *

@@ -139,7 +146,12 @@ public void additionalLicense(final String categoryName, String familyName, Stri throw new IllegalArgumentException("License category name must be exactly 5 characters, got " + categoryName); } - additionalLicenses.put(categoryName + familyName, pattern); + additionalLicenses.add(new License(categoryName, familyName, pattern)); + } + + @Inject + public LicenseHeadersTask(ObjectFactory objectFactory) { + additionalLicenses = objectFactory.listProperty(License.class).convention(conventionalLicenses); } @TaskAction @@ -160,14 +172,10 @@ public void runRat() { matchers.add(subStringMatcher("GEN ", "Generated", "ANTLR GENERATED CODE")); // Vendored Code matchers.add(subStringMatcher("VEN ", "Vendored", "@notice")); - // Dual SSPLv1 and Elastic - matchers.add(subStringMatcher("DUAL", "SSPL+Elastic License", "the Elastic License 2.0 or the Server")); - for (Map.Entry additional : additionalLicenses.entrySet()) { - String category = additional.getKey().substring(0, 5); - String family = additional.getKey().substring(5); - matchers.add(subStringMatcher(category, family, additional.getValue())); - } + additionalLicenses.get().forEach(l -> + matchers.add(subStringMatcher(l.licenseFamilyCategory, l.licenseFamilyName, l.substringPattern)) + ); reportConfiguration.setHeaderMatcher(new HeaderMatcherMultiplexer(matchers.toArray(IHeaderMatcher[]::new))); reportConfiguration.setApprovedLicenseNames(approvedLicenses.stream().map(license -> { @@ -190,7 +198,6 @@ private IHeaderMatcher subStringMatcher(String licenseFamilyCategory, String lic SubstringLicenseMatcher substringLicenseMatcher = new SubstringLicenseMatcher(); substringLicenseMatcher.setLicenseFamilyCategory(licenseFamilyCategory); substringLicenseMatcher.setLicenseFamilyName(licenseFamilyName); - SubstringLicenseMatcher.Pattern pattern = new SubstringLicenseMatcher.Pattern(); pattern.setSubstring(substringPattern); substringLicenseMatcher.addConfiguredPattern(pattern); @@ -249,4 +256,16 @@ private 
static List elementList(NodeList resourcesNodes) { } return nodeList; } + + static class License implements Serializable { + private String licenseFamilyCategory; + private String licenseFamilyName; + private String substringPattern; + + public License(String licenseFamilyCategory, String licenseFamilyName, String substringPattern) { + this.licenseFamilyCategory = licenseFamilyCategory; + this.licenseFamilyName = licenseFamilyName; + this.substringPattern = substringPattern; + } + } } diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/precommit/LicenseHeadersPrecommitPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/precommit/LicenseHeadersPrecommitPluginFuncTest.groovy index 0481d3315d010..67d2b96fb7b8f 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/precommit/LicenseHeadersPrecommitPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/precommit/LicenseHeadersPrecommitPluginFuncTest.groovy @@ -59,6 +59,45 @@ class LicenseHeadersPrecommitPluginFuncTest extends AbstractGradleFuncTest { result.task(":licenseHeaders").outcome == TaskOutcome.SUCCESS } + def "supports sspl by convention"() { + given: + buildFile << """ + plugins { + id 'java' + id 'elasticsearch.internal-licenseheaders' + } + """ + dualLicensedFile() + + when: + def result = gradleRunner("licenseHeaders").build() + + then: + result.task(":licenseHeaders").outcome == TaskOutcome.SUCCESS + } + + def "sspl default additional license can be overridden"() { + given: + buildFile << """ + plugins { + id 'java' + id 'elasticsearch.internal-licenseheaders' + } + + tasks.named("licenseHeaders").configure { + additionalLicense 'ELAST', 'Elastic License 2.0', '2.0; you may not use this file except in compliance with the Elastic License' + } + """ + elasticLicensed() + dualLicensedFile() + + when: + def result = 
gradleRunner("licenseHeaders").buildAndFail() + + then: + result.task(":licenseHeaders").outcome == TaskOutcome.FAILED + } + private File unapprovedSourceFile(String filePath = "src/main/java/org/acme/UnapprovedLicensed.java") { File sourceFile = file(filePath); sourceFile << """ @@ -115,6 +154,21 @@ class LicenseHeadersPrecommitPluginFuncTest extends AbstractGradleFuncTest { """ } + private File elasticLicensed() { + file("src/main/java/org/acme/ElasticLicensed.java") << """ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + + package org.acme; + public class ElasticLicensed { + } + """ + } + private String packageString(File sourceFile) { String normalizedPath = normalized(sourceFile.getPath()) (normalizedPath.substring(normalizedPath.indexOf("src/main/java")) - "src/main/java/" - ("/" + sourceFile.getName())).replaceAll("/", ".") diff --git a/build.gradle b/build.gradle index 1569cec42e9a5..60a2735f8cbe2 100644 --- a/build.gradle +++ b/build.gradle @@ -132,9 +132,9 @@ tasks.register("verifyVersions") { * after the backport of the backcompat code is complete. */ -boolean bwc_tests_enabled = true +boolean bwc_tests_enabled = false // place a PR link here when committing bwc changes: -String bwc_tests_disabled_issue = "" +String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/79385" /* * FIPS 140-2 behavior was fixed in 7.11.0. Before that there is no way to run elasticsearch in a * JVM that is properly configured to be in fips mode with BCFIPS. 
For now we need to disable diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java index 11e0c8f24f755..480278db72917 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java @@ -37,13 +37,13 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.SniffConnectionStrategy; +import org.elasticsearch.xcontent.XContentType; import java.io.IOException; import java.util.HashMap; @@ -316,7 +316,7 @@ public void testClusterHealthNotFoundIndex() throws IOException { assertThat(response.getStatus(), equalTo(ClusterHealthStatus.RED)); assertNoIndices(response); assertWarnings("The HTTP status code for a cluster health timeout will be changed from 408 to 200 in a " + - "future version. Set the [es.cluster_health.request_timeout_200] system property to [true] to suppress this message and " + + "future version. 
Set the [return_200_for_cluster_health_timeout] query parameter to [true] to suppress this message and " + "opt in to the future behaviour now."); } diff --git a/distribution/src/bin/elasticsearch b/distribution/src/bin/elasticsearch index 59aabfc3ec368..7ba208a431e81 100755 --- a/distribution/src/bin/elasticsearch +++ b/distribution/src/bin/elasticsearch @@ -56,7 +56,7 @@ if [[ $ATTEMPT_SECURITY_AUTO_CONFIG = true ]]; then if ES_MAIN_CLASS=org.elasticsearch.xpack.security.cli.ConfigInitialNode \ ES_ADDITIONAL_SOURCES="x-pack-env;x-pack-security-env" \ ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/security-cli \ - "`dirname "$0"`"/elasticsearch-cli "$@" <<<"$KEYSTORE_PASSWORD"; then + bin/elasticsearch-cli "$@" <<<"$KEYSTORE_PASSWORD"; then : else retval=$? diff --git a/docs/reference/cluster/health.asciidoc b/docs/reference/cluster/health.asciidoc index e958a29d75bbc..92273b15e2e5e 100644 --- a/docs/reference/cluster/health.asciidoc +++ b/docs/reference/cluster/health.asciidoc @@ -97,6 +97,11 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] provided or better, i.e. `green` > `yellow` > `red`. By default, will not wait for any status. +`return_200_for_cluster_health_timeout`:: + (Optional, Boolean) A boolean value which controls whether to return HTTP 200 + status code instead of HTTP 408 in case of a cluster health timeout from + the server side. Defaults to false. + [[cluster-health-api-response-body]] ==== {api-response-body-title} diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index a5679bfe570f8..99b015e4f717a 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -507,7 +507,7 @@ include::{es-repo-dir}/tab-widgets/quick-start-cleanup-widget.asciidoc[] * Use {fleet} and {agent} to collect logs and metrics directly from your data sources and send them to {es}. See the -{fleet-guide}/fleet-quick-start.html[{fleet} quick start guide]. 
+{observability-guide}/ingest-logs-metrics-uptime.html[Ingest logs, metrics, and uptime data with {agent}]. * Use {kib} to explore, visualize, and manage your {es} data. See the {kibana-ref}/get-started.html[{kib} quick start guide]. diff --git a/docs/reference/ingest.asciidoc b/docs/reference/ingest.asciidoc index b0d6859a051f3..5280dc160cdd6 100644 --- a/docs/reference/ingest.asciidoc +++ b/docs/reference/ingest.asciidoc @@ -432,8 +432,7 @@ If you run {agent} standalone, you can apply pipelines using an <> or <> index setting. Alternatively, you can specify the `pipeline` policy setting in your `elastic-agent.yml` -configuration. See {fleet-guide}/run-elastic-agent-standalone.html[Run {agent} -standalone]. +configuration. See {fleet-guide}/install-standalone-elastic-agent.html[Install standalone {agent}s]. [discrete] [[access-source-fields]] diff --git a/docs/reference/modules/indices/recovery.asciidoc b/docs/reference/modules/indices/recovery.asciidoc index 1993d3da5db37..970c1b56af987 100644 --- a/docs/reference/modules/indices/recovery.asciidoc +++ b/docs/reference/modules/indices/recovery.asciidoc @@ -100,3 +100,10 @@ sent in parallel to the target node for each recovery. Defaults to `5`. + Do not increase this setting without carefully verifying that your cluster has the resources available to handle the extra load that will result. + +`indices.recovery.max_concurrent_snapshot_file_downloads_per_node`:: +(<>, Expert) Number of snapshot file downloads requests +executed in parallel in the target node for all recoveries. Defaults to `25`. ++ +Do not increase this setting without carefully verifying that your cluster has +the resources available to handle the extra load that will result. 
diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/InstantiatingObjectParser.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/InstantiatingObjectParser.java index 8c23a71965e73..89ccb670c5c3a 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/InstantiatingObjectParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/InstantiatingObjectParser.java @@ -23,8 +23,12 @@ *

* The main differences being that it is using Builder to construct the parser and takes a class of the target object instead of the object * builder. The target object must have exactly one constructor with the number and order of arguments matching the number of order of - * declared fields. If there are more then 2 constructors with the same number of arguments, one of them needs to be marked with + * declared fields. If there are more than 2 constructors with the same number of arguments, one of them needs to be marked with * {@linkplain ParserConstructor} annotation. + * + * It is also possible for the constructor to accept Context as the first parameter, in this case as in the case with multiple constructors + * it is required for the constructor to be marked with {@linkplain ParserConstructor} annotation. + * *

{@code
  *   public static class Thing{
  *       public Thing(String animal, String vegetable, int mineral) {
@@ -37,14 +41,35 @@
  *
  *   }
  *
- *   private static final InstantiatingObjectParser PARSER = new InstantiatingObjectParser<>("thing", Thing.class);
+ *   private static final InstantiatingObjectParser PARSER;
+ *   static {
+ *       InstantiatingObjectParser.Builder parser =
+ *           InstantiatingObjectParser.builder("thing", true, Thing.class);
+ *       parser.declareString(constructorArg(), new ParseField("animal"));
+ *       parser.declareString(constructorArg(), new ParseField("vegetable"));
+ *       parser.declareInt(optionalConstructorArg(), new ParseField("mineral"));
+ *       parser.declareInt(Thing::setFruit, new ParseField("fruit"));
+ *       parser.declareInt(Thing::setBug, new ParseField("bug"));
+ *       PARSER = parser.build()
+ *   }
+ * }
+ *
{@code
+ *
+ *   public static class AnotherThing {
+ *       @ParserConstructor
+ *       public AnotherThing(SomeContext continent, String animal, String vegetable, int mineral) {
+ *           ....
+ *       }
+ *   }
+ *
+ *   private static final InstantiatingObjectParser PARSER;
  *   static {
- *       PARSER.declareString(constructorArg(), new ParseField("animal"));
- *       PARSER.declareString(constructorArg(), new ParseField("vegetable"));
- *       PARSER.declareInt(optionalConstructorArg(), new ParseField("mineral"));
- *       PARSER.declareInt(Thing::setFruit, new ParseField("fruit"));
- *       PARSER.declareInt(Thing::setBug, new ParseField("bug"));
- *       PARSER.finalizeFields()
+ *       InstantiatingObjectParser.Builder parser =
+ *           InstantiatingObjectParser.builder("thing", true, AnotherThing.class);
+ *       parser.declareString(constructorArg(), new ParseField("animal"));
+ *       parser.declareString(constructorArg(), new ParseField("vegetable"));
+ *       parser.declareInt(optionalConstructorArg(), new ParseField("mineral"));
+ *       PARSER = parser.build()
  *   }
  * }
*/ @@ -72,7 +97,7 @@ public Builder(String name, Class valueClass) { } public Builder(String name, boolean ignoreUnknownFields, Class valueClass) { - this.constructingObjectParser = new ConstructingObjectParser<>(name, ignoreUnknownFields, this::build); + this.constructingObjectParser = new ConstructingObjectParser<>(name, ignoreUnknownFields, this::buildInstance); this.valueClass = valueClass; } @@ -87,9 +112,15 @@ public InstantiatingObjectParser build() { throw new IllegalArgumentException("More then one public constructor with @ParserConstructor annotation exist in " + "the class " + valueClass.getName()); } - if (c.getParameterCount() != neededArguments) { - throw new IllegalArgumentException("Annotated constructor doesn't have " + neededArguments + - " arguments in the class " + valueClass.getName()); + if (c.getParameterCount() < neededArguments || c.getParameterCount() > neededArguments + 1) { + throw new IllegalArgumentException( + "Annotated constructor doesn't have " + + neededArguments + + " or " + + (neededArguments + 1) + + " arguments in the class " + + valueClass.getName() + ); } constructor = c; } @@ -154,13 +185,20 @@ public void declareExclusiveFieldSet(String... 
exclusiveSet) { constructingObjectParser.declareExclusiveFieldSet(exclusiveSet); } - private Value build(Object[] args) { + private Value buildInstance(Object[] args, Context context) { if (constructor == null) { throw new IllegalArgumentException("InstantiatingObjectParser for type " + valueClass.getName() + " has to be finalized " + "before the first use"); } try { - return constructor.newInstance(args); + if (constructor.getParameterCount() != args.length) { + Object[] newArgs = new Object[args.length + 1]; + System.arraycopy(args, 0, newArgs, 1, args.length); + newArgs[0] = context; + return constructor.newInstance(newArgs); + } else { + return constructor.newInstance(args); + } } catch (Exception ex) { throw new IllegalArgumentException("Cannot instantiate an object of " + valueClass.getName(), ex); } diff --git a/libs/x-content/src/test/java/org/elasticsearch/xcontent/InstantiatingObjectParserTests.java b/libs/x-content/src/test/java/org/elasticsearch/xcontent/InstantiatingObjectParserTests.java index db155c2334851..34f02b373582e 100644 --- a/libs/x-content/src/test/java/org/elasticsearch/xcontent/InstantiatingObjectParserTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/xcontent/InstantiatingObjectParserTests.java @@ -8,11 +8,8 @@ package org.elasticsearch.xcontent; -import org.elasticsearch.xcontent.InstantiatingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ParserConstructor; -import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; import java.util.Objects; @@ -217,8 +214,10 @@ public void testAnnotationWrongArgumentNumber() { InstantiatingObjectParser.Builder builder = InstantiatingObjectParser.builder("foo", Annotations.class); builder.declareInt(constructorArg(), new ParseField("a")); builder.declareString(constructorArg(), new ParseField("b")); + 
builder.declareInt(constructorArg(), new ParseField("c")); + builder.declareString(constructorArg(), new ParseField("d")); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, builder::build); - assertThat(e.getMessage(), containsString("Annotated constructor doesn't have 2 arguments in the class")); + assertThat(e.getMessage(), containsString("Annotated constructor doesn't have 4 or 5 arguments in the class")); } public void testDoubleDeclarationThrowsException() throws IOException { @@ -240,4 +239,80 @@ class DoubleFieldDeclaration { assertThat(exception, instanceOf(IllegalArgumentException.class)); assertThat(exception.getMessage(), startsWith("Parser already registered for name=[name]")); } + + public static class ContextArgument { + final String context; + final int a; + final String b; + final long c; + + public ContextArgument() { + this(1, "2", 3); + } + + public ContextArgument(int a, String b) { + this(a, b, -1); + } + + + public ContextArgument(int a, String b, long c) { + this(null, a, b, c); + } + + public ContextArgument(String context, int a, String b, long c) { + this.context = context; + this.a = a; + this.b = b; + this.c = c; + } + + @ParserConstructor + public ContextArgument(String context, int a, String b, String c) { + this.context = context; + this.a = a; + this.b = b; + this.c = Long.parseLong(c); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ContextArgument that = (ContextArgument) o; + return a == that.a && + c == that.c && + Objects.equals(b, that.b); + } + + @Override + public int hashCode() { + return Objects.hash(a, b, c); + } + } + + public void testContextAsArgument() throws IOException { + InstantiatingObjectParser.Builder builder = InstantiatingObjectParser.builder( + "foo", + ContextArgument.class + ); + builder.declareInt(constructorArg(), new ParseField("a")); + builder.declareString(constructorArg(), new 
ParseField("b")); + builder.declareString(constructorArg(), new ParseField("c")); + InstantiatingObjectParser parser = builder.build(); + try (XContentParser contentParser = createParser(JsonXContent.jsonXContent, "{\"a\": 5, \"b\":\"6\", \"c\": \"7\"}")) { + assertThat(parser.parse(contentParser, "context"), equalTo(new ContextArgument("context", 5, "6", 7))); + } + } + + public void testContextAsArgumentWrongArgumentNumber() { + InstantiatingObjectParser.Builder builder = InstantiatingObjectParser.builder( + "foo", + ContextArgument.class + ); + builder.declareInt(constructorArg(), new ParseField("a")); + builder.declareString(constructorArg(), new ParseField("b")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, builder::build); + assertThat(e.getMessage(), containsString("Annotated constructor doesn't have 2 or 3 arguments in the class")); + } + } diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java b/libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/AbstractXContentFilteringTestCase.java similarity index 98% rename from server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java rename to libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/AbstractXContentFilteringTestCase.java index 87d4d92c7a910..af7e9aae149d8 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java +++ b/libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/AbstractXContentFilteringTestCase.java @@ -6,10 +6,11 @@ * Side Public License, v 1. 
*/ -package org.elasticsearch.common.xcontent.support.filtering; +package org.elasticsearch.xcontent.support.filtering; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.support.AbstractFilteringTestCase; import org.elasticsearch.xcontent.DeprecationHandler; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContent; @@ -17,8 +18,6 @@ import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.common.xcontent.support.AbstractFilteringTestCase; -import org.elasticsearch.xcontent.support.filtering.FilterPath; import java.io.IOException; import java.util.Arrays; @@ -142,6 +141,8 @@ static void assertXContentBuilderAsBytes(final XContentBuilder expected, final X assertThat(jsonParser.numberType(), equalTo(testParser.numberType())); assertThat(jsonParser.numberValue(), equalTo(testParser.numberValue())); break; + default: + break; } } } catch (Exception e) { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/CborXContentFilteringTests.java b/libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/CborXContentFilteringTests.java similarity index 93% rename from server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/CborXContentFilteringTests.java rename to libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/CborXContentFilteringTests.java index ce4c5d005c759..5b2dce8e10106 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/CborXContentFilteringTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/CborXContentFilteringTests.java @@ -6,7 +6,7 @@ * Side Public License, v 1. 
*/ -package org.elasticsearch.common.xcontent.support.filtering; +package org.elasticsearch.xcontent.support.filtering; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathGeneratorFilteringTests.java b/libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/FilterPathGeneratorFilteringTests.java similarity index 98% rename from server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathGeneratorFilteringTests.java rename to libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/FilterPathGeneratorFilteringTests.java index 3d73c8717e7ef..ad669a3e61b5d 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathGeneratorFilteringTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/FilterPathGeneratorFilteringTests.java @@ -6,14 +6,14 @@ * Side Public License, v 1. 
*/ -package org.elasticsearch.common.xcontent.support.filtering; +package org.elasticsearch.xcontent.support.filtering; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.filter.FilteringGeneratorDelegate; + import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xcontent.support.filtering.FilterPathBasedFilter; import java.util.Collections; diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathTests.java b/libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/FilterPathTests.java similarity index 99% rename from server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathTests.java rename to libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/FilterPathTests.java index 2046772e0afcf..c8a65e90a4c3a 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/FilterPathTests.java @@ -6,10 +6,9 @@ * Side Public License, v 1. 
*/ -package org.elasticsearch.common.xcontent.support.filtering; +package org.elasticsearch.xcontent.support.filtering; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xcontent.support.filtering.FilterPath; import java.util.Arrays; import java.util.LinkedHashSet; diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/JsonXContentFilteringTests.java b/libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/JsonXContentFilteringTests.java similarity index 93% rename from server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/JsonXContentFilteringTests.java rename to libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/JsonXContentFilteringTests.java index fffdbb2ad8818..5a27954754d43 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/JsonXContentFilteringTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/JsonXContentFilteringTests.java @@ -6,7 +6,7 @@ * Side Public License, v 1. 
*/ -package org.elasticsearch.common.xcontent.support.filtering; +package org.elasticsearch.xcontent.support.filtering; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/SmileFilteringGeneratorTests.java b/libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/SmileFilteringGeneratorTests.java similarity index 93% rename from server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/SmileFilteringGeneratorTests.java rename to libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/SmileFilteringGeneratorTests.java index 7c54668d17192..13efcc0738949 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/SmileFilteringGeneratorTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/SmileFilteringGeneratorTests.java @@ -6,7 +6,7 @@ * Side Public License, v 1. 
*/ -package org.elasticsearch.common.xcontent.support.filtering; +package org.elasticsearch.xcontent.support.filtering; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/YamlFilteringGeneratorTests.java b/libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/YamlFilteringGeneratorTests.java similarity index 93% rename from server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/YamlFilteringGeneratorTests.java rename to libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/YamlFilteringGeneratorTests.java index 00769671707a2..ada8b696c5d64 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/YamlFilteringGeneratorTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/xcontent/support/filtering/YamlFilteringGeneratorTests.java @@ -6,7 +6,7 @@ * Side Public License, v 1. 
*/ -package org.elasticsearch.common.xcontent.support.filtering; +package org.elasticsearch.xcontent.support.filtering; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_versioned_update.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_versioned_update.yml index 780f33be52dc0..68fa8d6c86014 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_versioned_update.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_versioned_update.yml @@ -1,8 +1,8 @@ --- "Test pipeline versioned updates": - skip: - version: " - 7.99.99" - reason: "re-enable in 7.16+ when backported" + version: " - 7.15.99" + reason: "added versioned updates in 7.16.0" - do: ingest.put_pipeline: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json index 91712bbbded29..7d33fdd52ab81 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json @@ -102,6 +102,10 @@ "red" ], "description":"Wait until cluster is in a specific state" + }, + "return_200_for_cluster_health_timeout":{ + "type":"boolean", + "description":"Whether to return HTTP 200 instead of 408 in case of a cluster health timeout from the server side" } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/knn_search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/knn_search.json new file mode 100644 index 0000000000000..b55f35ccab4fe --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/knn_search.json @@ -0,0 +1,40 @@ +{ + "knn_search":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/search-search.html", + 
"description":"Performs a kNN search." + }, + "stability":"experimental", + "visibility":"public", + "headers":{ + "accept": [ "application/json"], + "content_type": ["application/json"] + }, + "url":{ + "paths":[ + { + "path":"/{index}/_knn_search", + "methods":[ + "GET", + "POST" + ], + "parts":{ + "index":{ + "type":"list", + "description":"A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" + } + } + } + ] + }, + "params": { + "routing":{ + "type":"list", + "description":"A comma-separated list of specific routing values" + } + }, + "body":{ + "description":"The search definition" + } + } +} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/20_request_timeout.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/20_request_timeout.yml index 66a7cb2b48dbd..e5a4db4dbfd9f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/20_request_timeout.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/20_request_timeout.yml @@ -35,3 +35,25 @@ - match: { initializing_shards: 0 } - match: { unassigned_shards: 0 } - gte: { number_of_pending_tasks: 0 } + +--- +"cluster health request timeout with 200 response code": + - skip: + version: " - 7.99.99" + reason: "return_200_for_cluster_health_timeout exists only in 8.0.0; re-enable in 7.16+ when back-ported" + - do: + cluster.health: + timeout: 1ms + wait_for_active_shards: 5 + return_200_for_cluster_health_timeout: true + + - is_true: cluster_name + - is_true: timed_out + - gte: { number_of_nodes: 1 } + - gte: { number_of_data_nodes: 1 } + - match: { active_primary_shards: 0 } + - match: { active_shards: 0 } + - match: { relocating_shards: 0 } + - match: { initializing_shards: 0 } + - match: { unassigned_shards: 0 } + - gte: { number_of_pending_tasks: 0 } diff --git 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.put_settings/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.put_settings/10_basic.yml index 05937b73324bd..cd971882316d9 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.put_settings/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.put_settings/10_basic.yml @@ -1,7 +1,7 @@ --- "Test put and reset transient settings": - skip: - version: " - 7.99.99" + version: " - 7.15.99" reason: "transient settings deprecation" features: "warnings" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml index dc6e730248860..57cc4a1aa0deb 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml @@ -1,5 +1,9 @@ --- "Test basic pipeline crud": + - skip: + version: all + reason: https://github.com/elastic/elasticsearch/issues/79377 + - do: ingest.put_pipeline: id: "my_pipeline" @@ -28,6 +32,10 @@ --- "Test Put Versioned Pipeline": + - skip: + version: all + reason: https://github.com/elastic/elasticsearch/issues/79377 + - do: ingest.put_pipeline: id: "my_pipeline" @@ -118,6 +126,10 @@ id: "my_pipeline" --- "Test Get All Pipelines": + - skip: + version: all + reason: https://github.com/elastic/elasticsearch/issues/79377 + - do: ingest.put_pipeline: id: "first_pipeline" @@ -142,6 +154,10 @@ --- "Test invalid config": + - skip: + version: all + reason: https://github.com/elastic/elasticsearch/issues/79377 + - do: catch: /parse_exception/ ingest.put_pipeline: @@ -156,8 +172,12 @@ --- "Test Get Summarized Pipelines": - skip: - version: " - 7.12.99" - reason: "summary option added in 7.13" + version: all + reason: https://github.com/elastic/elasticsearch/issues/79377 + +# - skip: 
+# version: " - 7.12.99" +# reason: "summary option added in 7.13" - do: ingest.put_pipeline: diff --git a/server/build.gradle b/server/build.gradle index af891c853ac9e..51103c109fd13 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -6,6 +6,8 @@ * Side Public License, v 1. */ +import org.elasticsearch.gradle.internal.info.BuildParams + apply plugin: 'elasticsearch.build' apply plugin: 'nebula.optional-base' apply plugin: 'elasticsearch.publish' @@ -127,6 +129,12 @@ tasks.named("processResources").configure { dependsOn generateModulesList, generatePluginsList } +if (BuildParams.isSnapshotBuild() == false) { + tasks.named("test").configure { + systemProperty 'es.index_mode_feature_flag_registered', 'true' + } +} + tasks.named("thirdPartyAudit").configure { ignoreMissingClasses( // from com.fasterxml.jackson.dataformat.yaml.YAMLMapper (jackson-dataformat-yaml) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/SnapshotBasedIndexRecoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/SnapshotBasedIndexRecoveryIT.java index 561955271e57d..5dd6d8633d9d7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/SnapshotBasedIndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/SnapshotBasedIndexRecoveryIT.java @@ -31,11 +31,11 @@ import org.elasticsearch.common.blobstore.support.FilterBlobContainer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.core.CheckedRunnable; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.MergePolicyConfig; +import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.query.QueryBuilders; import 
org.elasticsearch.index.recovery.RecoveryStats; import org.elasticsearch.index.shard.IndexShard; @@ -56,11 +56,15 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.NamedXContentRegistry; import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -71,10 +75,12 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import static org.elasticsearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING; import static org.elasticsearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS; +import static org.elasticsearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS_PER_NODE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; @@ -827,6 +833,266 @@ public void testSeqNoBasedRecoveryIsUsedAfterPrimaryFailOver() throws Exception } } + public void testRecoveryUsingSnapshotsIsThrottledPerNode() throws Exception { + executeRecoveryWithSnapshotFileDownloadThrottled((indices, + sourceNode, + targetNode, + targetMockTransportService, + recoverySnapshotFileRequests, + awaitForRecoverSnapshotFileRequestReceived, + respondToRecoverSnapshotFile) -> { + String indexRecoveredFromSnapshot1 = indices.get(0); + assertAcked( + 
client().admin().indices().prepareUpdateSettings(indexRecoveredFromSnapshot1) + .setSettings(Settings.builder() + .put("index.routing.allocation.require._name", targetNode)).get() + ); + + awaitForRecoverSnapshotFileRequestReceived.run(); + + // Ensure that peer recoveries can make progress without restoring snapshot files + // while the permit is granted to a different recovery + String indexRecoveredFromPeer = indices.get(1); + assertAcked( + client().admin().indices().prepareUpdateSettings(indexRecoveredFromPeer) + .setSettings(Settings.builder() + .put("index.routing.allocation.require._name", targetNode)).get() + ); + + ensureGreen(indexRecoveredFromPeer); + assertPeerRecoveryDidNotUseSnapshots(indexRecoveredFromPeer, sourceNode, targetNode); + + // let snapshot file restore to proceed + respondToRecoverSnapshotFile.run(); + + ensureGreen(indexRecoveredFromSnapshot1); + + assertPeerRecoveryUsedSnapshots(indexRecoveredFromSnapshot1, sourceNode, targetNode); + + for (RecoverySnapshotFileRequest recoverySnapshotFileRequest : recoverySnapshotFileRequests) { + String indexName = recoverySnapshotFileRequest.getShardId().getIndexName(); + assertThat(indexName, is(equalTo(indexRecoveredFromSnapshot1))); + } + + targetMockTransportService.clearAllRules(); + + String indexRecoveredFromSnapshot2 = indices.get(2); + assertAcked( + client().admin().indices().prepareUpdateSettings(indexRecoveredFromSnapshot2) + .setSettings(Settings.builder() + .put("index.routing.allocation.require._name", targetNode)).get() + ); + + ensureGreen(indexRecoveredFromSnapshot2); + + assertPeerRecoveryUsedSnapshots(indexRecoveredFromSnapshot2, sourceNode, targetNode); + + }); + } + + public void testRecoveryUsingSnapshotsPermitIsReturnedAfterFailureOrCancellation() throws Exception { + executeRecoveryWithSnapshotFileDownloadThrottled((indices, + sourceNode, + targetNode, + targetMockTransportService, + recoverySnapshotFileRequests, + awaitForRecoverSnapshotFileRequestReceived, + 
respondToRecoverSnapshotFile) -> { + String indexRecoveredFromSnapshot1 = indices.get(0); + assertAcked( + client().admin().indices().prepareUpdateSettings(indexRecoveredFromSnapshot1) + .setSettings(Settings.builder() + .put("index.routing.allocation.require._name", targetNode)).get() + ); + + awaitForRecoverSnapshotFileRequestReceived.run(); + + targetMockTransportService.clearAllRules(); + + boolean cancelRecovery = randomBoolean(); + if (cancelRecovery) { + assertAcked(client().admin().indices().prepareDelete(indexRecoveredFromSnapshot1).get()); + + respondToRecoverSnapshotFile.run(); + + assertThat(indexExists(indexRecoveredFromSnapshot1), is(equalTo(false))); + } else { + // Recovery would fail and should release the granted permit and allow other + // recoveries to use snapshots + CountDownLatch cleanFilesRequestReceived = new CountDownLatch(1); + AtomicReference channelRef = new AtomicReference<>(); + targetMockTransportService.addRequestHandlingBehavior(PeerRecoveryTargetService.Actions.CLEAN_FILES, + (handler, request, channel, task) -> { + channelRef.compareAndExchange(null, channel); + cleanFilesRequestReceived.countDown(); + } + ); + + respondToRecoverSnapshotFile.run(); + cleanFilesRequestReceived.await(); + + targetMockTransportService.clearAllRules(); + channelRef.get().sendResponse(new IOException("unable to clean files")); + } + + String indexRecoveredFromSnapshot2 = indices.get(1); + assertAcked( + client().admin().indices().prepareUpdateSettings(indexRecoveredFromSnapshot2) + .setSettings(Settings.builder() + .put("index.routing.allocation.require._name", targetNode)).get() + ); + + ensureGreen(indexRecoveredFromSnapshot2); + + assertPeerRecoveryUsedSnapshots(indexRecoveredFromSnapshot2, sourceNode, targetNode); + }); + } + + public void testRecoveryReEstablishKeepsTheGrantedSnapshotFileDownloadPermit() throws Exception { + executeRecoveryWithSnapshotFileDownloadThrottled((indices, + sourceNode, + targetNode, + targetMockTransportService, + 
recoverySnapshotFileRequests, + awaitForRecoverSnapshotFileRequestReceived, + respondToRecoverSnapshotFile) -> { + AtomicReference startRecoveryConnection = new AtomicReference<>(); + CountDownLatch reestablishRecoverySent = new CountDownLatch(1); + targetMockTransportService.addSendBehavior((connection, requestId, action, request, options) -> { + if (action.equals(PeerRecoverySourceService.Actions.START_RECOVERY)) { + startRecoveryConnection.compareAndExchange(null, connection); + } else if (action.equals(PeerRecoverySourceService.Actions.REESTABLISH_RECOVERY)) { + reestablishRecoverySent.countDown(); + } + connection.sendRequest(requestId, action, request, options); + }); + + String indexRecoveredFromSnapshot1 = indices.get(0); + assertAcked( + client().admin().indices().prepareUpdateSettings(indexRecoveredFromSnapshot1) + .setSettings(Settings.builder() + .put("index.routing.allocation.require._name", targetNode)).get() + ); + + awaitForRecoverSnapshotFileRequestReceived.run(); + + startRecoveryConnection.get().close(); + + reestablishRecoverySent.await(); + + String indexRecoveredFromPeer = indices.get(1); + assertAcked( + client().admin().indices().prepareUpdateSettings(indexRecoveredFromPeer) + .setSettings(Settings.builder() + .put("index.routing.allocation.require._name", targetNode)).get() + ); + + ensureGreen(indexRecoveredFromPeer); + assertPeerRecoveryDidNotUseSnapshots(indexRecoveredFromPeer, sourceNode, targetNode); + + respondToRecoverSnapshotFile.run(); + + ensureGreen(indexRecoveredFromSnapshot1); + assertPeerRecoveryUsedSnapshots(indexRecoveredFromSnapshot1, sourceNode, targetNode); + + targetMockTransportService.clearAllRules(); + + final String indexRecoveredFromSnapshot2 = indices.get(2); + assertAcked( + client().admin().indices().prepareUpdateSettings(indexRecoveredFromSnapshot2) + .setSettings(Settings.builder() + .put("index.routing.allocation.require._name", targetNode)).get() + ); + + ensureGreen(indexRecoveredFromSnapshot2); + 
assertPeerRecoveryUsedSnapshots(indexRecoveredFromSnapshot2, sourceNode, targetNode); + }); + } + + private void executeRecoveryWithSnapshotFileDownloadThrottled(SnapshotBasedRecoveryThrottlingTestCase testCase) throws Exception { + updateSetting(INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS.getKey(), "1"); + updateSetting(INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS_PER_NODE.getKey(), "1"); + + try { + List dataNodes = internalCluster().startDataOnlyNodes(2); + List indices = new ArrayList<>(); + for (int i = 0; i < 3; i++) { + String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + createIndex(indexName, + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) + .put(IndexService.GLOBAL_CHECKPOINT_SYNC_INTERVAL_SETTING.getKey(), "1s") + .put("index.routing.allocation.require._name", dataNodes.get(0)) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .build() + ); + indices.add(indexName); + } + + String repoName = "repo"; + createRepo(repoName, "fs"); + + for (String indexName : indices) { + int numDocs = randomIntBetween(300, 1000); + indexDocs(indexName, numDocs, numDocs); + + createSnapshot(repoName, "snap-" + indexName, Collections.singletonList(indexName)); + } + + String sourceNode = dataNodes.get(0); + String targetNode = dataNodes.get(1); + MockTransportService targetMockTransportService = + (MockTransportService) internalCluster().getInstance(TransportService.class, targetNode); + + List recoverySnapshotFileRequests = Collections.synchronizedList(new ArrayList<>()); + CountDownLatch recoverSnapshotFileRequestReceived = new CountDownLatch(1); + CountDownLatch respondToRecoverSnapshotFile = new CountDownLatch(1); + targetMockTransportService.addRequestHandlingBehavior(PeerRecoveryTargetService.Actions.RESTORE_FILE_FROM_SNAPSHOT, + (handler, request, channel, task) -> { + recoverySnapshotFileRequests.add((RecoverySnapshotFileRequest) request); + 
recoverSnapshotFileRequestReceived.countDown(); + respondToRecoverSnapshotFile.await(); + handler.messageReceived(request, channel, task); + } + ); + + testCase.execute(indices, + sourceNode, + targetNode, + targetMockTransportService, + recoverySnapshotFileRequests, + recoverSnapshotFileRequestReceived::await, + respondToRecoverSnapshotFile::countDown + ); + } finally { + updateSetting(INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS_PER_NODE.getKey(), null); + updateSetting(INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS.getKey(), null); + } + } + + interface SnapshotBasedRecoveryThrottlingTestCase { + void execute(List indices, + String sourceNode, + String targetNode, + MockTransportService targetMockTransportService, + List recoverySnapshotFileRequests, + CheckedRunnable awaitForRecoverSnapshotFileRequestReceived, + Runnable respondToRecoverSnapshotFile) throws Exception; + } + + private void assertPeerRecoveryUsedSnapshots(String indexName, String sourceNode, String targetNode) { + RecoveryState recoveryStateIndexRecoveredFromPeer = getLatestPeerRecoveryStateForShard(indexName, 0); + assertPeerRecoveryWasSuccessful(recoveryStateIndexRecoveredFromPeer, sourceNode, targetNode); + assertThat(recoveryStateIndexRecoveredFromPeer.getIndex().recoveredFromSnapshotBytes(), is(greaterThan(0L))); + } + + private void assertPeerRecoveryDidNotUseSnapshots(String indexName, String sourceNode, String targetNode) { + RecoveryState recoveryStateIndexRecoveredFromPeer = getLatestPeerRecoveryStateForShard(indexName, 0); + assertPeerRecoveryWasSuccessful(recoveryStateIndexRecoveredFromPeer, sourceNode, targetNode); + assertThat(recoveryStateIndexRecoveredFromPeer.getIndex().recoveredFromSnapshotBytes(), is(equalTo(0L))); + } + private Store.MetadataSnapshot getMetadataSnapshot(String nodeName, String indexName) throws IOException { ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); IndicesService indicesService = 
internalCluster().getInstance(IndicesService.class, nodeName); @@ -926,7 +1192,7 @@ private void assertSearchResponseContainsAllIndexedDocs(SearchResponse searchRes } } - private void assertPeerRecoveryWasSuccessful(RecoveryState recoveryState, String sourceNode, String targetNode) throws Exception { + private void assertPeerRecoveryWasSuccessful(RecoveryState recoveryState, String sourceNode, String targetNode) { assertThat(recoveryState.getStage(), equalTo(RecoveryState.Stage.DONE)); assertThat(recoveryState.getRecoverySource(), equalTo(RecoverySource.PeerRecoverySource.INSTANCE)); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java index 8842856aa3fa6..d4aa7f1177bd5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java @@ -35,6 +35,8 @@ public class ClusterHealthRequest extends MasterNodeReadRequest INDEX_PARSER = (XContentParser parser, Void context, String index) -> ClusterIndexHealth.innerFromXContent(parser, index); - private static final String ES_CLUSTER_HEALTH_REQUEST_TIMEOUT_200_KEY = "es.cluster_health.request_timeout_200"; + static final String ES_CLUSTER_HEALTH_REQUEST_TIMEOUT_200_KEY = "return_200_for_cluster_health_timeout"; static final String CLUSTER_HEALTH_REQUEST_TIMEOUT_DEPRECATION_MSG = "The HTTP status code for a cluster health timeout " + "will be changed from 408 to 200 in a future version. 
Set the [" + ES_CLUSTER_HEALTH_REQUEST_TIMEOUT_200_KEY + "] " + - "system property to [true] to suppress this message and opt in to the future behaviour now."; + "query parameter to [true] to suppress this message and opt in to the future behaviour now."; static { // ClusterStateHealth fields @@ -137,15 +138,7 @@ public class ClusterHealthResponse extends ActionResponse implements StatusToXCo private boolean timedOut = false; private ClusterStateHealth clusterStateHealth; private ClusterHealthStatus clusterHealthStatus; - private boolean esClusterHealthRequestTimeout200 = readEsClusterHealthRequestTimeout200FromProperty(); - - public ClusterHealthResponse() { - } - - /** For the testing of opting in for the 200 status code without setting a system property */ - ClusterHealthResponse(boolean esClusterHealthRequestTimeout200) { - this.esClusterHealthRequestTimeout200 = esClusterHealthRequestTimeout200; - } + private boolean return200ForClusterHealthTimeout; public ClusterHealthResponse(StreamInput in) throws IOException { super(in); @@ -157,15 +150,21 @@ public ClusterHealthResponse(StreamInput in) throws IOException { numberOfInFlightFetch = in.readInt(); delayedUnassignedShards= in.readInt(); taskMaxWaitingTime = in.readTimeValue(); + if (in.getVersion().onOrAfter(Version.V_8_0_0)) { + return200ForClusterHealthTimeout = in.readBoolean(); + } } /** needed for plugins BWC */ - public ClusterHealthResponse(String clusterName, String[] concreteIndices, ClusterState clusterState) { - this(clusterName, concreteIndices, clusterState, -1, -1, -1, TimeValue.timeValueHours(0)); + public ClusterHealthResponse(String clusterName, String[] concreteIndices, ClusterState clusterState, + boolean return200ForServerTimeout) { + this(clusterName, concreteIndices, clusterState, -1, -1, -1, TimeValue.timeValueHours(0), + return200ForServerTimeout); } public ClusterHealthResponse(String clusterName, String[] concreteIndices, ClusterState clusterState, int numberOfPendingTasks, - int 
numberOfInFlightFetch, int delayedUnassignedShards, TimeValue taskMaxWaitingTime) { + int numberOfInFlightFetch, int delayedUnassignedShards, TimeValue taskMaxWaitingTime, + boolean return200ForServerTimeout) { this.clusterName = clusterName; this.numberOfPendingTasks = numberOfPendingTasks; this.numberOfInFlightFetch = numberOfInFlightFetch; @@ -173,6 +172,7 @@ public ClusterHealthResponse(String clusterName, String[] concreteIndices, Clust this.taskMaxWaitingTime = taskMaxWaitingTime; this.clusterStateHealth = new ClusterStateHealth(clusterState, concreteIndices); this.clusterHealthStatus = clusterStateHealth.getStatus(); + this.return200ForClusterHealthTimeout = return200ForServerTimeout; } /** @@ -304,6 +304,11 @@ public void writeTo(StreamOutput out) throws IOException { out.writeInt(numberOfInFlightFetch); out.writeInt(delayedUnassignedShards); out.writeTimeValue(taskMaxWaitingTime); + if (out.getVersion().onOrAfter(Version.V_8_0_0)) { + out.writeBoolean(return200ForClusterHealthTimeout); + } else if (return200ForClusterHealthTimeout) { + throw new IllegalArgumentException("Can't fix response code in a cluster involving nodes with version " + out.getVersion()); + } } @Override @@ -316,7 +321,7 @@ public RestStatus status() { if (isTimedOut() == false) { return RestStatus.OK; } - if (esClusterHealthRequestTimeout200) { + if (return200ForClusterHealthTimeout) { return RestStatus.OK; } else { deprecationLogger.compatibleCritical("cluster_health_request_timeout", CLUSTER_HEALTH_REQUEST_TIMEOUT_DEPRECATION_MSG); @@ -381,17 +386,4 @@ public int hashCode() { return Objects.hash(clusterName, numberOfPendingTasks, numberOfInFlightFetch, delayedUnassignedShards, taskMaxWaitingTime, timedOut, clusterStateHealth, clusterHealthStatus); } - - private static boolean readEsClusterHealthRequestTimeout200FromProperty() { - String property = System.getProperty(ES_CLUSTER_HEALTH_REQUEST_TIMEOUT_200_KEY); - if (property == null) { - return false; - } - if 
(Boolean.parseBoolean(property)) { - return true; - } else { - throw new IllegalArgumentException(ES_CLUSTER_HEALTH_REQUEST_TIMEOUT_200_KEY + " can only be unset or [true] but was [" - + property + "]"); - } - } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java index 83d4469e3b19d..ee261c253a2e6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java @@ -30,8 +30,8 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.tasks.Task; @@ -225,7 +225,8 @@ private enum TimeoutState { private ClusterHealthResponse getResponse(final ClusterHealthRequest request, ClusterState clusterState, final int waitFor, TimeoutState timeoutState) { - ClusterHealthResponse response = clusterHealth(request, clusterState, clusterService.getMasterService().numberOfPendingTasks(), + ClusterHealthResponse response = clusterHealth(request, clusterState, + clusterService.getMasterService().numberOfPendingTasks(), allocationService.getNumberOfInFlightFetches(), clusterService.getMasterService().getMaxTaskWaitTime()); int readyCounter = prepareResponse(request, response, clusterState, indexNameExpressionResolver); boolean valid = (readyCounter == waitFor); @@ -324,8 +325,8 @@ static int prepareResponse(final ClusterHealthRequest request, final ClusterHeal } - private ClusterHealthResponse 
clusterHealth(ClusterHealthRequest request, ClusterState clusterState, int numberOfPendingTasks, - int numberOfInFlightFetch, TimeValue pendingTaskTimeInQueue) { + private ClusterHealthResponse clusterHealth(ClusterHealthRequest request, ClusterState clusterState, + int numberOfPendingTasks, int numberOfInFlightFetch, TimeValue pendingTaskTimeInQueue) { if (logger.isTraceEnabled()) { logger.trace("Calculating health based on state version [{}]", clusterState.version()); } @@ -337,12 +338,13 @@ private ClusterHealthResponse clusterHealth(ClusterHealthRequest request, Cluste // one of the specified indices is not there - treat it as RED. ClusterHealthResponse response = new ClusterHealthResponse(clusterState.getClusterName().value(), Strings.EMPTY_ARRAY, clusterState, numberOfPendingTasks, numberOfInFlightFetch, UnassignedInfo.getNumberOfDelayedUnassigned(clusterState), - pendingTaskTimeInQueue); + pendingTaskTimeInQueue, request.doesReturn200ForClusterHealthTimeout()); response.setStatus(ClusterHealthStatus.RED); return response; } - return new ClusterHealthResponse(clusterState.getClusterName().value(), concreteIndices, clusterState, numberOfPendingTasks, - numberOfInFlightFetch, UnassignedInfo.getNumberOfDelayedUnassigned(clusterState), pendingTaskTimeInQueue); + return new ClusterHealthResponse(clusterState.getClusterName().value(), concreteIndices, clusterState, + numberOfPendingTasks, numberOfInFlightFetch, UnassignedInfo.getNumberOfDelayedUnassigned(clusterState), pendingTaskTimeInQueue, + request.doesReturn200ForClusterHealthTimeout()); } } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java index f0a7532dc80ac..13a9d28dadf4e 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java @@ -10,12 +10,14 @@ import 
org.elasticsearch.Version; import org.elasticsearch.index.mapper.TimeSeriesParams; +import org.elasticsearch.xcontent.InstantiatingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParserConstructor; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -159,6 +161,59 @@ public FieldCapabilities(String name, String type, } + /** + * Constructor for a set of indices used by parser + * @param name The name of the field + * @param type The type associated with the field. + * @param isMetadataField Whether this field is a metadata field. + * @param isSearchable Whether this field is indexed for search. + * @param isAggregatable Whether this field can be aggregated on. + * @param isDimension Whether this field can be used as dimension + * @param metricType If this field is a metric field, returns the metric's type or null for non-metrics fields + * @param indices The list of indices where this field name is defined as {@code type}, + * or null if all indices have the same {@code type} for the field. + * @param nonSearchableIndices The list of indices where this field is not searchable, + * or null if the field is searchable in all indices. + * @param nonAggregatableIndices The list of indices where this field is not aggregatable, + * or null if the field is aggregatable in all indices. + * @param nonDimensionIndices The list of indices where this field is not a dimension + * @param metricConflictsIndices The list of indices where this field is has different metric types or not mark as a metric + * @param meta Merged metadata across indices. 
+ */ + @SuppressWarnings("unused") + @ParserConstructor + public FieldCapabilities( + String name, + String type, + Boolean isMetadataField, + boolean isSearchable, + boolean isAggregatable, + Boolean isDimension, + String metricType, + List indices, + List nonSearchableIndices, + List nonAggregatableIndices, + List nonDimensionIndices, + List metricConflictsIndices, + Map> meta + ) { + this( + name, + type, + isMetadataField == null ? false : isMetadataField, + isSearchable, + isAggregatable, + isDimension == null ? false : isDimension, + metricType != null ? Enum.valueOf(TimeSeriesParams.MetricType.class, metricType) : null, + indices != null ? indices.toArray(new String[0]) : null, + nonSearchableIndices != null ? nonSearchableIndices.toArray(new String[0]) : null, + nonAggregatableIndices != null ? nonAggregatableIndices.toArray(new String[0]) : null, + nonDimensionIndices != null ? nonDimensionIndices.toArray(new String[0]) : null, + metricConflictsIndices != null ? metricConflictsIndices.toArray(new String[0]) : null, + meta != null ? meta : Collections.emptyMap() + ); + } + FieldCapabilities(StreamInput in) throws IOException { this.name = in.readString(); this.type = in.readString(); @@ -254,43 +309,31 @@ public static FieldCapabilities fromXContent(String name, XContentParser parser) } @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "field_capabilities", - true, - (a, name) -> new FieldCapabilities( - name, - (String) a[0], - a[3] == null ? false : (boolean) a[3], - (boolean) a[1], - (boolean) a[2], - a[4] == null ? false : (boolean) a[4], - a[5] != null ? Enum.valueOf(TimeSeriesParams.MetricType.class, (String) a[5]) : null, - a[6] != null ? ((List) a[6]).toArray(new String[0]) : null, - a[7] != null ? ((List) a[7]).toArray(new String[0]) : null, - a[8] != null ? ((List) a[8]).toArray(new String[0]) : null, - a[9] != null ? 
((List) a[9]).toArray(new String[0]) : null, - a[10] != null ? ((List) a[10]).toArray(new String[0]) : null, - a[11] != null ? ((Map>) a[11]) : Collections.emptyMap() - ) - ); + private static final InstantiatingObjectParser PARSER; static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), TYPE_FIELD); // 0 - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), SEARCHABLE_FIELD); // 1 - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), AGGREGATABLE_FIELD); // 2 - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), IS_METADATA_FIELD); // 3 - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), TIME_SERIES_DIMENSION_FIELD); // 4 - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), TIME_SERIES_METRIC_FIELD); // 5 - PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), INDICES_FIELD); // 6 - PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), NON_SEARCHABLE_INDICES_FIELD); // 7 - PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), NON_AGGREGATABLE_INDICES_FIELD); // 8 - PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), NON_DIMENSION_INDICES_FIELD); // 9 - PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), METRIC_CONFLICTS_INDICES_FIELD); // 10 - PARSER.declareObject( + InstantiatingObjectParser.Builder parser = InstantiatingObjectParser.builder( + "field_capabilities", + true, + FieldCapabilities.class + ); + parser.declareString(ConstructingObjectParser.constructorArg(), TYPE_FIELD); + parser.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), IS_METADATA_FIELD); + parser.declareBoolean(ConstructingObjectParser.constructorArg(), SEARCHABLE_FIELD); + parser.declareBoolean(ConstructingObjectParser.constructorArg(), AGGREGATABLE_FIELD); + parser.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), 
TIME_SERIES_DIMENSION_FIELD); + parser.declareString(ConstructingObjectParser.optionalConstructorArg(), TIME_SERIES_METRIC_FIELD); + parser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), INDICES_FIELD); + parser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), NON_SEARCHABLE_INDICES_FIELD); + parser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), NON_AGGREGATABLE_INDICES_FIELD); + parser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), NON_DIMENSION_INDICES_FIELD); + parser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), METRIC_CONFLICTS_INDICES_FIELD); + parser.declareObject( ConstructingObjectParser.optionalConstructorArg(), - (parser, context) -> parser.map(HashMap::new, p -> Set.copyOf(p.list())), + (p, context) -> p.map(HashMap::new, v -> Set.copyOf(v.list())), META_FIELD - ); // 11 + ); + PARSER = parser.build(); } /** diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java index b5cf692723dd7..cfdb932199b88 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java @@ -71,7 +71,7 @@ public TransportFieldCapabilitiesAction(TransportService transportService, this.fieldCapabilitiesFetcher = new FieldCapabilitiesFetcher(indicesService); final Set metadataFields = indicesService.getAllMetadataFields(); this.metadataFieldPred = metadataFields::contains; - transportService.registerRequestHandler(ACTION_NODE_NAME, ThreadPool.Names.MANAGEMENT, + transportService.registerRequestHandler(ACTION_NODE_NAME, ThreadPool.Names.SEARCH_COORDINATION, FieldCapabilitiesNodeRequest::new, new NodeTransportHandler()); } @@ -111,7 +111,7 @@ protected void doExecute(Task task, 
FieldCapabilitiesRequest request, final Acti localIndices, nowInMillis, concreteIndices, - threadPool.executor(ThreadPool.Names.MANAGEMENT), + threadPool.executor(ThreadPool.Names.SEARCH_COORDINATION), indexResponse -> indexResponses.putIfAbsent(indexResponse.getIndexName(), indexResponse), indexFailures::collect, countDown @@ -163,7 +163,7 @@ private Runnable createResponseMerger(FieldCapabilitiesRequest request, if (request.isMergeResults()) { // fork off to the management pool for merging the responses as the operation can run for longer than is acceptable // on a transport thread in case of large numbers of indices and/or fields - threadPool.executor(ThreadPool.Names.MANAGEMENT).submit( + threadPool.executor(ThreadPool.Names.SEARCH_COORDINATION).submit( ActionRunnable.supply( listener, () -> merge(indexResponses, request.includeUnmapped(), new ArrayList<>(failures))) diff --git a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java index 918bbf426b2b8..96fd61c9f7010 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java @@ -48,7 +48,7 @@ public PutPipelineRequest(StreamInput in) throws IOException { id = in.readString(); source = in.readBytesReference(); xContentType = in.readEnum(XContentType.class); - if (in.getVersion().onOrAfter(Version.V_8_0_0)) { + if (in.getVersion().onOrAfter(Version.V_7_16_0)) { version = in.readOptionalInt(); } else { version = null; @@ -86,7 +86,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(id); out.writeBytesReference(source); XContentHelper.writeTo(out, xContentType); - if (out.getVersion().onOrAfter(Version.V_8_0_0)) { + if (out.getVersion().onOrAfter(Version.V_7_16_0)) { out.writeOptionalInt(version); } } diff --git 
a/server/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java b/server/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java index 06e345c1aa2ff..0b54eb9ef0af6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java @@ -19,14 +19,13 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.AdjustableSemaphore; import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.util.concurrent.RunOnce; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.Mapping; -import java.util.concurrent.Semaphore; - /** * Called by shards in the cluster when their mapping was dynamically updated and it needs to be updated * in the cluster state meta data (and broadcast to all members). 
@@ -106,30 +105,4 @@ protected void sendUpdateMapping(Index index, Mapping mappingUpdate, ActionListe client.execute(AutoPutMappingAction.INSTANCE, putMappingRequest, ActionListener.wrap(r -> listener.onResponse(null), listener::onFailure)); } - - static class AdjustableSemaphore extends Semaphore { - - private final Object maxPermitsMutex = new Object(); - private int maxPermits; - - AdjustableSemaphore(int maxPermits, boolean fair) { - super(maxPermits, fair); - this.maxPermits = maxPermits; - } - - void setMaxPermits(int permits) { - synchronized (maxPermitsMutex) { - final int diff = Math.subtractExact(permits, maxPermits); - if (diff > 0) { - // add permits - release(diff); - } else if (diff < 0) { - // remove permits - reducePermits(Math.negateExact(diff)); - } - - maxPermits = permits; - } - } - } } diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 50eccb7e1fdaa..a1f2606782f19 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -216,6 +216,7 @@ public void apply(Settings value, Settings current, Settings previous) { RecoverySettings.INDICES_RECOVERY_MAX_CONCURRENT_OPERATIONS_SETTING, RecoverySettings.INDICES_RECOVERY_USE_SNAPSHOTS_SETTING, RecoverySettings.INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS, + RecoverySettings.INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS_PER_NODE, ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING, ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING, ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING, diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/AdjustableSemaphore.java 
b/server/src/main/java/org/elasticsearch/common/util/concurrent/AdjustableSemaphore.java new file mode 100644 index 0000000000000..b89378cd0a6d9 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/AdjustableSemaphore.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.util.concurrent; + +import java.util.concurrent.Semaphore; + +public class AdjustableSemaphore extends Semaphore { + + private final Object maxPermitsMutex = new Object(); + private int maxPermits; + + public AdjustableSemaphore(int maxPermits, boolean fair) { + super(maxPermits, fair); + this.maxPermits = maxPermits; + } + + public void setMaxPermits(int permits) { + synchronized (maxPermitsMutex) { + final int diff = Math.subtractExact(permits, maxPermits); + if (diff > 0) { + // add permits + release(diff); + } else if (diff < 0) { + // remove permits + reducePermits(Math.negateExact(diff)); + } + + maxPermits = permits; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/env/Environment.java b/server/src/main/java/org/elasticsearch/env/Environment.java index 65dd885904f37..2409ad4a7012f 100644 --- a/server/src/main/java/org/elasticsearch/env/Environment.java +++ b/server/src/main/java/org/elasticsearch/env/Environment.java @@ -144,7 +144,13 @@ public Environment(final Settings settings, final Path configPath) { final Settings.Builder finalSettings = Settings.builder().put(settings); if (PATH_DATA_SETTING.exists(settings)) { - finalSettings.putList(PATH_DATA_SETTING.getKey(), Arrays.stream(dataFiles).map(Path::toString).collect(Collectors.toList())); + if (dataPathUsesList(settings)) { + 
finalSettings.putList(PATH_DATA_SETTING.getKey(), + Arrays.stream(dataFiles).map(Path::toString).collect(Collectors.toList())); + } else { + assert dataFiles.length == 1; + finalSettings.put(PATH_DATA_SETTING.getKey(), dataFiles[0]); + } } finalSettings.put(PATH_HOME_SETTING.getKey(), homeFile); finalSettings.put(PATH_LOGS_SETTING.getKey(), logsFile.toString()); @@ -303,7 +309,7 @@ public static boolean dataPathUsesList(Settings settings) { return false; } String rawDataPath = settings.get(PATH_DATA_SETTING.getKey()); - return rawDataPath.startsWith("["); + return rawDataPath.startsWith("[") || rawDataPath.contains(","); } public static FileStore getFileStore(final Path path) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java index 9153021c61a2f..471e93451532d 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java @@ -138,9 +138,16 @@ public void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexSh } public void startRecovery(final IndexShard indexShard, final DiscoveryNode sourceNode, final RecoveryListener listener) { + final Releasable snapshotFileDownloadsPermit = recoverySettings.tryAcquireSnapshotDownloadPermits(); // create a new recovery status, and process... 
- final long recoveryId = - onGoingRecoveries.startRecovery(indexShard, sourceNode, snapshotFilesProvider, listener, recoverySettings.activityTimeout()); + final long recoveryId = onGoingRecoveries.startRecovery( + indexShard, + sourceNode, + snapshotFilesProvider, + listener, + recoverySettings.activityTimeout(), + snapshotFileDownloadsPermit + ); // we fork off quickly here and go async but this is called from the cluster state applier thread too and that can cause // assertions to trip if we executed it on the same thread hence we fork off to the generic threadpool. threadPool.generic().execute(new RecoveryRunner(recoveryId)); @@ -267,7 +274,9 @@ public static StartRecoveryRequest getStartRecoveryRequest(Logger logger, Discov metadataSnapshot, recoveryTarget.state().getPrimary(), recoveryTarget.recoveryId(), - startingSeqNo); + startingSeqNo, + recoveryTarget.hasPermitToDownloadSnapshotFiles() + ); return request; } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveriesCollection.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveriesCollection.java index 0646949c08c0f..7b2eb74bbacbc 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveriesCollection.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveriesCollection.java @@ -14,6 +14,8 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardClosedException; @@ -54,8 +56,10 @@ public long startRecovery(IndexShard indexShard, DiscoveryNode sourceNode, SnapshotFilesProvider snapshotFilesProvider, PeerRecoveryTargetService.RecoveryListener listener, - TimeValue activityTimeout) { 
- RecoveryTarget recoveryTarget = new RecoveryTarget(indexShard, sourceNode, snapshotFilesProvider, listener); + TimeValue activityTimeout, + @Nullable Releasable snapshotFileDownloadsPermit) { + RecoveryTarget recoveryTarget = + new RecoveryTarget(indexShard, sourceNode, snapshotFilesProvider, snapshotFileDownloadsPermit, listener); startRecoveryInternal(recoveryTarget, activityTimeout); return recoveryTarget.recoveryId(); } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java index 7d0c7669b0f8a..dfd94131cc2ab 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java @@ -20,17 +20,29 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.concurrent.AdjustableSemaphore; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; import org.elasticsearch.jdk.JavaVersion; import org.elasticsearch.monitor.os.OsProbe; import org.elasticsearch.node.NodeRoleSettings; +import java.util.Collection; +import java.util.Collections; +import java.util.Iterator; import java.util.List; +import java.util.Locale; +import java.util.Map; import java.util.stream.Collectors; +import static org.elasticsearch.common.settings.Setting.parseInt; + public class RecoverySettings { public static final Version SNAPSHOT_RECOVERIES_SUPPORTED_VERSION = Version.V_7_15_0; public static final Version SEQ_NO_SNAPSHOT_RECOVERIES_SUPPORTED_VERSION = Version.V_7_16_0; + public static final Version SNAPSHOT_FILE_DOWNLOAD_THROTTLING_SUPPORTED_VERSION = Version.V_8_0_0; private static final Logger logger = 
LogManager.getLogger(RecoverySettings.class); @@ -134,7 +146,7 @@ public class RecoverySettings { /** * recoveries would try to use files from available snapshots instead of sending them from the source node. - * defaults to `false` + * defaults to `true` */ public static final Setting INDICES_RECOVERY_USE_SNAPSHOTS_SETTING = Setting.boolSetting("indices.recovery.use_snapshots", true, Property.Dynamic, Property.NodeScope); @@ -148,6 +160,43 @@ public class RecoverySettings { Property.NodeScope ); + public static final Setting INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS_PER_NODE = new Setting<>( + "indices.recovery.max_concurrent_snapshot_file_downloads_per_node", + "25", + (s) -> parseInt(s, 1, 25, "indices.recovery.max_concurrent_snapshot_file_downloads_per_node", false), + new Setting.Validator<>() { + private final Collection> dependencies = + Collections.singletonList(INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS); + @Override + public void validate(Integer value) { + // ignore + } + + @Override + public void validate(Integer maxConcurrentSnapshotFileDownloadsPerNode, Map, Object> settings) { + int maxConcurrentSnapshotFileDownloads = (int) settings.get(INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS); + if (maxConcurrentSnapshotFileDownloadsPerNode < maxConcurrentSnapshotFileDownloads) { + throw new IllegalArgumentException( + String.format(Locale.ROOT, + "[%s]=%d is less than [%s]=%d", + INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS_PER_NODE.getKey(), + maxConcurrentSnapshotFileDownloadsPerNode, + INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS.getKey(), + maxConcurrentSnapshotFileDownloads + ) + ); + } + } + + @Override + public Iterator> settings() { + return dependencies.iterator(); + } + }, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + public static final ByteSizeValue DEFAULT_CHUNK_SIZE = new ByteSizeValue(512, ByteSizeUnit.KB); private volatile ByteSizeValue maxBytesPerSec; @@ -162,6 
+211,9 @@ public class RecoverySettings { private volatile TimeValue internalActionLongTimeout; private volatile boolean useSnapshotsDuringRecovery; private volatile int maxConcurrentSnapshotFileDownloads; + private volatile int maxConcurrentSnapshotFileDownloadsPerNode; + + private final AdjustableSemaphore maxSnapshotFileDownloadsPerNodeSemaphore; private volatile ByteSizeValue chunkSize = DEFAULT_CHUNK_SIZE; @@ -186,6 +238,8 @@ public RecoverySettings(Settings settings, ClusterSettings clusterSettings) { } this.useSnapshotsDuringRecovery = INDICES_RECOVERY_USE_SNAPSHOTS_SETTING.get(settings); this.maxConcurrentSnapshotFileDownloads = INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS.get(settings); + this.maxConcurrentSnapshotFileDownloadsPerNode = INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS_PER_NODE.get(settings); + this.maxSnapshotFileDownloadsPerNodeSemaphore = new AdjustableSemaphore(this.maxConcurrentSnapshotFileDownloadsPerNode, true); logger.debug("using max_bytes_per_sec[{}]", maxBytesPerSec); @@ -202,6 +256,8 @@ public RecoverySettings(Settings settings, ClusterSettings clusterSettings) { clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_USE_SNAPSHOTS_SETTING, this::setUseSnapshotsDuringRecovery); clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS, this::setMaxConcurrentSnapshotFileDownloads); + clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS_PER_NODE, + this::setMaxConcurrentSnapshotFileDownloadsPerNode); } public RateLimiter rateLimiter() { @@ -303,4 +359,30 @@ public int getMaxConcurrentSnapshotFileDownloads() { public void setMaxConcurrentSnapshotFileDownloads(int maxConcurrentSnapshotFileDownloads) { this.maxConcurrentSnapshotFileDownloads = maxConcurrentSnapshotFileDownloads; } + + private void setMaxConcurrentSnapshotFileDownloadsPerNode(int maxConcurrentSnapshotFileDownloadsPerNode) { + 
this.maxConcurrentSnapshotFileDownloadsPerNode = maxConcurrentSnapshotFileDownloadsPerNode; + this.maxSnapshotFileDownloadsPerNodeSemaphore.setMaxPermits(maxConcurrentSnapshotFileDownloadsPerNode); + } + + @Nullable + Releasable tryAcquireSnapshotDownloadPermits() { + final int maxConcurrentSnapshotFileDownloads = getMaxConcurrentSnapshotFileDownloads(); + final boolean permitAcquired = maxSnapshotFileDownloadsPerNodeSemaphore.tryAcquire(maxConcurrentSnapshotFileDownloads); + if (getUseSnapshotsDuringRecovery() == false || permitAcquired == false) { + if (permitAcquired == false) { + logger.warn(String.format(Locale.ROOT, + "Unable to acquire permit to use snapshot files during recovery, " + + "this recovery will recover index files from the source node. " + + "Ensure snapshot files can be used during recovery by setting [%s] to be no greater than [%d]", + INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS.getKey(), + this.maxConcurrentSnapshotFileDownloadsPerNode + ) + ); + } + return null; + } + + return Releasables.releaseOnce(() -> maxSnapshotFileDownloadsPerNodeSemaphore.release(maxConcurrentSnapshotFileDownloads)); + } } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 145d7a88e23d2..a37af5f6928be 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -486,6 +486,7 @@ void phase1(IndexCommit snapshot, long startingSeqNo, IntSupplier translogOps, A } if (canSkipPhase1(recoverySourceMetadata, request.metadataSnapshot()) == false) { cancellableThreads.checkForCancel(); + final boolean canUseSnapshots = useSnapshots && request.canDownloadSnapshotFiles(); recoveryPlannerService.computeRecoveryPlan(shard.shardId(), shardStateIdentifier, recoverySourceMetadata, @@ -493,7 +494,7 @@ void 
phase1(IndexCommit snapshot, long startingSeqNo, IntSupplier translogOps, A startingSeqNo, translogOps.getAsInt(), getRequest().targetNode().getVersion(), - useSnapshots, + canUseSnapshots, ActionListener.wrap(plan -> recoverFilesFromSourceAndSnapshot(plan, store, stopWatch, listener), listener::onFailure) ); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java index 212c221a4786e..1a8e29e48c2de 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java @@ -67,6 +67,8 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget private final IndexShard indexShard; private final DiscoveryNode sourceNode; private final SnapshotFilesProvider snapshotFilesProvider; + @Nullable // if we're not downloading files from snapshots in this recovery + private final Releasable snapshotFileDownloadsPermit; private final MultiFileWriter multiFileWriter; private final RecoveryRequestTracker requestTracker = new RecoveryRequestTracker(); private final Store store; @@ -89,11 +91,15 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget * * @param indexShard local shard where we want to recover to * @param sourceNode source node of the recovery where we recover from + * @param snapshotFileDownloadsPermit a permit that allows to download files from a snapshot, + * limiting the concurrent snapshot file downloads per node + * preventing the exhaustion of repository resources. 
* @param listener called when recovery is completed/failed */ public RecoveryTarget(IndexShard indexShard, DiscoveryNode sourceNode, SnapshotFilesProvider snapshotFilesProvider, + @Nullable Releasable snapshotFileDownloadsPermit, PeerRecoveryTargetService.RecoveryListener listener) { this.cancellableThreads = new CancellableThreads(); this.recoveryId = idGenerator.incrementAndGet(); @@ -102,6 +108,7 @@ public RecoveryTarget(IndexShard indexShard, this.indexShard = indexShard; this.sourceNode = sourceNode; this.snapshotFilesProvider = snapshotFilesProvider; + this.snapshotFileDownloadsPermit = snapshotFileDownloadsPermit; this.shardId = indexShard.shardId(); final String tempFilePrefix = RECOVERY_PREFIX + UUIDs.randomBase64UUID() + "."; this.multiFileWriter = new MultiFileWriter(indexShard.store(), indexShard.recoveryState().getIndex(), tempFilePrefix, logger, @@ -118,7 +125,7 @@ public RecoveryTarget(IndexShard indexShard, * @return a copy of this recovery target */ public RecoveryTarget retryCopy() { - return new RecoveryTarget(indexShard, sourceNode, snapshotFilesProvider, listener); + return new RecoveryTarget(indexShard, sourceNode, snapshotFilesProvider, snapshotFileDownloadsPermit, listener); } @Nullable @@ -151,6 +158,10 @@ public CancellableThreads cancellableThreads() { return cancellableThreads; } + public boolean hasPermitToDownloadSnapshotFiles() { + return snapshotFileDownloadsPermit != null; + } + /** return the last time this RecoveryStatus was used (based on System.nanoTime() */ public long lastAccessTime() { if (recoveryMonitorEnabled) { @@ -288,6 +299,13 @@ protected void closeInternal() { store.decRef(); indexShard.recoveryStats().decCurrentAsTarget(); closedLatch.countDown(); + releaseSnapshotFileDownloadsPermit(); + } + } + + private void releaseSnapshotFileDownloadsPermit() { + if (snapshotFileDownloadsPermit != null) { + snapshotFileDownloadsPermit.close(); } } @@ -506,6 +524,8 @@ public void restoreFileFromSnapshot(String repository, IndexId 
indexId, BlobStoreIndexShardSnapshot.FileInfo fileInfo, ActionListener listener) { + assert hasPermitToDownloadSnapshotFiles(); + try (InputStream inputStream = snapshotFilesProvider.getInputStreamForSnapshotFile(repository, indexId, shardId, fileInfo, this::registerThrottleTime)) { StoreFileMetadata metadata = fileInfo.metadata(); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java index bb90af079b316..5731ab3987e2c 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java @@ -31,6 +31,7 @@ public class StartRecoveryRequest extends TransportRequest { private Store.MetadataSnapshot metadataSnapshot; private boolean primaryRelocation; private long startingSeqNo; + private boolean canDownloadSnapshotFiles; public StartRecoveryRequest(StreamInput in) throws IOException { super(in); @@ -42,19 +43,25 @@ public StartRecoveryRequest(StreamInput in) throws IOException { metadataSnapshot = new Store.MetadataSnapshot(in); primaryRelocation = in.readBoolean(); startingSeqNo = in.readLong(); + if (in.getVersion().onOrAfter(RecoverySettings.SNAPSHOT_FILE_DOWNLOAD_THROTTLING_SUPPORTED_VERSION)) { + canDownloadSnapshotFiles = in.readBoolean(); + } else { + canDownloadSnapshotFiles = true; + } } /** * Construct a request for starting a peer recovery. 
* - * @param shardId the shard ID to recover - * @param targetAllocationId the allocation id of the target shard - * @param sourceNode the source node to remover from - * @param targetNode the target node to recover to - * @param metadataSnapshot the Lucene metadata - * @param primaryRelocation whether or not the recovery is a primary relocation - * @param recoveryId the recovery ID - * @param startingSeqNo the starting sequence number + * @param shardId the shard ID to recover + * @param targetAllocationId the allocation id of the target shard + * @param sourceNode the source node to remover from + * @param targetNode the target node to recover to + * @param metadataSnapshot the Lucene metadata + * @param primaryRelocation whether or not the recovery is a primary relocation + * @param recoveryId the recovery ID + * @param startingSeqNo the starting sequence number + * @param canDownloadSnapshotFiles flag that indicates if the snapshot files can be downloaded */ public StartRecoveryRequest(final ShardId shardId, final String targetAllocationId, @@ -63,7 +70,8 @@ public StartRecoveryRequest(final ShardId shardId, final Store.MetadataSnapshot metadataSnapshot, final boolean primaryRelocation, final long recoveryId, - final long startingSeqNo) { + final long startingSeqNo, + final boolean canDownloadSnapshotFiles) { this.recoveryId = recoveryId; this.shardId = shardId; this.targetAllocationId = targetAllocationId; @@ -72,6 +80,7 @@ public StartRecoveryRequest(final ShardId shardId, this.metadataSnapshot = metadataSnapshot; this.primaryRelocation = primaryRelocation; this.startingSeqNo = startingSeqNo; + this.canDownloadSnapshotFiles = canDownloadSnapshotFiles; assert startingSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO || metadataSnapshot.getHistoryUUID() != null : "starting seq no is set but not history uuid"; } @@ -108,6 +117,10 @@ public long startingSeqNo() { return startingSeqNo; } + public boolean canDownloadSnapshotFiles() { + return canDownloadSnapshotFiles; + } 
+ @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); @@ -119,5 +132,8 @@ public void writeTo(StreamOutput out) throws IOException { metadataSnapshot.writeTo(out); out.writeBoolean(primaryRelocation); out.writeLong(startingSeqNo); + if (out.getVersion().onOrAfter(RecoverySettings.SNAPSHOT_FILE_DOWNLOAD_THROTTLING_SUPPORTED_VERSION)) { + out.writeBoolean(canDownloadSnapshotFiles); + } } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthAction.java index 131112a0ad29f..ca2f4653f85df 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthAction.java @@ -81,6 +81,9 @@ public static ClusterHealthRequest fromRequest(final RestRequest request) { if (request.param("wait_for_events") != null) { clusterHealthRequest.waitForEvents(Priority.valueOf(request.param("wait_for_events").toUpperCase(Locale.ROOT))); } + clusterHealthRequest.return200ForClusterHealthTimeout(request.paramAsBoolean( + "return_200_for_cluster_health_timeout", + clusterHealthRequest.doesReturn200ForClusterHealthTimeout())); return clusterHealthRequest; } diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index af67aeb1fd0ae..46ac90b42a840 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -16,13 +16,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.xcontent.ParseField; -import 
org.elasticsearch.xcontent.ToXContentFragment; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RestApiVersion; @@ -49,6 +43,12 @@ import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.suggest.SuggestBuilder; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentFragment; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; import java.io.IOException; import java.util.ArrayList; diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequestTests.java index 514d59d45c0ef..8608d9f3a6886 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequestTests.java @@ -43,6 +43,7 @@ public void testSerialize() throws Exception { assertThat(cloneRequest.waitForEvents(), equalTo(originalRequest.waitForEvents())); assertIndicesEquals(cloneRequest.indices(), originalRequest.indices()); assertThat(cloneRequest.indicesOptions(), equalTo(originalRequest.indicesOptions())); + assertThat(cloneRequest.doesReturn200ForClusterHealthTimeout(), equalTo(originalRequest.doesReturn200ForClusterHealthTimeout())); } public void testRequestReturnsHiddenIndicesByDefault() { diff --git 
a/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java index 01ad7731d0f76..fd3a63dd290a1 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java @@ -20,10 +20,10 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentParser; import org.hamcrest.Matchers; import java.io.IOException; @@ -43,7 +43,7 @@ public class ClusterHealthResponsesTests extends AbstractSerializingTestCase void sendResponse(TransportResponseHandler handler, TransportResponse resp) { - threadPool.executor(ThreadPool.Names.MANAGEMENT).submit(new AbstractRunnable() { + threadPool.executor(ThreadPool.Names.SEARCH_COORDINATION).submit(new AbstractRunnable() { @Override public void onFailure(Exception e) { throw new AssertionError(e); @@ -765,20 +764,6 @@ protected void doRun() { } }); } - - void sendFailure(TransportResponseHandler handler, Exception e) { - threadPool.executor(ThreadPool.Names.MANAGEMENT).submit(new AbstractRunnable() { - @Override - public void onFailure(Exception e) { - throw new AssertionError(e); - } - - @Override - protected void doRun() { - handler.handleException(new TransportException(e)); - } - }); - } } static FieldCapabilitiesRequest randomFieldCapRequest(boolean withFilter) { diff --git a/server/src/test/java/org/elasticsearch/cluster/action/index/MappingUpdatedActionTests.java 
b/server/src/test/java/org/elasticsearch/cluster/action/index/MappingUpdatedActionTests.java index ae229c6df6444..5f6c003bee155 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/index/MappingUpdatedActionTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/index/MappingUpdatedActionTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.client.IndicesAdminClient; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.action.index.MappingUpdatedAction.AdjustableSemaphore; +import org.elasticsearch.common.util.concurrent.AdjustableSemaphore; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java index 92e9d6ac5aed4..afe24a65df677 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.LogEvent; +import org.apache.lucene.util.Constants; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.cluster.AbstractDiffable; @@ -1847,13 +1848,17 @@ public void testLogsMessagesIfPublicationDelayed() throws IllegalAccessException "node [" + brokenNode + "] is lagging at cluster state version [*], " + "although publication of cluster state version [*] completed [*] ago")); - mockLogAppender.addExpectation(new MockLogAppender.SeenEventExpectation( - "hot threads from lagging node", - LagDetector.class.getCanonicalName(), - Level.DEBUG, - "hot threads from node [" + - 
brokenNode.getLocalNode().descriptionWithoutAttributes() + - "] lagging at version [*] despite commit of cluster state version [*]:\nHot threads at*")); + if (Constants.WINDOWS == false) { + // log messages containing control characters are hidden from the log assertions framework, and this includes the + // `\r` that Windows uses in its line endings, so we only see this message on systems with `\n` line endings: + mockLogAppender.addExpectation(new MockLogAppender.SeenEventExpectation( + "hot threads from lagging node", + LagDetector.class.getCanonicalName(), + Level.DEBUG, + "hot threads from node [" + + brokenNode.getLocalNode().descriptionWithoutAttributes() + + "] lagging at version [*] despite commit of cluster state version [*]:\nHot threads at*")); + } // drop the publication messages to one node, but then restore connectivity so it remains in the cluster and does not fail // health checks diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java index c5f6dff52be66..14b733bee3952 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java @@ -316,12 +316,7 @@ public void testPrefixedNamesFilteringTest() { public void testNestedFiltering() { Map map = new HashMap<>(); map.put("field", "value"); - map.put("array", Arrays.asList(1, new HashMap() { - { - put("nested", 2); - put("nested_2", 3); - } - })); + map.put("array", Arrays.asList(1, Map.of("nested", 2, "nested_2", 3))); Map filteredMap = XContentMapValues.filter(map, new String[] { "array.nested" }, Strings.EMPTY_ARRAY); assertThat(filteredMap.size(), equalTo(1)); @@ -336,12 +331,7 @@ public void testNestedFiltering() { map.clear(); map.put("field", "value"); - map.put("obj", new HashMap() { - { - put("field", "value"); - 
put("field2", "value2"); - } - }); + map.put("obj", Map.of("field", "value", "field2", "value2")); filteredMap = XContentMapValues.filter(map, new String[] { "obj.field" }, Strings.EMPTY_ARRAY); assertThat(filteredMap.size(), equalTo(1)); assertThat(((Map) filteredMap.get("obj")).size(), equalTo(1)); @@ -359,18 +349,8 @@ public void testNestedFiltering() { public void testCompleteObjectFiltering() { Map map = new HashMap<>(); map.put("field", "value"); - map.put("obj", new HashMap() { - { - put("field", "value"); - put("field2", "value2"); - } - }); - map.put("array", Arrays.asList(1, new HashMap() { - { - put("field", "value"); - put("field2", "value2"); - } - })); + map.put("obj", Map.of("field", "value", "field2", "value2")); + map.put("array", Arrays.asList(1, Map.of("field", "value", "field2", "value2"))); Map filteredMap = XContentMapValues.filter(map, new String[] { "obj" }, Strings.EMPTY_ARRAY); assertThat(filteredMap.size(), equalTo(1)); @@ -401,18 +381,8 @@ public void testCompleteObjectFiltering() { public void testFilterIncludesUsingStarPrefix() { Map map = new HashMap<>(); map.put("field", "value"); - map.put("obj", new HashMap() { - { - put("field", "value"); - put("field2", "value2"); - } - }); - map.put("n_obj", new HashMap() { - { - put("n_field", "value"); - put("n_field2", "value2"); - } - }); + map.put("obj", Map.of("field", "value", "field2", "value2")); + map.put("n_obj", Map.of("n_field", "value", "n_field2", "value2")); Map filteredMap = XContentMapValues.filter(map, new String[] { "*.field2" }, Strings.EMPTY_ARRAY); assertThat(filteredMap.size(), equalTo(1)); @@ -546,6 +516,11 @@ public void testDotsInFieldNames() { assertEquals(expected, filtered); } + /** + * Tests that we can extract paths containing non-ascii characters. + * See {@link AbstractFilteringTestCase#testFilterSupplementaryCharactersInPaths()} + * for a similar test but for XContent. 
+ */ public void testSupplementaryCharactersInPaths() { Map map = new HashMap<>(); map.put("搜索", 2); @@ -555,6 +530,11 @@ public void testSupplementaryCharactersInPaths() { assertEquals(Collections.singletonMap("指数", 3), XContentMapValues.filter(map, new String[0], new String[] { "搜索" })); } + /** + * Tests that we can extract paths which share a prefix with other paths. + * See {@link AbstractFilteringTestCase#testFilterSharedPrefixes()} + * for a similar test but for XContent. + */ public void testSharedPrefixes() { Map map = new HashMap<>(); map.put("foobar", 2); @@ -633,6 +613,11 @@ public void testEmptyObjectsSubFieldsInclusion() { } } + /** + * Tests that we can extract paths which have another path as a prefix. + * See {@link AbstractFilteringTestCase#testFilterPrefix()} + * for a similar test but for XContent. + */ public void testPrefix() { Map map = new HashMap<>(); map.put("photos", Arrays.asList(new String[] { "foo", "bar" })); diff --git a/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java index ef0fd1e621308..21618c273da24 100644 --- a/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java +++ b/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java @@ -21,6 +21,7 @@ import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.CoreMatchers.startsWith; +import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -72,6 +73,15 @@ public void testPathDataNotSetInEnvironmentIfNotSet() { assertFalse(Environment.PATH_DATA_SETTING.exists(environment.settings())); } + public void testPathDataLegacyCommaList() { + final Settings settings = Settings.builder() + .put("path.home", createTempDir().toAbsolutePath()) + .put("path.data", createTempDir().toAbsolutePath() + "," + 
createTempDir().toAbsolutePath()) + .build(); + final Environment environment = new Environment(settings, null); + assertThat(environment.dataFiles(), arrayWithSize(2)); + } + public void testPathLogsWhenNotSet() { final Path pathHome = createTempDir().toAbsolutePath(); final Settings settings = Settings.builder().put("path.home", pathHome).build(); @@ -173,6 +183,11 @@ public void testSingleDataPathListCheck() { .putList(Environment.PATH_DATA_SETTING.getKey(), createTempDir().toString()).build(); assertThat(Environment.dataPathUsesList(settings), is(true)); } + { + final Settings settings = Settings.builder() + .put(Environment.PATH_DATA_SETTING.getKey(), createTempDir().toString() + "," + createTempDir().toString()).build(); + assertThat(Environment.dataPathUsesList(settings), is(true)); + } } private void assertPath(final String actual, final Path expected) { diff --git a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java index f8c49e24a0530..1e68fa3434fd6 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java @@ -116,7 +116,7 @@ public void run() { thread.start(); IndexShard replica = shards.addReplica(); Future future = shards.asyncRecoverReplica(replica, - (indexShard, node) -> new RecoveryTarget(indexShard, node, null, recoveryListener) { + (indexShard, node) -> new RecoveryTarget(indexShard, node, null, null, recoveryListener) { @Override public void cleanFiles(int totalTranslogOps, long globalCheckpoint, Store.MetadataSnapshot sourceMetadata, ActionListener listener) { @@ -193,7 +193,7 @@ public IndexResult index(Index op) throws IOException { thread.start(); IndexShard replica = shards.addReplica(); Future fut = shards.asyncRecoverReplica(replica, - (shard, node) -> new 
RecoveryTarget(shard, node, null, recoveryListener) { + (shard, node) -> new RecoveryTarget(shard, node, null, null, recoveryListener) { @Override public void prepareForTranslogOperations(int totalTranslogOps, ActionListener listener) { try { diff --git a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java index c32db56d83a9e..4a9d5aca9fa2b 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java @@ -423,7 +423,7 @@ protected EngineFactory getEngineFactory(ShardRouting routing) { AtomicBoolean recoveryDone = new AtomicBoolean(false); final Future recoveryFuture = shards.asyncRecoverReplica(newReplica, (indexShard, node) -> { recoveryStart.countDown(); - return new RecoveryTarget(indexShard, node, null, recoveryListener) { + return new RecoveryTarget(indexShard, node, null, null, recoveryListener) { @Override public void finalizeRecovery(long globalCheckpoint, long trimAboveSeqNo, ActionListener listener) { recoveryDone.set(true); @@ -478,7 +478,7 @@ protected EngineFactory getEngineFactory(final ShardRouting routing) { final IndexShard replica = shards.addReplica(); final Future recoveryFuture = shards.asyncRecoverReplica( replica, - (indexShard, node) -> new RecoveryTarget(indexShard, node, null, recoveryListener) { + (indexShard, node) -> new RecoveryTarget(indexShard, node, null, null, recoveryListener) { @Override public void indexTranslogOperations( final List operations, @@ -743,7 +743,7 @@ public static class BlockingTarget extends RecoveryTarget { public BlockingTarget(RecoveryState.Stage stageToBlock, CountDownLatch recoveryBlocked, CountDownLatch releaseRecovery, IndexShard shard, DiscoveryNode sourceNode, PeerRecoveryTargetService.RecoveryListener listener, Logger logger) { - 
super(shard, sourceNode, null, listener); + super(shard, sourceNode, null, null, listener); this.recoveryBlocked = recoveryBlocked; this.releaseRecovery = releaseRecovery; this.stageToBlock = stageToBlock; diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index b100d61f1fbd4..e4f0bc9253460 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -2536,7 +2536,7 @@ public void testTranslogRecoverySyncsTranslog() throws IOException { indexDoc(primary, "_doc", "0", "{\"foo\" : \"bar\"}"); IndexShard replica = newShard(primary.shardId(), false, "n2", metadata, null); recoverReplica(replica, primary, (shard, discoveryNode) -> - new RecoveryTarget(shard, discoveryNode, null, recoveryListener) { + new RecoveryTarget(shard, discoveryNode, null, null, recoveryListener) { @Override public void indexTranslogOperations( final List operations, @@ -2643,7 +2643,7 @@ public void testShardActiveDuringPeerRecovery() throws IOException { // Shard is still inactive since we haven't started recovering yet assertFalse(replica.isActive()); recoverReplica(replica, primary, (shard, discoveryNode) -> - new RecoveryTarget(shard, discoveryNode, null, recoveryListener) { + new RecoveryTarget(shard, discoveryNode, null, null, recoveryListener) { @Override public void indexTranslogOperations( final List operations, @@ -2702,7 +2702,7 @@ public void testRefreshListenersDuringPeerRecovery() throws IOException { replica.markAsRecovering("for testing", new RecoveryState(replica.routingEntry(), localNode, localNode)); assertListenerCalled.accept(replica); recoverReplica(replica, primary, (shard, discoveryNode) -> - new RecoveryTarget(shard, discoveryNode, null, recoveryListener) { + new RecoveryTarget(shard, discoveryNode, null, null, recoveryListener) { // we're only checking that 
listeners are called when the engine is open, before there is no point @Override public void prepareForTranslogOperations(int totalTranslogOps, ActionListener listener) { diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java index 5e73e49d44ed6..b9ca2026478fc 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java @@ -188,8 +188,8 @@ public void testLoadEmptyShards() throws IOException { public void testShardPathSelection() throws IOException { try (NodeEnvironment env = newNodeEnvironment(Settings.builder().build())) { - NodeEnvironment.NodePath path = env.nodePaths()[0]; - assertEquals(path, ShardPath.getPathWithMostFreeSpace(env)); + NodeEnvironment.NodePath[] paths = env.nodePaths(); + assertThat(List.of(paths), hasItem(ShardPath.getPathWithMostFreeSpace(env))); ShardId shardId = new ShardId("foo", "0xDEADBEEF", 0); Settings indexSettings = Settings.builder() @@ -198,7 +198,13 @@ public void testShardPathSelection() throws IOException { ShardPath shardPath = ShardPath.selectNewPathForShard(env, shardId, idxSettings, 1L, new HashMap<>()); assertNotNull(shardPath.getDataPath()); - assertEquals(path.indicesPath.resolve("0xDEADBEEF").resolve("0"), shardPath.getDataPath()); + + List indexPaths = new ArrayList<>(); + for (NodeEnvironment.NodePath nodePath : paths) { + indexPaths.add(nodePath.indicesPath.resolve("0xDEADBEEF").resolve("0")); + } + + assertThat(indexPaths, hasItem(shardPath.getDataPath())); assertEquals("0xDEADBEEF", shardPath.getShardId().getIndex().getUUID()); assertEquals("foo", shardPath.getShardId().getIndexName()); assertFalse(shardPath.isCustomDataPath()); diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoverySourceServiceTests.java 
b/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoverySourceServiceTests.java index 1a91d467df12a..32272130bd617 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoverySourceServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoverySourceServiceTests.java @@ -40,7 +40,7 @@ public void testDuplicateRecoveries() throws IOException { mock(SnapshotsRecoveryPlannerService.class)); StartRecoveryRequest startRecoveryRequest = new StartRecoveryRequest(primary.shardId(), randomAlphaOfLength(10), getFakeDiscoNode("source"), getFakeDiscoNode("target"), Store.MetadataSnapshot.EMPTY, randomBoolean(), randomLong(), - SequenceNumbers.UNASSIGNED_SEQ_NO); + SequenceNumbers.UNASSIGNED_SEQ_NO, true); peerRecoverySourceService.start(); RecoverySourceHandler handler = peerRecoverySourceService.ongoingRecoveries.addNewRecovery(startRecoveryRequest, primary); DelayRecoveryException delayRecoveryException = expectThrows(DelayRecoveryException.class, diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java index fe72340870728..bc98497f0bcda 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java @@ -31,7 +31,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Tuple; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.engine.NoOpEngine; @@ -47,6 +47,7 @@ import org.elasticsearch.index.translog.Translog; import org.elasticsearch.repositories.IndexId; import 
org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.xcontent.XContentType; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -64,6 +65,7 @@ import java.util.concurrent.BlockingQueue; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ExecutionException; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.LongConsumer; import java.util.stream.Collectors; import java.util.stream.LongStream; @@ -97,7 +99,7 @@ public void testWriteFileChunksConcurrently() throws Exception { final DiscoveryNode pNode = getFakeDiscoNode(sourceShard.routingEntry().currentNodeId()); final DiscoveryNode rNode = getFakeDiscoNode(targetShard.routingEntry().currentNodeId()); targetShard.markAsRecovering("test-peer-recovery", new RecoveryState(targetShard.routingEntry(), rNode, pNode)); - final RecoveryTarget recoveryTarget = new RecoveryTarget(targetShard, null, null, null); + final RecoveryTarget recoveryTarget = new RecoveryTarget(targetShard, null, null, null, null); final PlainActionFuture receiveFileInfoFuture = new PlainActionFuture<>(); recoveryTarget.receiveFileInfo( mdFiles.stream().map(StoreFileMetadata::name).collect(Collectors.toList()), @@ -297,7 +299,7 @@ public void testResetStartingSeqNoIfLastCommitCorrupted() throws Exception { shard.prepareForIndexRecovery(); long startingSeqNo = shard.recoverLocallyUpToGlobalCheckpoint(); shard.store().markStoreCorrupted(new IOException("simulated")); - RecoveryTarget recoveryTarget = new RecoveryTarget(shard, null, null, null); + RecoveryTarget recoveryTarget = new RecoveryTarget(shard, null, null, null, null); StartRecoveryRequest request = PeerRecoveryTargetService.getStartRecoveryRequest(logger, rNode, recoveryTarget, startingSeqNo); assertThat(request.startingSeqNo(), equalTo(UNASSIGNED_SEQ_NO)); assertThat(request.metadataSnapshot().size(), equalTo(0)); @@ -324,7 +326,7 @@ public void testResetStartRequestIfTranslogIsCorrupted() throws 
Exception { shard = reinitShard(shard, ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.PeerRecoverySource.INSTANCE)); shard.markAsRecovering("peer recovery", new RecoveryState(shard.routingEntry(), pNode, rNode)); shard.prepareForIndexRecovery(); - RecoveryTarget recoveryTarget = new RecoveryTarget(shard, null, null, null); + RecoveryTarget recoveryTarget = new RecoveryTarget(shard, null, null, null, null); StartRecoveryRequest request = PeerRecoveryTargetService.getStartRecoveryRequest( logger, rNode, recoveryTarget, randomNonNegativeLong()); assertThat(request.startingSeqNo(), equalTo(UNASSIGNED_SEQ_NO)); @@ -385,7 +387,7 @@ public int getReadSnapshotFileBufferSizeForRepo(String repository) { recoveryStateIndex.addFileDetail(storeFileMetadata.name(), storeFileMetadata.length(), false); recoveryStateIndex.setFileDetailsComplete(); - RecoveryTarget recoveryTarget = new RecoveryTarget(shard, null, snapshotFilesProvider, null); + RecoveryTarget recoveryTarget = new RecoveryTarget(shard, null, snapshotFilesProvider, () -> {}, null); PlainActionFuture writeSnapshotFileFuture = PlainActionFuture.newFuture(); recoveryTarget.restoreFileFromSnapshot(repositoryName, indexId, fileInfo, writeSnapshotFileFuture); @@ -457,7 +459,7 @@ public int getReadSnapshotFileBufferSizeForRepo(String repository) { recoveryStateIndex.addFileDetail(storeFileMetadata.name(), storeFileMetadata.length(), false); recoveryStateIndex.setFileDetailsComplete(); - RecoveryTarget recoveryTarget = new RecoveryTarget(shard, null, snapshotFilesProvider, null); + RecoveryTarget recoveryTarget = new RecoveryTarget(shard, null, snapshotFilesProvider, () -> {}, null); String repositoryName = "repo"; IndexId indexId = new IndexId("index", "uuid"); @@ -565,7 +567,7 @@ public int getReadSnapshotFileBufferSizeForRepo(String repository) { } }; - RecoveryTarget recoveryTarget = new RecoveryTarget(shard, null, snapshotFilesProvider, null); + RecoveryTarget recoveryTarget = new 
RecoveryTarget(shard, null, snapshotFilesProvider, () -> {}, null); String[] fileNamesBeforeRecoveringSnapshotFiles = directory.listAll(); @@ -631,7 +633,7 @@ public int getReadSnapshotFileBufferSizeForRepo(String repository) { recoveryStateIndex.addFileDetail(storeFileMetadata.name(), storeFileMetadata.length(), false); recoveryStateIndex.setFileDetailsComplete(); - RecoveryTarget recoveryTarget = new RecoveryTarget(shard, null, snapshotFilesProvider, null); + RecoveryTarget recoveryTarget = new RecoveryTarget(shard, null, snapshotFilesProvider, () -> {}, null); String repository = "repo"; IndexId indexId = new IndexId("index", "uuid"); @@ -664,6 +666,29 @@ public int getReadSnapshotFileBufferSizeForRepo(String repository) { closeShards(shard); } + public void testSnapshotFileDownloadPermitIsReleasedAfterClosingRecoveryTarget() throws Exception { + DiscoveryNode pNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), + Collections.emptyMap(), Collections.emptySet(), Version.CURRENT); + DiscoveryNode rNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), + Collections.emptyMap(), Collections.emptySet(), Version.CURRENT); + + IndexShard shard = newShard(false); + shard.markAsRecovering("peer recovery", new RecoveryState(shard.routingEntry(), pNode, rNode)); + shard.prepareForIndexRecovery(); + + AtomicBoolean snapshotFileDownloadsPermitFlag = new AtomicBoolean(); + Releasable snapshotFileDownloadsPermit = () -> { + assertThat(snapshotFileDownloadsPermitFlag.compareAndSet(false, true), is(equalTo(true))); + }; + RecoveryTarget recoveryTarget = + new RecoveryTarget(shard, null, null, snapshotFileDownloadsPermit, null); + + recoveryTarget.decRef(); + + assertThat(snapshotFileDownloadsPermitFlag.get(), is(equalTo(true))); + closeShards(shard); + } + private Tuple createStoreFileMetadataWithRandomContent(String fileName) throws Exception { ByteArrayOutputStream out = new ByteArrayOutputStream(); try (OutputStreamIndexOutput indexOutput = new 
OutputStreamIndexOutput("test", "file", out, 1024)) { diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySettingsTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySettingsTests.java new file mode 100644 index 0000000000000..559da2ad71815 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySettingsTests.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.indices.recovery; + +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.test.ESTestCase; + +import static org.elasticsearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS; +import static org.elasticsearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS_PER_NODE; +import static org.elasticsearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_USE_SNAPSHOTS_SETTING; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class RecoverySettingsTests extends ESTestCase { + public void testSnapshotDownloadPermitsAreNotGrantedWhenSnapshotsUseFlagIsFalse() { + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + RecoverySettings recoverySettings = new RecoverySettings( + Settings.builder() + 
.put(INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS_PER_NODE.getKey(), 5) + .put(INDICES_RECOVERY_USE_SNAPSHOTS_SETTING.getKey(), false) + .build(), + clusterSettings + ); + + assertThat(recoverySettings.tryAcquireSnapshotDownloadPermits(), is(nullValue())); + } + + public void testGrantsSnapshotDownloadPermitsUpToMaxPermits() { + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + RecoverySettings recoverySettings = new RecoverySettings( + Settings.builder().put(INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS_PER_NODE.getKey(), 5).build(), + clusterSettings + ); + + Releasable permit = recoverySettings.tryAcquireSnapshotDownloadPermits(); + assertThat(permit, is(notNullValue())); + + assertThat(recoverySettings.tryAcquireSnapshotDownloadPermits(), is(nullValue())); + + permit.close(); + assertThat(recoverySettings.tryAcquireSnapshotDownloadPermits(), is(notNullValue())); + } + + public void testSnapshotDownloadPermitCanBeDynamicallyUpdated() { + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + RecoverySettings recoverySettings = new RecoverySettings( + Settings.builder().put(INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS_PER_NODE.getKey(), 5).build(), + clusterSettings + ); + + Releasable permit = recoverySettings.tryAcquireSnapshotDownloadPermits(); + assertThat(permit, is(notNullValue())); + + assertThat(recoverySettings.tryAcquireSnapshotDownloadPermits(), is(nullValue())); + clusterSettings.applySettings( + Settings.builder().put(INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS_PER_NODE.getKey(), 10).build() + ); + + assertThat(recoverySettings.tryAcquireSnapshotDownloadPermits(), is(notNullValue())); + assertThat(recoverySettings.tryAcquireSnapshotDownloadPermits(), is(nullValue())); + permit.close(); + } + + public void testMaxConcurrentSnapshotFileDownloadsPerNodeIsValidated() { + ClusterSettings 
clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + Settings settings = Settings.builder() + .put(INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS.getKey(), 10) + .put(INDICES_RECOVERY_MAX_CONCURRENT_SNAPSHOT_FILE_DOWNLOADS_PER_NODE.getKey(), 5) + .build(); + IllegalArgumentException exception = + expectThrows(IllegalArgumentException.class, () -> new RecoverySettings(settings, clusterSettings)); + assertThat(exception.getMessage(), + containsString("[indices.recovery.max_concurrent_snapshot_file_downloads_per_node]=5 " + + "is less than [indices.recovery.max_concurrent_snapshot_file_downloads]=10") + ); + } +} diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index c5a30e4ef98ae..eea77a8cd121e 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -229,7 +229,9 @@ public StartRecoveryRequest getStartRecoveryRequest() throws IOException { randomBoolean(), randomNonNegativeLong(), randomBoolean() || metadataSnapshot.getHistoryUUID() == null ? 
- SequenceNumbers.UNASSIGNED_SEQ_NO : randomNonNegativeLong()); + SequenceNumbers.UNASSIGNED_SEQ_NO : randomNonNegativeLong(), + true + ); } public void testSendSnapshotSendsOps() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java index 29f82bfc64d38..c88defab8222b 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java @@ -257,7 +257,7 @@ public void testPeerRecoverySendSafeCommitInFileBased() throws Exception { IndexShard replicaShard = newShard(primaryShard.shardId(), false); updateMappings(replicaShard, primaryShard.indexSettings().getIndexMetadata()); recoverReplica(replicaShard, primaryShard, - (r, sourceNode) -> new RecoveryTarget(r, sourceNode, null, recoveryListener) { + (r, sourceNode) -> new RecoveryTarget(r, sourceNode, null, null, recoveryListener) { @Override public void prepareForTranslogOperations(int totalTranslogOps, ActionListener listener) { super.prepareForTranslogOperations(totalTranslogOps, listener); @@ -369,7 +369,7 @@ public long addDocument(Iterable doc) throws IOExcepti IndexShard replica = group.addReplica(); expectThrows(Exception.class, () -> group.recoverReplica(replica, (shard, sourceNode) -> { - return new RecoveryTarget(shard, sourceNode, null, new PeerRecoveryTargetService.RecoveryListener() { + return new RecoveryTarget(shard, sourceNode, null, null, new PeerRecoveryTargetService.RecoveryListener() { @Override public void onRecoveryDone(RecoveryState state, ShardLongFieldRange timestampMillisFieldRange) { throw new AssertionError("recovery must fail"); diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java index 21763e71753f8..95ecfd71dfcd7 100644 --- 
a/server/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java @@ -44,7 +44,8 @@ public void testSerialization() throws Exception { randomBoolean(), randomNonNegativeLong(), randomBoolean() || metadataSnapshot.getHistoryUUID() == null ? - SequenceNumbers.UNASSIGNED_SEQ_NO : randomNonNegativeLong()); + SequenceNumbers.UNASSIGNED_SEQ_NO : randomNonNegativeLong(), + randomBoolean()); final ByteArrayOutputStream outBuffer = new ByteArrayOutputStream(); final OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer); diff --git a/server/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java b/server/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java index dd6f13c8f2796..5cd177f658ede 100644 --- a/server/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java +++ b/server/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java @@ -150,6 +150,13 @@ long startRecovery(RecoveriesCollection collection, DiscoveryNode sourceNode, In final DiscoveryNode rNode = getDiscoveryNode(indexShard.routingEntry().currentNodeId()); indexShard.markAsRecovering("remote", new RecoveryState(indexShard.routingEntry(), sourceNode, rNode)); indexShard.prepareForIndexRecovery(); - return collection.startRecovery(indexShard, sourceNode, null, listener, timeValue); + return collection.startRecovery( + indexShard, + sourceNode, + null, + listener, + timeValue, + null + ); } } diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthActionTests.java index 5e9e5662d6e04..e42c49f2f93b2 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthActionTests.java +++ 
b/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthActionTests.java @@ -51,6 +51,8 @@ public void testFromRequest() { params.put("wait_for_active_shards", String.valueOf(waitForActiveShards)); params.put("wait_for_nodes", waitForNodes); params.put("wait_for_events", waitForEvents.name()); + boolean requestTimeout200 = randomBoolean(); + params.put("return_200_for_cluster_health_timeout", String.valueOf(requestTimeout200)); FakeRestRequest restRequest = buildRestRequest(params); ClusterHealthRequest clusterHealthRequest = RestClusterHealthAction.fromRequest(restRequest); @@ -65,7 +67,7 @@ public void testFromRequest() { assertThat(clusterHealthRequest.waitForActiveShards(), equalTo(ActiveShardCount.parseString(String.valueOf(waitForActiveShards)))); assertThat(clusterHealthRequest.waitForNodes(), equalTo(waitForNodes)); assertThat(clusterHealthRequest.waitForEvents(), equalTo(waitForEvents)); - + assertThat(clusterHealthRequest.doesReturn200ForClusterHealthTimeout(), equalTo(requestTimeout200)); } private FakeRestRequest buildRestRequest(Map params) { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java similarity index 92% rename from server/src/test/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java rename to test/framework/src/main/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java index fbb5a8c84372b..c5bb14133042e 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.CheckedFunction; -import 
org.elasticsearch.core.PathUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.DeprecationHandler; import org.elasticsearch.xcontent.FilterXContentParser; @@ -21,15 +20,18 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; +import java.io.BufferedReader; import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; -import java.nio.file.Files; import java.util.Set; import static java.util.Collections.emptySet; import static java.util.Collections.singleton; import static java.util.stream.Collectors.toSet; +import static org.hamcrest.Matchers.notNullValue; /** * Tests for {@link XContent} filtering. @@ -46,17 +48,16 @@ protected interface Builder extends CheckedFunction { - try ( - XContentParser parser = XContentType.JSON.xContent() - .createParser( - NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - AbstractFilteringTestCase.class.getResourceAsStream(file) - ) - ) { - // copyCurrentStructure does not property handle filters when it is passed a json parser. So we hide it. - return builder.copyCurrentStructure(new FilterXContentParser(parser) { - }); + try (InputStream stream = AbstractFilteringTestCase.class.getResourceAsStream(file)) { + assertThat("Couldn't find [" + file + "]", stream, notNullValue()); + try ( + XContentParser parser = XContentType.JSON.xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, stream) + ) { + // copyCurrentStructure does not property handle filters when it is passed a json parser. So we hide it. 
+ return builder.copyCurrentStructure(new FilterXContentParser(parser) { + }); + } } }; } @@ -397,7 +398,7 @@ public void testBasics() throws Exception { } /** - * Generalization of {@link XContentMapValuesTests#testSupplementaryCharactersInPaths()} + * Tests that we can extract paths containing non-ascii characters. */ public void testFilterSupplementaryCharactersInPaths() throws IOException { Builder sample = builder -> builder.startObject().field("搜索", 2).field("指数", 3).endObject(); @@ -410,7 +411,7 @@ public void testFilterSupplementaryCharactersInPaths() throws IOException { } /** - * Generalization of {@link XContentMapValuesTests#testSharedPrefixes()} + * Tests that we can extract paths which share a prefix with other paths. */ public void testFilterSharedPrefixes() throws IOException { Builder sample = builder -> builder.startObject().field("foobar", 2).field("foobaz", 3).endObject(); @@ -423,7 +424,7 @@ public void testFilterSharedPrefixes() throws IOException { } /** - * Generalization of {@link XContentMapValuesTests#testPrefix()} + * Tests that we can extract paths which have another path as a prefix. 
*/ public void testFilterPrefix() throws IOException { Builder sample = builder -> builder.startObject().array("photos", "foo", "bar").field("photosCount", 2).endObject(); @@ -447,10 +448,12 @@ public void testManyFilters() throws IOException, URISyntaxException { .endObject() .endObject() .endObject(); - Set manyFilters = Files.readAllLines( - PathUtils.get(AbstractFilteringTestCase.class.getResource("many_filters.txt").toURI()), - StandardCharsets.UTF_8 - ).stream().filter(s -> false == s.startsWith("#")).collect(toSet()); - testFilter(deep, deep, manyFilters, emptySet()); + try (InputStream stream = AbstractFilteringTestCase.class.getResourceAsStream("many_filters.txt")) { + assertThat("Couldn't find [many_filters.txt]", stream, notNullValue()); + Set manyFilters = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8)).lines() + .filter(s -> false == s.startsWith("#")) + .collect(toSet()); + testFilter(deep, deep, manyFilters, emptySet()); + } } } diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index 1c73e60d653e3..7248bcf10fa1b 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -395,7 +395,7 @@ public synchronized boolean removeReplica(IndexShard replica) throws IOException public void recoverReplica(IndexShard replica) throws IOException { recoverReplica(replica, - (r, sourceNode) -> new RecoveryTarget(r, sourceNode, null, recoveryListener)); + (r, sourceNode) -> new RecoveryTarget(r, sourceNode, null, null, recoveryListener)); } public void recoverReplica(IndexShard replica, BiFunction targetSupplier) diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 36a2f93e63188..cb0ae6021a717 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -582,7 +582,7 @@ protected DiscoveryNode getFakeDiscoNode(String id) { /** recovers a replica from the given primary **/ protected void recoverReplica(IndexShard replica, IndexShard primary, boolean startReplica) throws IOException { recoverReplica(replica, primary, - (r, sourceNode) -> new RecoveryTarget(r, sourceNode, null, recoveryListener), + (r, sourceNode) -> new RecoveryTarget(r, sourceNode, null, null, recoveryListener), true, startReplica); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java index 50079a920525f..bcc08edecb234 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.text.Text; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.search.fetch.subphase.FieldAndFormat; import org.elasticsearch.xcontent.DeprecationHandler; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentBuilder; @@ -174,7 +175,18 @@ public static SearchSourceBuilder randomSearchSourceBuilder( if (randomBoolean()) { int numFields = randomInt(5); for (int i = 0; i < numFields; i++) { - builder.fetchField(randomAlphaOfLengthBetween(5, 10)); + String field = randomAlphaOfLengthBetween(5, 10); + String format = randomBoolean() ? 
randomAlphaOfLengthBetween(5, 10) : null; + builder.fetchField(new FieldAndFormat(field, format)); + } + } + + if (randomBoolean()) { + int numFields = randomInt(5); + for (int i = 0; i < numFields; i++) { + String field = randomAlphaOfLengthBetween(5, 10); + String format = randomBoolean() ? randomAlphaOfLengthBetween(5, 10) : null; + builder.docValueField(field, format); } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 6ddcf8b041fa8..5f7eaa0c1ec1b 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -471,7 +471,7 @@ protected A searchAndReduc indexSettings, query, breakerService, - builder.bytesToPreallocate(), + randomBoolean() ? 0 : builder.bytesToPreallocate(), maxBucket, fieldTypes ); diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/many_filters.txt b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/many_filters.txt similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/many_filters.txt rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/many_filters.txt diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample.json b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_authors.json 
b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_authors.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_authors.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_authors.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_authors_lastname.json b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_authors_lastname.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_authors_lastname.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_authors_lastname.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_names.json b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_names.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_names.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_names.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_pr.json b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_pr.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_pr.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_pr.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_distributors_names.json b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_distributors_names.json similarity index 100% rename from 
server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_distributors_names.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_distributors_names.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_en_names.json b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_en_names.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_en_names.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_en_names.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_en_no_distributors_name_no_street.json b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_en_no_distributors_name_no_street.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_en_no_distributors_name_no_street.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_en_no_distributors_name_no_street.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_names.json b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_names.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_names.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_names.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_no_distributors.json 
b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_no_distributors.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_no_distributors.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_properties_no_distributors.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_tags_authors_no_name.json b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_tags_authors_no_name.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_just_tags_authors_no_name.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_just_tags_authors_no_name.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_authors.json b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_authors.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_authors.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_authors.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_authors_lastname.json b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_authors_lastname.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_authors_lastname.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_authors_lastname.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_names.json 
b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_names.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_names.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_names.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_pr.json b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_pr.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_pr.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_pr.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_properties_distributors_names.json b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_properties_distributors_names.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_properties_distributors_names.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_properties_distributors_names.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_properties_en_names.json b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_properties_en_names.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_properties_en_names.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_properties_en_names.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_properties_names.json b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_properties_names.json similarity 
index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_properties_names.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_properties_names.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_tags.json b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_tags.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_tags.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_tags.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_title.json b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_title.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_title.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_title.json diff --git a/server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_title_pages.json b/test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_title_pages.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/common/xcontent/support/sample_no_title_pages.json rename to test/framework/src/main/resources/org/elasticsearch/common/xcontent/support/sample_no_title_pages.json diff --git a/x-pack/plugin/analytics/build.gradle b/x-pack/plugin/analytics/build.gradle index 3f1ba843af178..4318333698e75 100644 --- a/x-pack/plugin/analytics/build.gradle +++ b/x-pack/plugin/analytics/build.gradle @@ -1,4 +1,6 @@ apply plugin: 'elasticsearch.internal-es-plugin' +apply plugin: 'elasticsearch.internal-cluster-test' + esplugin { name 'x-pack-analytics' description 'Elasticsearch Expanded Pack Plugin - Analytics' diff --git 
a/x-pack/plugin/analytics/src/internalClusterTest/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsWithRequestBreakerIT.java b/x-pack/plugin/analytics/src/internalClusterTest/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsWithRequestBreakerIT.java new file mode 100644 index 0000000000000..d878ae93c52e7 --- /dev/null +++ b/x-pack/plugin/analytics/src/internalClusterTest/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsWithRequestBreakerIT.java @@ -0,0 +1,96 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.xpack.analytics.multiterms; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xpack.analytics.AnalyticsPlugin; + +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.stream.IntStream; + +/** + * test forked from CardinalityWithRequestBreakerIT + */ +public class MultiTermsWithRequestBreakerIT extends ESIntegTestCase { + + protected Collection> nodePlugins() { + return List.of(AnalyticsPlugin.class); + } + + /** + * Test that searches using multiterms aggregations returns all request breaker memory. 
+ */ + public void testRequestBreaker() throws Exception { + final String requestBreaker = randomIntBetween(1, 10000) + "kb"; + logger.info("--> Using request breaker setting: {}", requestBreaker); + + indexRandom( + true, + IntStream.range(0, randomIntBetween(10, 1000)) + .mapToObj( + i -> client().prepareIndex("test") + .setId("id_" + i) + .setSource(Map.of("field0", randomAlphaOfLength(5), "field1", randomAlphaOfLength(5))) + ) + .toArray(IndexRequestBuilder[]::new) + ); + + client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings( + Settings.builder().put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), requestBreaker) + ) + .get(); + + try { + client().prepareSearch("test") + .addAggregation( + new MultiTermsAggregationBuilder("xxx").terms( + List.of( + new MultiValuesSourceFieldConfig.Builder().setFieldName("field0.keyword").build(), + new MultiValuesSourceFieldConfig.Builder().setFieldName("field1.keyword").build() + ) + ) + ) + .get(); + } catch (ElasticsearchException e) { + if (ExceptionsHelper.unwrap(e, CircuitBreakingException.class) == null) { + throw e; + } + } + + client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings( + Settings.builder().putNull(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey()) + ) + .get(); + + // validation done by InternalTestCluster.ensureEstimatedStats() + } +} diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java index b522a5efe7eb5..65f26c391612b 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java @@ -19,6 +19,7 @@ import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.DocValueFormat; @@ -110,7 +111,6 @@ protected MultiTermsAggregator( .collect(Collectors.toList()); keyConverters = values.stream().map(TermValuesSource::keyConverter).collect(Collectors.toList()); bucketOrds = BytesKeyedBucketOrds.build(context.bigArrays(), cardinality); - } private boolean subAggsNeedScore() { @@ -220,6 +220,11 @@ public void accept(Integer start) throws IOException { }; } + @Override + protected void doClose() { + Releasables.close(bucketOrds); + } + @Override public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { InternalMultiTerms.Bucket[][] topBucketsPerOrd = new InternalMultiTerms.Bucket[owningBucketOrds.length][]; diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java index e41053e19e0eb..411a9b1aeb20f 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java @@ -372,7 +372,7 @@ protected EngineFactory getEngineFactory(ShardRouting routing) { // We need to recover the replica async to release the main thread for the following task to fill missing // operations between the local checkpoint and max_seq_no which the recovering replica is waiting for. 
recoveryFuture = group.asyncRecoverReplica(newReplica, - (shard, sourceNode) -> new RecoveryTarget(shard, sourceNode, null, recoveryListener) {}); + (shard, sourceNode) -> new RecoveryTarget(shard, sourceNode, null, null, recoveryListener) {}); } } if (recoveryFuture != null) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index 36c0ecfbd775f..7f8fd2ebfe52f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -41,10 +41,6 @@ public class XPackLicenseState { * Each value defines the licensed state necessary for the feature to be allowed. */ public enum Feature { - SECURITY_AUDITING(OperationMode.GOLD, false), - SECURITY_TOKEN_SERVICE(OperationMode.STANDARD, false), - SECURITY_AUTHORIZATION_REALM(OperationMode.PLATINUM, true), - SECURITY_AUTHORIZATION_ENGINE(OperationMode.PLATINUM, true), OPERATOR_PRIVILEGES(OperationMode.ENTERPRISE, true); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityExtension.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityExtension.java index c1a087061f994..2db295644553e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityExtension.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityExtension.java @@ -115,4 +115,8 @@ default AuthenticationFailureHandler getAuthenticationFailureHandler(SecurityCom default AuthorizationEngine getAuthorizationEngine(Settings settings) { return null; } + + default String extensionName() { + return getClass().getName(); + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/IndicesAccessControl.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/IndicesAccessControl.java index 112702dc8db67..e99464fe74e63 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/IndicesAccessControl.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/IndicesAccessControl.java @@ -40,7 +40,7 @@ public class IndicesAccessControl { public IndicesAccessControl(boolean granted, Map indexPermissions) { this.granted = granted; - this.indexPermissions = indexPermissions; + this.indexPermissions = Objects.requireNonNull(indexPermissions); } /** @@ -292,7 +292,7 @@ private static class AllowAllIndicesAccessControl extends IndicesAccessControl { private final IndexAccessControl allowAllIndexAccessControl = new IndexAccessControl(true, null, null); private AllowAllIndicesAccessControl() { - super(true, null); + super(true, Map.of()); } @Override @@ -301,13 +301,8 @@ public IndexAccessControl getIndexPermissions(String index) { } @Override - public boolean isGranted() { - return true; - } - - @Override - public Collection getDeniedIndices() { - return Set.of(); + public String toString() { + return "AllowAllIndicesAccessControl{}"; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java index 238c186c36bbd..9def76ffbd187 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java @@ -86,8 +86,8 @@ private IndicesPermission(Automaton restrictedNamesAutomaton, Group[] groups) { this.restrictedNamesAutomaton = restrictedNamesAutomaton; this.characterRunAutomaton = new 
CharacterRunAutomaton(restrictedNamesAutomaton); this.groups = groups; - this.hasFieldOrDocumentLevelSecurity = Arrays.stream(groups) - .anyMatch(g -> g.hasQuery() || g.fieldPermissions.hasFieldLevelSecurity()); + this.hasFieldOrDocumentLevelSecurity = Arrays.stream(groups).noneMatch(Group::isTotal) + && Arrays.stream(groups).anyMatch(g -> g.hasQuery() || g.fieldPermissions.hasFieldLevelSecurity()); } /** diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java index 32f0f36aa8c4b..7b52832889d1c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java @@ -8,7 +8,6 @@ import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.license.License.OperationMode; -import org.elasticsearch.license.XPackLicenseState.Feature; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.XPackField; @@ -86,67 +85,6 @@ public static OperationMode randomBasicStandardOrGold() { return randomFrom(BASIC, STANDARD, GOLD); } - public void testSecurityDefaults() { - XPackLicenseState licenseState = new XPackLicenseState(() -> 0); - assertThat(licenseState.checkFeature(Feature.SECURITY_AUDITING), is(true)); - } - - public void testSecurityStandard() { - XPackLicenseState licenseState = new XPackLicenseState(() -> 0); - licenseState.update(STANDARD, true, null); - - assertThat(licenseState.checkFeature(Feature.SECURITY_AUDITING), is(false)); - assertThat(licenseState.checkFeature(Feature.SECURITY_TOKEN_SERVICE), is(true)); - } - - public void testSecurityStandardExpired() { - XPackLicenseState licenseState = new XPackLicenseState( () -> 0); - licenseState.update(STANDARD, false, null); - - assertThat(licenseState.checkFeature(Feature.SECURITY_AUDITING), is(false)); - 
assertThat(licenseState.checkFeature(Feature.SECURITY_TOKEN_SERVICE), is(true)); - } - - public void testSecurityBasic() { - XPackLicenseState licenseState = new XPackLicenseState( () -> 0); - licenseState.update(BASIC, true, null); - - assertThat(licenseState.checkFeature(Feature.SECURITY_AUDITING), is(false)); - assertThat(licenseState.checkFeature(Feature.SECURITY_TOKEN_SERVICE), is(false)); - } - - public void testSecurityGold() { - XPackLicenseState licenseState = new XPackLicenseState(() -> 0); - licenseState.update(GOLD, true, null); - - assertThat(licenseState.checkFeature(Feature.SECURITY_AUDITING), is(true)); - assertThat(licenseState.checkFeature(Feature.SECURITY_TOKEN_SERVICE), is(true)); - } - - public void testSecurityGoldExpired() { - XPackLicenseState licenseState = new XPackLicenseState(() -> 0); - licenseState.update(GOLD, false, null); - - assertThat(licenseState.checkFeature(Feature.SECURITY_AUDITING), is(true)); - assertThat(licenseState.checkFeature(Feature.SECURITY_TOKEN_SERVICE), is(true)); - } - - public void testSecurityPlatinum() { - XPackLicenseState licenseState = new XPackLicenseState(() -> 0); - licenseState.update(PLATINUM, true, null); - - assertThat(licenseState.checkFeature(Feature.SECURITY_AUDITING), is(true)); - assertThat(licenseState.checkFeature(Feature.SECURITY_TOKEN_SERVICE), is(true)); - } - - public void testSecurityPlatinumExpired() { - XPackLicenseState licenseState = new XPackLicenseState(() -> 0); - licenseState.update(PLATINUM, false, null); - - assertThat(licenseState.checkFeature(Feature.SECURITY_AUDITING), is(true)); - assertThat(licenseState.checkFeature(Feature.SECURITY_TOKEN_SERVICE), is(true)); - } - public void testSecurityAckBasicToNotGoldOrStandard() { OperationMode toMode = randomFrom(OperationMode.values(), mode -> mode != GOLD && mode != STANDARD); assertAckMessages(XPackField.SECURITY, BASIC, toMode, 0); diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java index c31dae68bf79b..299ce95ae4568 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java @@ -35,11 +35,12 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -108,7 +109,7 @@ public void setUpMocks() { clusterAdminClient = mock(ClusterAdminClient.class); doAnswer(invocationOnMock -> { ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; - listener.onResponse(new ClusterHealthResponse()); + listener.onResponse(new ClusterHealthResponse("", Strings.EMPTY_ARRAY, ClusterState.EMPTY_STATE, false)); return null; }).when(clusterAdminClient).health(any(ClusterHealthRequest.class), any(ActionListener.class)); diff --git a/x-pack/plugin/eql/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/eql/EqlRestIT.java b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/eql/EqlClientYamlIT.java similarity index 82% rename from x-pack/plugin/eql/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/eql/EqlRestIT.java rename to 
x-pack/plugin/eql/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/eql/EqlClientYamlIT.java index 393c202520de2..521a1176fdbbe 100644 --- a/x-pack/plugin/eql/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/eql/EqlRestIT.java +++ b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/java/org/elasticsearch/xpack/eql/EqlClientYamlIT.java @@ -12,9 +12,9 @@ import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -public class EqlRestIT extends ESClientYamlSuiteTestCase { +public class EqlClientYamlIT extends ESClientYamlSuiteTestCase { - public EqlRestIT(final ClientYamlTestCandidate testCandidate) { + public EqlClientYamlIT(final ClientYamlTestCandidate testCandidate) { super(testCandidate); } diff --git a/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/20_wait_for_checkpoints.yml b/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/20_wait_for_checkpoints.yml index 7637e7cce4cf3..5610502a65d23 100644 --- a/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/20_wait_for_checkpoints.yml +++ b/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/20_wait_for_checkpoints.yml @@ -32,6 +32,28 @@ setup: - do: indices.refresh: {} +--- +"Execute successful search with wait_for_checkpoints default": + - do: + fleet.search: + index: "test-after-refresh" + allow_partial_search_results: false + + body: { query: { match_all: {} } } + + - match: { _shards.successful: 1 } + - match: { hits.total.value: 2 } + + - do: + fleet.search: + index: "test-after-refresh" + allow_partial_search_results: false + wait_for_checkpoints: [] + body: { query: { match_all: { } } } + + - match: { _shards.successful: 1 } + - match: { hits.total.value: 2 } + --- "Execute successful after refresh search": - do: @@ -114,3 +136,19 @@ setup: - match: { responses.1._shards.successful: 1 } - match: { 
responses.1.hits.total.value: 2 } - match: { responses.2.error.caused_by.type: "illegal_argument_exception" } + +--- +"Test msearch wait_for_checkpoints default": + - do: + fleet.msearch: + index: "test-after-refresh" + body: + - { "allow_partial_search_results": false } + - { query: { match_all: { } } } + - { "allow_partial_search_results": false, wait_for_checkpoints: [] } + - { query: { match_all: { } } } + + - match: { responses.0._shards.successful: 1 } + - match: { responses.0.hits.total.value: 2 } + - match: { responses.1._shards.successful: 1 } + - match: { responses.1.hits.total.value: 2 } diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java index 0147267cb3023..b9c208da927d5 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java @@ -68,7 +68,9 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli for (int i = 0; i < stringWaitForCheckpoints.length; ++i) { waitForCheckpoints[i] = Long.parseLong(stringWaitForCheckpoints[i]); } - searchRequest.setWaitForCheckpoints(Collections.singletonMap("*", waitForCheckpoints)); + if (waitForCheckpoints.length != 0) { + searchRequest.setWaitForCheckpoints(Collections.singletonMap("*", waitForCheckpoints)); + } return true; } else if ("wait_for_checkpoints_timeout".equals(key)) { final TimeValue waitForCheckpointsTimeout = nodeTimeValue(value, TimeValue.timeValueSeconds(30)); @@ -96,7 +98,9 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli } } long[] checkpoints = searchRequest.getWaitForCheckpoints().get("*"); - searchRequest.setWaitForCheckpoints(Collections.singletonMap(indices[0], checkpoints)); + if (checkpoints != null) { + 
searchRequest.setWaitForCheckpoints(Collections.singletonMap(indices[0], checkpoints)); + } } return channel -> { diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java index fdee70c38cdd2..e583b28429134 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetSearchAction.java @@ -76,7 +76,9 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli "Fleet search API only supports searching a single index. Found: [" + Arrays.toString(indices1) + "]." ); } - sr.setWaitForCheckpoints(Collections.singletonMap(indices1[0], waitForCheckpoints)); + if (waitForCheckpoints.length != 0) { + sr.setWaitForCheckpoints(Collections.singletonMap(indices1[0], waitForCheckpoints)); + } final TimeValue waitForCheckpointsTimeout = request.paramAsTime( "wait_for_checkpoints_timeout", TimeValue.timeValueSeconds(30) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java index 16058fbdae4f2..d413fb055dbbf 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java @@ -105,7 +105,7 @@ protected CategorizeTextAggregator( @Override protected void doClose() { super.doClose(); - Releasables.close(this.analyzer, this.bytesRefHash); + Releasables.close(this.analyzer, this.bytesRefHash, this.bucketOrds, this.categorizers); } @Override diff --git 
a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterResourceTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterResourceTests.java index 6d501fa50ae22..0eab67a81a5f2 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterResourceTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterResourceTests.java @@ -120,7 +120,6 @@ public void testInvalidVersionBlocks() { verifyNoMoreInteractions(client); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/78736") public void testTemplateCheckBlocksAfterSuccessfulVersion() { final Exception exception = failureGetException(); final boolean firstSucceeds = randomBoolean(); @@ -159,8 +158,8 @@ public void testTemplateCheckBlocksAfterSuccessfulVersion() { expectedGets += successful + 1; // the string of successes, then the last failure. 
} - if (unsuccessful == 0) { - // There is only going to be one response, and it will be an exception + if (successfulFirst && unsuccessful == 0) { + // In this case, there will be only one failed response, and it'll be an exception expectedResult = null; } else { // The first bad response will be either a 404 or a template with an old version diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListenerTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListenerTests.java index 4df04655d6e49..67cf4b1207f25 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListenerTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListenerTests.java @@ -8,13 +8,12 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.ThreadContext.StoredContext; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.search.Scroll; import org.elasticsearch.search.SearchContextMissingException; import org.elasticsearch.search.internal.InternalScrollSearchRequest; @@ -32,15 +31,16 @@ import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; import org.elasticsearch.xpack.core.security.user.User; +import 
org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.audit.AuditTrail; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.junit.Before; import java.util.Collections; -import static org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail.PRINCIPAL_ROLES_FIELD_NAME; import static org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField.AUTHORIZATION_INFO_KEY; import static org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField.ORIGINATING_ACTION_KEY; +import static org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail.PRINCIPAL_ROLES_FIELD_NAME; import static org.elasticsearch.xpack.security.authz.AuthorizationServiceTests.authzInfoRoles; import static org.elasticsearch.xpack.security.authz.SecuritySearchOperationListener.ensureAuthenticatedUserIsSame; import static org.hamcrest.Matchers.is; @@ -98,8 +98,8 @@ public void testValidateSearchContext() throws Exception { new Authentication(new User("test", "role"), new RealmRef("realm", "file", "node"), null)); final IndicesAccessControl indicesAccessControl = mock(IndicesAccessControl.class); readerContext.putInContext(AuthorizationServiceField.INDICES_PERMISSIONS_KEY, indicesAccessControl); - XPackLicenseState licenseState = mock(XPackLicenseState.class); - when(licenseState.checkFeature(Feature.SECURITY_AUDITING)).thenReturn(true); + MockLicenseState licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(Security.AUDITING_FEATURE)).thenReturn(true); ThreadContext threadContext = new ThreadContext(Settings.EMPTY); final SecurityContext securityContext = new SecurityContext(Settings.EMPTY, threadContext); AuditTrail auditTrail = mock(AuditTrail.class); @@ -191,8 +191,8 @@ public void testEnsuredAuthenticatedUserIsSame() { ShardSearchContextId contextId = new ShardSearchContextId(UUIDs.randomBase64UUID(), randomLong()); final String action = randomAlphaOfLength(4); TransportRequest request = 
Empty.INSTANCE; - XPackLicenseState licenseState = mock(XPackLicenseState.class); - when(licenseState.checkFeature(Feature.SECURITY_AUDITING)).thenReturn(true); + MockLicenseState licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(Security.AUDITING_FEATURE)).thenReturn(true); AuditTrail auditTrail = mock(AuditTrail.class); AuditTrailService auditTrailService = new AuditTrailService(Collections.singletonList(auditTrail), licenseState); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 2ca883b32c662..3c149d639b323 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -43,6 +43,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.env.Environment; @@ -75,13 +76,15 @@ import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.netty4.SharedGroupFactory; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportInterceptor; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.netty4.SharedGroupFactory; import org.elasticsearch.transport.nio.NioGroupFactory; import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.XContentBuilder; import 
org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; @@ -98,8 +101,8 @@ import org.elasticsearch.xpack.core.security.action.GrantApiKeyAction; import org.elasticsearch.xpack.core.security.action.InvalidateApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.QueryApiKeyAction; -import org.elasticsearch.xpack.core.security.action.enrollment.NodeEnrollmentAction; import org.elasticsearch.xpack.core.security.action.enrollment.KibanaEnrollmentAction; +import org.elasticsearch.xpack.core.security.action.enrollment.NodeEnrollmentAction; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectAuthenticateAction; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectLogoutAction; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationAction; @@ -171,8 +174,8 @@ import org.elasticsearch.xpack.security.action.TransportGrantApiKeyAction; import org.elasticsearch.xpack.security.action.TransportInvalidateApiKeyAction; import org.elasticsearch.xpack.security.action.apikey.TransportQueryApiKeyAction; -import org.elasticsearch.xpack.security.action.enrollment.TransportNodeEnrollmentAction; import org.elasticsearch.xpack.security.action.enrollment.TransportKibanaEnrollmentAction; +import org.elasticsearch.xpack.security.action.enrollment.TransportNodeEnrollmentAction; import org.elasticsearch.xpack.security.action.filter.SecurityActionFilter; import org.elasticsearch.xpack.security.action.oidc.TransportOpenIdConnectAuthenticateAction; import org.elasticsearch.xpack.security.action.oidc.TransportOpenIdConnectLogoutAction; @@ -233,8 +236,8 @@ import org.elasticsearch.xpack.security.authz.SecuritySearchOperationListener; import org.elasticsearch.xpack.security.authz.accesscontrol.OptOutQueryCache; import org.elasticsearch.xpack.security.authz.interceptor.BulkShardRequestInterceptor; -import 
org.elasticsearch.xpack.security.authz.interceptor.IndicesAliasesRequestInterceptor; import org.elasticsearch.xpack.security.authz.interceptor.DlsFlsLicenseRequestInterceptor; +import org.elasticsearch.xpack.security.authz.interceptor.IndicesAliasesRequestInterceptor; import org.elasticsearch.xpack.security.authz.interceptor.RequestInterceptor; import org.elasticsearch.xpack.security.authz.interceptor.ResizeRequestInterceptor; import org.elasticsearch.xpack.security.authz.interceptor.SearchRequestInterceptor; @@ -260,8 +263,8 @@ import org.elasticsearch.xpack.security.rest.action.apikey.RestGrantApiKeyAction; import org.elasticsearch.xpack.security.rest.action.apikey.RestInvalidateApiKeyAction; import org.elasticsearch.xpack.security.rest.action.apikey.RestQueryApiKeyAction; -import org.elasticsearch.xpack.security.rest.action.enrollment.RestNodeEnrollmentAction; import org.elasticsearch.xpack.security.rest.action.enrollment.RestKibanaEnrollAction; +import org.elasticsearch.xpack.security.rest.action.enrollment.RestNodeEnrollmentAction; import org.elasticsearch.xpack.security.rest.action.oauth2.RestGetTokenAction; import org.elasticsearch.xpack.security.rest.action.oauth2.RestInvalidateTokenAction; import org.elasticsearch.xpack.security.rest.action.oidc.RestOpenIdConnectAuthenticateAction; @@ -354,9 +357,11 @@ public class Security extends Plugin implements SystemIndexPlugin, IngestPlugin, // TODO: ip filtering does not actually track license usage yet public static final LicensedFeature.Momentary IP_FILTERING_FEATURE = - LicensedFeature.momentaryLenient(null, "security_ip_filtering", License.OperationMode.GOLD); + LicensedFeature.momentaryLenient(null, "security-ip-filtering", License.OperationMode.GOLD); public static final LicensedFeature.Momentary AUDITING_FEATURE = - LicensedFeature.momentaryLenient(null, "security_auditing", License.OperationMode.GOLD); + LicensedFeature.momentaryLenient(null, "security-auditing", License.OperationMode.GOLD); + public static 
final LicensedFeature.Momentary TOKEN_SERVICE_FEATURE = + LicensedFeature.momentaryLenient(null, "security-token-service", License.OperationMode.STANDARD); private static final String REALMS_FEATURE_FAMILY = "security-realms"; // Builtin realms (file/native) realms are Basic licensed, so don't need to be checked or tracked @@ -378,6 +383,11 @@ public class Security extends Plugin implements SystemIndexPlugin, IngestPlugin, public static final LicensedFeature.Persistent CUSTOM_REALMS_FEATURE = LicensedFeature.persistentLenient(REALMS_FEATURE_FAMILY, "custom", License.OperationMode.PLATINUM); + public static final LicensedFeature.Momentary DELEGATED_AUTHORIZATION_FEATURE = + LicensedFeature.momentary(null, "security-delegated-authorization", License.OperationMode.PLATINUM); + public static final LicensedFeature.Momentary AUTHORIZATION_ENGINE_FEATURE = + LicensedFeature.momentary(null, "security-authorization-engine", License.OperationMode.PLATINUM); + // Custom role providers are Platinum+ public static final LicensedFeature.Persistent CUSTOM_ROLE_PROVIDERS_FEATURE = LicensedFeature.persistent(null, "security-roles-provider", License.OperationMode.PLATINUM); @@ -560,7 +570,7 @@ Collection createComponents(Client client, ThreadPool threadPool, Cluste extensionComponents ); if (providers != null && providers.isEmpty() == false) { - customRoleProviders.put(extension.toString(), providers); + customRoleProviders.put(extension.extensionName(), providers); } } @@ -669,37 +679,15 @@ auditTrailService, failureHandler, threadPool, anonymousUser, getAuthorizationEn } private AuthorizationEngine getAuthorizationEngine() { - AuthorizationEngine authorizationEngine = null; - String extensionName = null; - for (SecurityExtension extension : securityExtensions) { - final AuthorizationEngine extensionEngine = extension.getAuthorizationEngine(settings); - if (extensionEngine != null && authorizationEngine != null) { - throw new IllegalStateException("Extensions [" + extensionName + 
"] and [" + extension.toString() + "] " - + "both set an authorization engine"); - } - authorizationEngine = extensionEngine; - extensionName = extension.toString(); - } - - if (authorizationEngine != null) { - logger.debug("Using authorization engine from extension [" + extensionName + "]"); - } - return authorizationEngine; + return findValueFromExtensions("authorization engine", extension -> extension.getAuthorizationEngine(settings)); } private AuthenticationFailureHandler createAuthenticationFailureHandler(final Realms realms, final SecurityExtension.SecurityComponents components) { - AuthenticationFailureHandler failureHandler = null; - String extensionName = null; - for (SecurityExtension extension : securityExtensions) { - AuthenticationFailureHandler extensionFailureHandler = extension.getAuthenticationFailureHandler(components); - if (extensionFailureHandler != null && failureHandler != null) { - throw new IllegalStateException("Extensions [" + extensionName + "] and [" + extension.toString() + "] " - + "both set an authentication failure handler"); - } - failureHandler = extensionFailureHandler; - extensionName = extension.toString(); - } + AuthenticationFailureHandler failureHandler = findValueFromExtensions( + "authentication failure handler", + extension -> extension.getAuthenticationFailureHandler(components) + ); if (failureHandler == null) { logger.debug("Using default authentication failure handler"); Supplier>> headersSupplier = () -> { @@ -736,12 +724,48 @@ private AuthenticationFailureHandler createAuthenticationFailureHandler(final Re getLicenseState().addListener(() -> { finalDefaultFailureHandler.setHeaders(headersSupplier.get()); }); - } else { - logger.debug("Using authentication failure handler from extension [" + extensionName + "]"); } return failureHandler; } + /** + * Calls the provided function for each configured extension and return the value that was generated by the extensions. 
+ * If multiple extensions provide a value, throws {@link IllegalStateException}. + * If no extensions provide a value (or if there are no extensions) returns {@code null}. + */ + @Nullable + private T findValueFromExtensions(String valueType, Function method) { + T foundValue = null; + String fromExtension = null; + for (SecurityExtension extension : securityExtensions) { + final T extensionValue = method.apply(extension); + if (extensionValue == null) { + continue; + } + if (foundValue == null) { + foundValue = extensionValue; + fromExtension = extension.extensionName(); + } else { + throw new IllegalStateException( + "Extensions [" + + fromExtension + + "] and [" + + extension.extensionName() + + "] " + + " both attempted to provide a value for [" + + valueType + + "]" + ); + } + } + if (foundValue == null) { + return null; + } else { + logger.debug("Using [{}] [{}] from extension [{}]", valueType, foundValue, fromExtension); + return foundValue; + } + } + @Override public Settings additionalSettings() { return additionalSettings(settings, enabled); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditTrailService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditTrailService.java index 43945e30e0987..e888a2db910f1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditTrailService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditTrailService.java @@ -10,13 +10,13 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; import 
org.elasticsearch.xpack.core.security.authc.AuthenticationToken; import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; +import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.transport.filter.SecurityIpFilterRule; import java.net.InetAddress; @@ -43,7 +43,7 @@ public AuditTrailService(List auditTrails, XPackLicenseState license public AuditTrail get() { if (compositeAuditTrail.isEmpty() == false) { - if (licenseState.checkFeature(Feature.SECURITY_AUDITING)) { + if (Security.AUDITING_FEATURE.check(licenseState)) { return compositeAuditTrail; } else { maybeLogAuditingDisabled(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 190961d5e6b8d..c65d71de58275 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -93,6 +93,7 @@ import org.elasticsearch.xpack.core.security.authc.TokenMetadata; import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; +import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.support.FeatureNotEnabledException; import org.elasticsearch.xpack.security.support.FeatureNotEnabledException.Feature; import org.elasticsearch.xpack.security.support.SecurityIndexManager; @@ -1586,11 +1587,11 @@ private static String getTokenIdFromDocumentId(String docId) { } private boolean isEnabled() { - return enabled && licenseState.checkFeature(XPackLicenseState.Feature.SECURITY_TOKEN_SERVICE); + return enabled && Security.TOKEN_SERVICE_FEATURE.check(licenseState); } private void ensureEnabled() { - if 
(licenseState.checkFeature(XPackLicenseState.Feature.SECURITY_TOKEN_SERVICE) == false) { + if (Security.TOKEN_SERVICE_FEATURE.check(licenseState) == false) { throw LicenseUtils.newComplianceException("security tokens"); } if (enabled == false) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DelegatedAuthorizationSupport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DelegatedAuthorizationSupport.java index 1fc4efd4d91e3..7bf65a1b0ee95 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DelegatedAuthorizationSupport.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DelegatedAuthorizationSupport.java @@ -10,18 +10,18 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.core.Tuple; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Tuple; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.Realm; import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.support.DelegatedAuthorizationSettings; import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.Security; import java.util.ArrayList; import java.util.List; @@ -82,7 +82,7 @@ public boolean hasDelegation() { * with a meaningful diagnostic message. 
*/ public void resolve(String username, ActionListener resultListener) { - boolean authzOk = licenseState.checkFeature(Feature.SECURITY_AUTHORIZATION_REALM); + boolean authzOk = Security.DELEGATED_AUTHORIZATION_FEATURE.check(licenseState); if (authzOk == false) { resultListener.onResponse(AuthenticationResult.unsuccessful( DelegatedAuthorizationSettings.AUTHZ_REALMS_SUFFIX + " are not permitted", diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 1c31172e28323..76248eac0fc03 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -28,17 +28,16 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.util.concurrent.ListenableFuture; -import org.elasticsearch.core.Nullable; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Tuple; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportActionProxy; import org.elasticsearch.transport.TransportRequest; @@ -71,6 +70,7 @@ import 
org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.audit.AuditLevel; import org.elasticsearch.xpack.security.audit.AuditTrail; import org.elasticsearch.xpack.security.audit.AuditTrailService; @@ -499,7 +499,7 @@ AuthorizationEngine getAuthorizationEngine(final Authentication authentication) private AuthorizationEngine getAuthorizationEngineForUser(final User user) { if (rbacEngine != authorizationEngine - && licenseState.checkFeature(Feature.SECURITY_AUTHORIZATION_ENGINE)) { + && Security.AUTHORIZATION_ENGINE_FEATURE.check(licenseState)) { if (ClientReservedRealm.isReserved(user.principal(), settings) || isInternal(user)) { return rbacEngine; } else { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/TokenBaseRestHandler.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/TokenBaseRestHandler.java index 3a7cdac342a76..5fce69e7e2fc3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/TokenBaseRestHandler.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/TokenBaseRestHandler.java @@ -12,8 +12,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; /** @@ -32,7 +32,7 @@ protected Exception checkFeatureAvailable(RestRequest request) { Exception failedFeature = super.checkFeatureAvailable(request); if (failedFeature != null) { return 
failedFeature; - } else if (licenseState.checkFeature(Feature.SECURITY_TOKEN_SERVICE)) { + } else if (Security.TOKEN_SERVICE_FEATURE.check(licenseState)) { return null; } else { logger.info("Security tokens are not available under the current [{}] license", licenseState.getOperationMode().description()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectLogoutActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectLogoutActionTests.java index 778b9d069b399..87b24d0eaed9f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectLogoutActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectLogoutActionTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.action.oidc; import com.nimbusds.jwt.JWT; + import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; @@ -32,8 +33,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.threadpool.ThreadPool; @@ -48,13 +48,14 @@ import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings; +import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.core.security.user.User; import 
org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.TokenService; import org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectRealm; import org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectTestCase; -import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.junit.After; import org.junit.Before; @@ -176,8 +177,8 @@ public void setup() throws Exception { final ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); - final XPackLicenseState licenseState = mock(XPackLicenseState.class); - when(licenseState.checkFeature(Feature.SECURITY_TOKEN_SERVICE)).thenReturn(true); + final MockLicenseState licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(Security.TOKEN_SERVICE_FEATURE)).thenReturn(true); tokenService = new TokenService(settings, Clock.systemUTC(), client, licenseState, new SecurityContext(settings, threadContext), securityIndex, securityIndex, clusterService); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index 26b9cd8e8a096..294a0de17ffef 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -8,10 +8,10 @@ import org.apache.lucene.search.TotalHits; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.ActionListener; import 
org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; @@ -35,20 +35,16 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.core.PathUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.xcontent.DeprecationHandler; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.core.PathUtils; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.tasks.Task; @@ -57,6 +53,9 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.DeprecationHandler; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionRequest; @@ -69,6 +68,7 @@ import 
org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.core.security.authc.saml.SamlRealmSettings; import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.TokenService; import org.elasticsearch.xpack.security.authc.saml.SamlLogoutRequestHandler; @@ -209,8 +209,8 @@ void doExecute(ActionType action, Request request, ActionListener idxReqReference; private AuthenticationService authenticationService; - private XPackLicenseState license; + private MockLicenseState license; private SecurityContext securityContext; @Before @@ -177,8 +177,8 @@ && new String((byte[]) token.credentials(), StandardCharsets.UTF_8).equals("fail this.clusterService = ClusterServiceUtils.createClusterService(threadPool); - this.license = mock(XPackLicenseState.class); - when(license.checkFeature(Feature.SECURITY_TOKEN_SERVICE)).thenReturn(true); + this.license = mock(MockLicenseState.class); + when(license.isAllowed(Security.TOKEN_SERVICE_FEATURE)).thenReturn(true); } @After diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenActionTests.java index 154d240fff068..b845e2c0c1e9a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenActionTests.java @@ -19,8 +19,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.indices.IndexClosedException; -import org.elasticsearch.license.XPackLicenseState; -import 
org.elasticsearch.license.XPackLicenseState.Feature; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.node.Node; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ClusterServiceUtils; @@ -32,6 +31,7 @@ import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenRequest; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenResponse; +import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.authc.TokenService; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.junit.After; @@ -58,7 +58,7 @@ public class TransportInvalidateTokenActionTests extends ESTestCase { private Client client; private SecurityIndexManager securityIndex; private ClusterService clusterService; - private XPackLicenseState license; + private MockLicenseState license; private SecurityContext securityContext; @Before @@ -70,8 +70,8 @@ public void setup() { when(client.settings()).thenReturn(SETTINGS); securityIndex = mock(SecurityIndexManager.class); this.clusterService = ClusterServiceUtils.createClusterService(threadPool); - this.license = mock(XPackLicenseState.class); - when(license.checkFeature(Feature.SECURITY_TOKEN_SERVICE)).thenReturn(true); + this.license = mock(MockLicenseState.class); + when(license.isAllowed(Security.TOKEN_SERVICE_FEATURE)).thenReturn(true); } public void testInvalidateTokensWhenIndexUnavailable() throws Exception { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/AuditTrailServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/AuditTrailServiceTests.java index a585206585679..f0e97f7e87569 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/AuditTrailServiceTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/AuditTrailServiceTests.java @@ -11,8 +11,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.license.License; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; @@ -22,6 +21,7 @@ import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.transport.filter.IPFilter; import org.elasticsearch.xpack.security.transport.filter.SecurityIpFilterRule; import org.junit.Before; @@ -47,7 +47,7 @@ public class AuditTrailServiceTests extends ESTestCase { private AuthenticationToken token; private TransportRequest request; private RestRequest restRequest; - private XPackLicenseState licenseState; + private MockLicenseState licenseState; private boolean isAuditingAllowed; @Before @@ -57,10 +57,10 @@ public void init() throws Exception { auditTrailsBuilder.add(mock(AuditTrail.class)); } auditTrails = unmodifiableList(auditTrailsBuilder); - licenseState = mock(XPackLicenseState.class); + licenseState = mock(MockLicenseState.class); service = new AuditTrailService(auditTrails, licenseState); isAuditingAllowed = randomBoolean(); - when(licenseState.checkFeature(Feature.SECURITY_AUDITING)).thenReturn(isAuditingAllowed); + when(licenseState.isAllowed(Security.AUDITING_FEATURE)).thenReturn(isAuditingAllowed); token = mock(AuthenticationToken.class); request = mock(TransportRequest.class); restRequest = mock(RestRequest.class); @@ -118,7 +118,7 @@ public void 
testNoLogRecentlyWhenLicenseProhibitsAuditing() throws Exception { public void testAuthenticationFailed() throws Exception { final String requestId = randomAlphaOfLengthBetween(6, 12); service.get().authenticationFailed(requestId, token, "_action", request); - verify(licenseState).checkFeature(Feature.SECURITY_AUDITING); + verify(licenseState).isAllowed(Security.AUDITING_FEATURE); if (isAuditingAllowed) { for (AuditTrail auditTrail : auditTrails) { verify(auditTrail).authenticationFailed(requestId, token, "_action", request); @@ -131,7 +131,7 @@ public void testAuthenticationFailed() throws Exception { public void testAuthenticationFailedNoToken() throws Exception { final String requestId = randomAlphaOfLengthBetween(6, 12); service.get().authenticationFailed(requestId, "_action", request); - verify(licenseState).checkFeature(Feature.SECURITY_AUDITING); + verify(licenseState).isAllowed(Security.AUDITING_FEATURE); if (isAuditingAllowed) { for (AuditTrail auditTrail : auditTrails) { verify(auditTrail).authenticationFailed(requestId, "_action", request); @@ -144,7 +144,7 @@ public void testAuthenticationFailedNoToken() throws Exception { public void testAuthenticationFailedRestNoToken() throws Exception { final String requestId = randomAlphaOfLengthBetween(6, 12); service.get().authenticationFailed(requestId, restRequest); - verify(licenseState).checkFeature(Feature.SECURITY_AUDITING); + verify(licenseState).isAllowed(Security.AUDITING_FEATURE); if (isAuditingAllowed) { for (AuditTrail auditTrail : auditTrails) { verify(auditTrail).authenticationFailed(requestId, restRequest); @@ -157,7 +157,7 @@ public void testAuthenticationFailedRestNoToken() throws Exception { public void testAuthenticationFailedRest() throws Exception { final String requestId = randomAlphaOfLengthBetween(6, 12); service.get().authenticationFailed(requestId, token, restRequest); - verify(licenseState).checkFeature(Feature.SECURITY_AUDITING); + 
verify(licenseState).isAllowed(Security.AUDITING_FEATURE); if (isAuditingAllowed) { for (AuditTrail auditTrail : auditTrails) { verify(auditTrail).authenticationFailed(requestId, token, restRequest); @@ -170,7 +170,7 @@ public void testAuthenticationFailedRest() throws Exception { public void testAuthenticationFailedRealm() throws Exception { final String requestId = randomAlphaOfLengthBetween(6, 12); service.get().authenticationFailed(requestId, "_realm", token, "_action", request); - verify(licenseState).checkFeature(Feature.SECURITY_AUDITING); + verify(licenseState).isAllowed(Security.AUDITING_FEATURE); if (isAuditingAllowed) { for (AuditTrail auditTrail : auditTrails) { verify(auditTrail).authenticationFailed(requestId, "_realm", token, "_action", request); @@ -183,7 +183,7 @@ public void testAuthenticationFailedRealm() throws Exception { public void testAuthenticationFailedRestRealm() throws Exception { final String requestId = randomAlphaOfLengthBetween(6, 12); service.get().authenticationFailed(requestId, "_realm", token, restRequest); - verify(licenseState).checkFeature(Feature.SECURITY_AUDITING); + verify(licenseState).isAllowed(Security.AUDITING_FEATURE); if (isAuditingAllowed) { for (AuditTrail auditTrail : auditTrails) { verify(auditTrail).authenticationFailed(requestId, "_realm", token, restRequest); @@ -196,7 +196,7 @@ public void testAuthenticationFailedRestRealm() throws Exception { public void testAnonymousAccess() throws Exception { final String requestId = randomAlphaOfLengthBetween(6, 12); service.get().anonymousAccessDenied(requestId, "_action", request); - verify(licenseState).checkFeature(Feature.SECURITY_AUDITING); + verify(licenseState).isAllowed(Security.AUDITING_FEATURE); if (isAuditingAllowed) { for (AuditTrail auditTrail : auditTrails) { verify(auditTrail).anonymousAccessDenied(requestId, "_action", request); @@ -213,7 +213,7 @@ public void testAccessGranted() throws Exception { () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, 
new String[] { randomAlphaOfLengthBetween(1, 6) }); final String requestId = randomAlphaOfLengthBetween(6, 12); service.get().accessGranted(requestId, authentication, "_action", request, authzInfo); - verify(licenseState).checkFeature(Feature.SECURITY_AUDITING); + verify(licenseState).isAllowed(Security.AUDITING_FEATURE); if (isAuditingAllowed) { for (AuditTrail auditTrail : auditTrails) { verify(auditTrail).accessGranted(requestId, authentication, "_action", request, authzInfo); @@ -230,7 +230,7 @@ public void testAccessDenied() throws Exception { () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, new String[] { randomAlphaOfLengthBetween(1, 6) }); final String requestId = randomAlphaOfLengthBetween(6, 12); service.get().accessDenied(requestId, authentication, "_action", request, authzInfo); - verify(licenseState).checkFeature(Feature.SECURITY_AUDITING); + verify(licenseState).isAllowed(Security.AUDITING_FEATURE); if (isAuditingAllowed) { for (AuditTrail auditTrail : auditTrails) { verify(auditTrail).accessDenied(requestId, authentication, "_action", request, authzInfo); @@ -244,7 +244,7 @@ public void testConnectionGranted() throws Exception { InetAddress inetAddress = InetAddress.getLoopbackAddress(); SecurityIpFilterRule rule = randomBoolean() ? 
SecurityIpFilterRule.ACCEPT_ALL : IPFilter.DEFAULT_PROFILE_ACCEPT_ALL; service.get().connectionGranted(inetAddress, "client", rule); - verify(licenseState).checkFeature(Feature.SECURITY_AUDITING); + verify(licenseState).isAllowed(Security.AUDITING_FEATURE); if (isAuditingAllowed) { for (AuditTrail auditTrail : auditTrails) { verify(auditTrail).connectionGranted(inetAddress, "client", rule); @@ -258,7 +258,7 @@ public void testConnectionDenied() throws Exception { InetAddress inetAddress = InetAddress.getLoopbackAddress(); SecurityIpFilterRule rule = new SecurityIpFilterRule(false, "_all"); service.get().connectionDenied(inetAddress, "client", rule); - verify(licenseState).checkFeature(Feature.SECURITY_AUDITING); + verify(licenseState).isAllowed(Security.AUDITING_FEATURE); if (isAuditingAllowed) { for (AuditTrail auditTrail : auditTrails) { verify(auditTrail).connectionDenied(inetAddress, "client", rule); @@ -273,7 +273,7 @@ public void testAuthenticationSuccessRest() throws Exception { new RealmRef(null, null, null)); final String requestId = randomAlphaOfLengthBetween(6, 12); service.get().authenticationSuccess(requestId, authentication, restRequest); - verify(licenseState).checkFeature(Feature.SECURITY_AUDITING); + verify(licenseState).isAllowed(Security.AUDITING_FEATURE); if (isAuditingAllowed) { for (AuditTrail auditTrail : auditTrails) { verify(auditTrail).authenticationSuccess(requestId, authentication, restRequest); @@ -288,7 +288,7 @@ public void testAuthenticationSuccessTransport() throws Exception { new RealmRef(null, null, null)); final String requestId = randomAlphaOfLengthBetween(6, 12); service.get().authenticationSuccess(requestId, authentication, "_action", request); - verify(licenseState).checkFeature(Feature.SECURITY_AUDITING); + verify(licenseState).isAllowed(Security.AUDITING_FEATURE); if (isAuditingAllowed) { for (AuditTrail auditTrail : auditTrails) { verify(auditTrail).authenticationSuccess(requestId, authentication, "_action", request); diff 
--git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index 518e9815a11c4..00eac5e47a4ac 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -32,10 +32,8 @@ import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.core.Tuple; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -44,8 +42,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.Tuple; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.index.get.GetResult; @@ -55,7 +53,6 @@ import org.elasticsearch.license.LicensedFeature; import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ClusterServiceUtils; @@ -66,6 +63,8 @@ import 
org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.SecurityContext; @@ -231,9 +230,9 @@ public void init() throws Exception { } } when(licenseState.isAllowed(Security.CUSTOM_REALMS_FEATURE)).thenReturn(true); - when(licenseState.checkFeature(Feature.SECURITY_TOKEN_SERVICE)).thenReturn(true); + when(licenseState.isAllowed(Security.TOKEN_SERVICE_FEATURE)).thenReturn(true); when(licenseState.copyCurrentLicenseState()).thenReturn(licenseState); - when(licenseState.checkFeature(Feature.SECURITY_AUDITING)).thenReturn(true); + when(licenseState.isAllowed(Security.AUDITING_FEATURE)).thenReturn(true); when(licenseState.getOperationMode()).thenReturn(randomFrom(License.OperationMode.ENTERPRISE, License.OperationMode.PLATINUM)); ReservedRealm reservedRealm = mock(ReservedRealm.class); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java index 10f20a94f2ea3..f14a3b0bd3326 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java @@ -47,10 +47,7 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.xcontent.XContentType; 
import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; @@ -59,8 +56,7 @@ import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.node.Node; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; @@ -70,6 +66,9 @@ import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -79,6 +78,7 @@ import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.watcher.watch.ClockMock; +import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.authc.TokenService.RefreshTokenStatus; import org.elasticsearch.xpack.security.support.FeatureNotEnabledException; import org.elasticsearch.xpack.security.support.SecurityIndexManager; @@ -136,7 +136,7 @@ public class TokenServiceTests extends ESTestCase { private DiscoveryNode oldNode; private Settings tokenServiceEnabledSettings = Settings.builder() .put(XPackSettings.TOKEN_SERVICE_ENABLED_SETTING.getKey(), true).build(); - private XPackLicenseState licenseState; + private MockLicenseState licenseState; private SecurityContext securityContext; @Before @@ -195,8 +195,8 @@ public void setupClient() 
{ this.clusterService = ClusterServiceUtils.createClusterService(threadPool); // License state (enabled by default) - licenseState = mock(XPackLicenseState.class); - when(licenseState.checkFeature(Feature.SECURITY_TOKEN_SERVICE)).thenReturn(true); + licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(Security.TOKEN_SERVICE_FEATURE)).thenReturn(true); // version 7.2 was an "inflection" point in the Token Service development (access_tokens as UUIDS, multiple concurrent refreshes, // tokens docs on a separate index), let's test the TokenService works in a mixed cluster with nodes with versions prior to these @@ -891,7 +891,7 @@ public void testSupercedingTokenEncryption() throws Exception { } public void testCannotValidateTokenIfLicenseDoesNotAllowTokens() throws Exception { - when(licenseState.checkFeature(Feature.SECURITY_TOKEN_SERVICE)).thenReturn(true); + when(licenseState.isAllowed(Security.TOKEN_SERVICE_FEATURE)).thenReturn(true); TokenService tokenService = createTokenService(tokenServiceEnabledSettings, Clock.systemUTC()); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); final String userTokenId = UUIDs.randomBase64UUID(); @@ -904,7 +904,7 @@ public void testCannotValidateTokenIfLicenseDoesNotAllowTokens() throws Exceptio storeTokenHeader(threadContext, tokenService.prependVersionAndEncodeAccessToken(token.getVersion(), accessToken)); PlainActionFuture authFuture = new PlainActionFuture<>(); - when(licenseState.checkFeature(Feature.SECURITY_TOKEN_SERVICE)).thenReturn(false); + when(licenseState.isAllowed(Security.TOKEN_SERVICE_FEATURE)).thenReturn(false); final SecureString bearerToken = Authenticator.extractBearerTokenFromHeader(threadContext); tokenService.tryAuthenticateToken(bearerToken, authFuture); UserToken authToken = authFuture.actionGet(); diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTestCase.java index f166a4d4f71d1..a12afe08773c9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTestCase.java @@ -10,13 +10,12 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.Tuple; import org.elasticsearch.env.TestEnvironment; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -27,9 +26,10 @@ import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.authc.kerberos.KerberosRealmSettings; +import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.core.security.support.Exceptions; import org.elasticsearch.xpack.core.security.user.User; -import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper; +import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.junit.After; @@ -72,7 +72,7 @@ 
public abstract class KerberosRealmTestCase extends ESTestCase { protected KerberosTicketValidator mockKerberosTicketValidator; protected NativeRoleMappingStore mockNativeRoleMappingStore; - protected XPackLicenseState licenseState; + protected MockLicenseState licenseState; protected static final Set roles = Sets.newHashSet("admin", "kibana_user"); @@ -84,8 +84,8 @@ public void setup() throws Exception { globalSettings = Settings.builder().put("path.home", dir).build(); settings = buildKerberosRealmSettings(REALM_NAME, writeKeyTab(dir.resolve("key.keytab"), "asa").toString(), 100, "10m", true, randomBoolean()); - licenseState = mock(XPackLicenseState.class); - when(licenseState.checkFeature(Feature.SECURITY_AUTHORIZATION_REALM)).thenReturn(true); + licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(Security.DELEGATED_AUTHORIZATION_FEATURE)).thenReturn(true); } @After diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java index f60dce4110bc4..e8d2c841bdb85 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java @@ -21,8 +21,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.mustache.MustacheScriptEngine; @@ -48,6 +47,7 @@ import org.elasticsearch.xpack.core.security.authc.support.mapper.TemplateRoleName; import 
org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.authc.ldap.support.LdapTestCase; import org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory; import org.elasticsearch.xpack.security.authc.support.DnRoleMapper; @@ -96,7 +96,7 @@ public class LdapRealmTests extends LdapTestCase { private ResourceWatcherService resourceWatcherService; private Settings defaultGlobalSettings; private SSLService sslService; - private XPackLicenseState licenseState; + private MockLicenseState licenseState; @Before public void init() throws Exception { @@ -104,8 +104,8 @@ public void init() throws Exception { resourceWatcherService = new ResourceWatcherService(Settings.EMPTY, threadPool); defaultGlobalSettings = Settings.builder().put("path.home", createTempDir()).build(); sslService = new SSLService(TestEnvironment.newEnvironment(defaultGlobalSettings)); - licenseState = mock(XPackLicenseState.class); - when(licenseState.checkFeature(Feature.SECURITY_AUTHORIZATION_REALM)).thenReturn(true); + licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(Security.DELEGATED_AUTHORIZATION_FEATURE)).thenReturn(true); } @After diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java index c24ae871a0542..946d623638d59 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java @@ -10,15 +10,15 @@ import com.nimbusds.jwt.JWTClaimsSet; import com.nimbusds.oauth2.sdk.id.State; import com.nimbusds.openid.connect.sdk.Nonce; + import org.elasticsearch.action.ActionListener; import 
org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.core.Nullable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Nullable; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.rest.RestUtils; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectLogoutResponse; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationResponse; @@ -28,9 +28,10 @@ import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings; import org.elasticsearch.xpack.core.security.authc.support.DelegatedAuthorizationSettings; +import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.authc.support.MockLookupRealm; -import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper; import org.hamcrest.Matchers; import org.junit.Before; import org.mockito.stubbing.Answer; @@ -448,8 +449,8 @@ private AuthenticationResult authenticateWithOidc(String principal, UserRoleMapp } private void initializeRealms(Realm... 
realms) { - XPackLicenseState licenseState = mock(XPackLicenseState.class); - when(licenseState.checkFeature(Feature.SECURITY_AUTHORIZATION_REALM)).thenReturn(true); + MockLicenseState licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(Security.DELEGATED_AUTHORIZATION_FEATURE)).thenReturn(true); final List realmList = Arrays.asList(realms); for (Realm realm : realms) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java index 26537e0beaceb..c3c4fd5d73153 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java @@ -17,8 +17,7 @@ import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.TestEnvironment; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; @@ -32,6 +31,7 @@ import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.core.security.support.NoOpLogger; import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.authc.BytesKey; import org.elasticsearch.xpack.security.authc.support.MockLookupRealm; import org.junit.Before; @@ -51,8 +51,8 @@ import java.util.regex.Pattern; import javax.security.auth.x500.X500Principal; -import static org.elasticsearch.test.TestMatchers.throwableWithMessage; import 
static org.elasticsearch.test.ActionListenerUtils.anyActionListener; +import static org.elasticsearch.test.TestMatchers.throwableWithMessage; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -72,7 +72,7 @@ public class PkiRealmTests extends ESTestCase { public static final String REALM_NAME = "my_pki"; private Settings globalSettings; - private XPackLicenseState licenseState; + private MockLicenseState licenseState; @Before public void setup() throws Exception { @@ -81,8 +81,8 @@ public void setup() throws Exception { .put("path.home", createTempDir()) .put(RealmSettings.getFullSettingKey(realmIdentifier, RealmSettings.ORDER_SETTING), 0) .build(); - licenseState = mock(XPackLicenseState.class); - when(licenseState.checkFeature(Feature.SECURITY_AUTHORIZATION_REALM)).thenReturn(true); + licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(Security.DELEGATED_AUTHORIZATION_FEATURE)).thenReturn(true); } public void testTokenSupport() throws Exception { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java index 35da6f3d04b79..f1e7f0f1e6418 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java @@ -20,8 +20,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.jdk.JavaVersion; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; import 
org.elasticsearch.watcher.ResourceWatcherService; @@ -37,6 +36,7 @@ import org.elasticsearch.xpack.core.ssl.CertParsingUtils; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.core.ssl.TestsSSLService; +import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.support.MockLookupRealm; import org.hamcrest.Matchers; @@ -303,8 +303,8 @@ private AuthenticationResult performAuthentication(UserRoleMapper roleMapper, bo } private void initializeRealms(Realm... realms) { - XPackLicenseState licenseState = mock(XPackLicenseState.class); - when(licenseState.checkFeature(Feature.SECURITY_AUTHORIZATION_REALM)).thenReturn(true); + MockLicenseState licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(Security.DELEGATED_AUTHORIZATION_FEATURE)).thenReturn(true); final List realmList = Arrays.asList(realms); for (Realm realm : realms) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DelegatedAuthorizationSupportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DelegatedAuthorizationSupportTests.java index 5a0b8a8f14ee4..54c4d0e6d1ea6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DelegatedAuthorizationSupportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DelegatedAuthorizationSupportTests.java @@ -12,14 +12,15 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; 
import org.elasticsearch.xpack.core.security.authc.Realm; import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.Security; import org.junit.Before; import java.util.ArrayList; @@ -193,8 +194,8 @@ public void testLicenseRejection() throws Exception { } private XPackLicenseState getLicenseState(boolean authzRealmsAllowed) { - final XPackLicenseState license = mock(XPackLicenseState.class); - when(license.checkFeature(Feature.SECURITY_AUTHORIZATION_REALM)).thenReturn(authzRealmsAllowed); + final MockLicenseState license = mock(MockLicenseState.class); + when(license.isAllowed(Security.DELEGATED_AUTHORIZATION_FEATURE)).thenReturn(authzRealmsAllowed); return license; } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index a2393b2881de8..4522f3487807b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -60,8 +60,12 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.ClearScrollAction; import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.ClosePointInTimeAction; +import org.elasticsearch.action.search.ClosePointInTimeRequest; import org.elasticsearch.action.search.MultiSearchAction; import org.elasticsearch.action.search.MultiSearchRequest; +import org.elasticsearch.action.search.OpenPointInTimeAction; +import org.elasticsearch.action.search.OpenPointInTimeRequest; import org.elasticsearch.action.search.ParsedScrollId; import 
org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequest; @@ -88,22 +92,21 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.core.Tuple; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.ThreadContext.StoredContext; -import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.bulk.stats.BulkOperationListener; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.TestIndexNameExpressionResolver; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ShardSearchRequest; @@ -113,10 +116,7 @@ import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.transport.TransportActionProxy; import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.action.search.ClosePointInTimeAction; -import org.elasticsearch.action.search.ClosePointInTimeRequest; -import org.elasticsearch.action.search.OpenPointInTimeAction; -import org.elasticsearch.action.search.OpenPointInTimeRequest; +import org.elasticsearch.xcontent.XContentBuilder; import 
org.elasticsearch.xpack.core.security.action.InvalidateApiKeyAction; import org.elasticsearch.xpack.core.security.action.InvalidateApiKeyRequest; import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesAction; @@ -154,6 +154,7 @@ import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.security.user.XPackSecurityUser; import org.elasticsearch.xpack.core.security.user.XPackUser; +import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.audit.AuditLevel; import org.elasticsearch.xpack.security.audit.AuditTrail; import org.elasticsearch.xpack.security.audit.AuditTrailService; @@ -201,8 +202,8 @@ import static org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField.ORIGINATING_ACTION_KEY; import static org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames.INTERNAL_SECURITY_MAIN_INDEX_7; import static org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames.SECURITY_MAIN_ALIAS; -import static org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail.PRINCIPAL_ROLES_FIELD_NAME; import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.RESTRICTED_INDICES_AUTOMATON; +import static org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail.PRINCIPAL_ROLES_FIELD_NAME; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsString; @@ -250,8 +251,8 @@ public void setup() { when(clusterService.getClusterSettings()).thenReturn(clusterSettings); when(clusterService.state()).thenReturn(ClusterState.EMPTY_STATE); auditTrail = mock(AuditTrail.class); - XPackLicenseState licenseState = mock(XPackLicenseState.class); - when(licenseState.checkFeature(Feature.SECURITY_AUDITING)).thenReturn(true); + MockLicenseState licenseState = mock(MockLicenseState.class); + 
when(licenseState.isAllowed(Security.AUDITING_FEATURE)).thenReturn(true); auditTrailService = new AuditTrailService(Collections.singletonList(auditTrail), licenseState); threadContext = new ThreadContext(settings); threadPool = mock(ThreadPool.class); @@ -1968,8 +1969,8 @@ public void getUserPrivileges(Authentication authentication, AuthorizationInfo a } }; - XPackLicenseState licenseState = mock(XPackLicenseState.class); - when(licenseState.checkFeature(Feature.SECURITY_AUTHORIZATION_ENGINE)).thenReturn(true); + MockLicenseState licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(Security.AUTHORIZATION_ENGINE_FEATURE)).thenReturn(true); authorizationService = new AuthorizationService(Settings.EMPTY, rolesStore, clusterService, auditTrailService, new DefaultAuthenticationFailureHandler(Collections.emptyMap()), threadPool, new AnonymousUser(Settings.EMPTY), engine, Collections.emptySet(), licenseState, @@ -1978,61 +1979,61 @@ auditTrailService, new DefaultAuthenticationFailureHandler(Collections.emptyMap( try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { authentication = createAuthentication(new User("test user", "a_all")); assertEquals(engine, authorizationService.getAuthorizationEngine(authentication)); - when(licenseState.checkFeature(Feature.SECURITY_AUTHORIZATION_ENGINE)).thenReturn(false); + when(licenseState.isAllowed(Security.AUTHORIZATION_ENGINE_FEATURE)).thenReturn(false); assertThat(authorizationService.getAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); } - when(licenseState.checkFeature(Feature.SECURITY_AUTHORIZATION_ENGINE)).thenReturn(true); + when(licenseState.isAllowed(Security.AUTHORIZATION_ENGINE_FEATURE)).thenReturn(true); try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { authentication = createAuthentication(new User("runas", new String[]{"runas_role"}, new User("runner", "runner_role"))); assertEquals(engine, 
authorizationService.getAuthorizationEngine(authentication)); assertEquals(engine, authorizationService.getRunAsAuthorizationEngine(authentication)); - when(licenseState.checkFeature(Feature.SECURITY_AUTHORIZATION_ENGINE)).thenReturn(false); + when(licenseState.isAllowed(Security.AUTHORIZATION_ENGINE_FEATURE)).thenReturn(false); assertThat(authorizationService.getAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); assertThat(authorizationService.getRunAsAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); } - when(licenseState.checkFeature(Feature.SECURITY_AUTHORIZATION_ENGINE)).thenReturn(true); + when(licenseState.isAllowed(Security.AUTHORIZATION_ENGINE_FEATURE)).thenReturn(true); try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { authentication = createAuthentication(new User("runas", new String[]{"runas_role"}, new ElasticUser(true))); assertEquals(engine, authorizationService.getAuthorizationEngine(authentication)); assertNotEquals(engine, authorizationService.getRunAsAuthorizationEngine(authentication)); assertThat(authorizationService.getRunAsAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); - when(licenseState.checkFeature(Feature.SECURITY_AUTHORIZATION_ENGINE)).thenReturn(false); + when(licenseState.isAllowed(Security.AUTHORIZATION_ENGINE_FEATURE)).thenReturn(false); assertThat(authorizationService.getAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); assertThat(authorizationService.getRunAsAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); } - when(licenseState.checkFeature(Feature.SECURITY_AUTHORIZATION_ENGINE)).thenReturn(true); + when(licenseState.isAllowed(Security.AUTHORIZATION_ENGINE_FEATURE)).thenReturn(true); try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { authentication = createAuthentication(new User("elastic", new String[]{"superuser"}, new User("runner", "runner_role"))); assertNotEquals(engine, 
authorizationService.getAuthorizationEngine(authentication)); assertThat(authorizationService.getAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); assertEquals(engine, authorizationService.getRunAsAuthorizationEngine(authentication)); - when(licenseState.checkFeature(Feature.SECURITY_AUTHORIZATION_ENGINE)).thenReturn(false); + when(licenseState.isAllowed(Security.AUTHORIZATION_ENGINE_FEATURE)).thenReturn(false); assertThat(authorizationService.getAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); assertThat(authorizationService.getRunAsAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); } - when(licenseState.checkFeature(Feature.SECURITY_AUTHORIZATION_ENGINE)).thenReturn(true); + when(licenseState.isAllowed(Security.AUTHORIZATION_ENGINE_FEATURE)).thenReturn(true); try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { authentication = createAuthentication(new User("kibana", new String[]{"kibana_system"}, new ElasticUser(true))); assertNotEquals(engine, authorizationService.getAuthorizationEngine(authentication)); assertThat(authorizationService.getAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); assertNotEquals(engine, authorizationService.getRunAsAuthorizationEngine(authentication)); assertThat(authorizationService.getRunAsAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); - when(licenseState.checkFeature(Feature.SECURITY_AUTHORIZATION_ENGINE)).thenReturn(false); + when(licenseState.isAllowed(Security.AUTHORIZATION_ENGINE_FEATURE)).thenReturn(false); assertThat(authorizationService.getAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); assertThat(authorizationService.getRunAsAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); } - when(licenseState.checkFeature(Feature.SECURITY_AUTHORIZATION_ENGINE)).thenReturn(true); + when(licenseState.isAllowed(Security.AUTHORIZATION_ENGINE_FEATURE)).thenReturn(true); try 
(ThreadContext.StoredContext ignore = threadContext.stashContext()) { authentication = createAuthentication(randomFrom(XPackUser.INSTANCE, XPackSecurityUser.INSTANCE, new ElasticUser(true), new KibanaUser(true))); assertNotEquals(engine, authorizationService.getRunAsAuthorizationEngine(authentication)); assertThat(authorizationService.getRunAsAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); - when(licenseState.checkFeature(Feature.SECURITY_AUTHORIZATION_ENGINE)).thenReturn(false); + when(licenseState.isAllowed(Security.AUTHORIZATION_ENGINE_FEATURE)).thenReturn(false); assertThat(authorizationService.getAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); assertThat(authorizationService.getRunAsAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesAccessControlTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesAccessControlTests.java index 8de0f3bee78a8..44b82c536e11c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesAccessControlTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesAccessControlTests.java @@ -16,8 +16,10 @@ import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition; import java.util.Collections; +import java.util.Map; import java.util.Set; +import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -122,4 +124,19 @@ public void testSLimitedIndicesAccessControl() { assertThat(result.getIndexPermissions("_index").getDocumentPermissions().getQueries(), is(nullValue())); 
assertThat(result.getIndexPermissions("_index").getDocumentPermissions().getLimitedByQueries(), equalTo(queries)); } + + public void testAllowAllIndicesAccessControl() { + final IndicesAccessControl allowAll = IndicesAccessControl.allowAll(); + final IndexAccessControl indexAccessControl = allowAll.getIndexPermissions(randomAlphaOfLengthBetween(3, 8)); + assertThat(indexAccessControl.isGranted(), is(true)); + assertThat(indexAccessControl.getDocumentPermissions(), is(DocumentPermissions.allowAll())); + assertThat(indexAccessControl.getFieldPermissions(), is(FieldPermissions.DEFAULT)); + assertThat(allowAll.getDeniedIndices(), emptyIterable()); + assertThat(allowAll.getFieldAndDocumentLevelSecurityUsage(), is(IndicesAccessControl.DlsFlsUsage.NONE)); + assertThat(allowAll.getIndicesWithFieldOrDocumentLevelSecurity(), emptyIterable()); + + final IndicesAccessControl indicesAccessControl = new IndicesAccessControl(randomBoolean(), Map.of()); + assertThat(allowAll.limitIndicesAccessControl(indicesAccessControl), is(indicesAccessControl)); + assertThat(indicesAccessControl.limitIndicesAccessControl(allowAll), is(indicesAccessControl)); + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java index 9b75714511540..85fb0eb66147b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java @@ -503,6 +503,37 @@ public void testAuthorizationForMappingUpdates() { } } + public void testIndicesPermissionHasFieldOrDocumentLevelSecurity() { + // Make sure we have at least one of fieldPermissions and documentPermission + final FieldPermissions fieldPermissions = randomBoolean() ? 
+ new FieldPermissions(new FieldPermissionsDefinition(Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY)) : + FieldPermissions.DEFAULT; + final Set queries; + if (fieldPermissions == FieldPermissions.DEFAULT) { + queries = Set.of(new BytesArray("a query")); + } else { + queries = randomBoolean() ? Set.of(new BytesArray("a query")) : null; + } + + final IndicesPermission indicesPermission1 = new IndicesPermission.Builder(RESTRICTED_INDICES_AUTOMATON) + .addGroup(IndexPrivilege.ALL, fieldPermissions, queries, randomBoolean(), "*") + .build(); + assertThat(indicesPermission1.hasFieldOrDocumentLevelSecurity(), is(true)); + + // IsTotal means no DLS/FLS + final IndicesPermission indicesPermission2 = new IndicesPermission.Builder(RESTRICTED_INDICES_AUTOMATON) + .addGroup(IndexPrivilege.ALL, FieldPermissions.DEFAULT, null, true, "*") + .build(); + assertThat(indicesPermission2.hasFieldOrDocumentLevelSecurity(), is(false)); + + // IsTotal means NO DLS/FLS even when there is another group that has DLS/FLS + final IndicesPermission indicesPermission3 = new IndicesPermission.Builder(RESTRICTED_INDICES_AUTOMATON) + .addGroup(IndexPrivilege.ALL, FieldPermissions.DEFAULT, null, true, "*") + .addGroup(IndexPrivilege.NONE, fieldPermissions, queries, randomBoolean(), "*") + .build(); + assertThat(indicesPermission3.hasFieldOrDocumentLevelSecurity(), is(false)); + } + private static IndexMetadata createIndexMetadata(String name) { Settings.Builder settingsBuilder = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/IndicesAliasesRequestInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/IndicesAliasesRequestInterceptorTests.java index 85f49df70447b..153d6f38528da 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/IndicesAliasesRequestInterceptorTests.java 
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/IndicesAliasesRequestInterceptorTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.MockLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; @@ -30,6 +29,7 @@ import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition; import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.audit.AuditTrailService; import java.util.Collections; @@ -51,7 +51,7 @@ public class IndicesAliasesRequestInterceptorTests extends ESTestCase { public void testInterceptorThrowsWhenFLSDLSEnabled() { MockLicenseState licenseState = mock(MockLicenseState.class); when(licenseState.copyCurrentLicenseState()).thenReturn(licenseState); - when(licenseState.checkFeature(Feature.SECURITY_AUDITING)).thenReturn(true); + when(licenseState.isAllowed(Security.AUDITING_FEATURE)).thenReturn(true); when(licenseState.isAllowed(DOCUMENT_LEVEL_SECURITY_FEATURE)).thenReturn(true); ThreadContext threadContext = new ThreadContext(Settings.EMPTY); AuditTrailService auditTrailService = new AuditTrailService(Collections.emptyList(), licenseState); @@ -110,7 +110,7 @@ public void testInterceptorThrowsWhenFLSDLSEnabled() { public void testInterceptorThrowsWhenTargetHasGreaterPermissions() throws Exception { MockLicenseState licenseState = mock(MockLicenseState.class); when(licenseState.copyCurrentLicenseState()).thenReturn(licenseState); - 
when(licenseState.checkFeature(Feature.SECURITY_AUDITING)).thenReturn(true); + when(licenseState.isAllowed(Security.AUDITING_FEATURE)).thenReturn(true); when(licenseState.isAllowed(DOCUMENT_LEVEL_SECURITY_FEATURE)).thenReturn(randomBoolean()); ThreadContext threadContext = new ThreadContext(Settings.EMPTY); AuditTrailService auditTrailService = new AuditTrailService(Collections.emptyList(), licenseState); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java index 6b57e38c641be..a7ced96da4853 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.MockLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -35,6 +34,7 @@ import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.audit.AuditTrailService; import java.util.Collections; @@ -56,7 +56,7 @@ public class ResizeRequestInterceptorTests extends ESTestCase { public void testResizeRequestInterceptorThrowsWhenFLSDLSEnabled() { MockLicenseState licenseState = mock(MockLicenseState.class); 
when(licenseState.copyCurrentLicenseState()).thenReturn(licenseState); - when(licenseState.checkFeature(Feature.SECURITY_AUDITING)).thenReturn(true); + when(licenseState.isAllowed(Security.AUDITING_FEATURE)).thenReturn(true); when(licenseState.isAllowed(DOCUMENT_LEVEL_SECURITY_FEATURE)).thenReturn(true); ThreadPool threadPool = mock(ThreadPool.class); ThreadContext threadContext = new ThreadContext(Settings.EMPTY); @@ -108,7 +108,7 @@ public void testResizeRequestInterceptorThrowsWhenFLSDLSEnabled() { public void testResizeRequestInterceptorThrowsWhenTargetHasGreaterPermissions() throws Exception { MockLicenseState licenseState = mock(MockLicenseState.class); when(licenseState.copyCurrentLicenseState()).thenReturn(licenseState); - when(licenseState.checkFeature(Feature.SECURITY_AUDITING)).thenReturn(true); + when(licenseState.isAllowed(Security.AUDITING_FEATURE)).thenReturn(true); when(licenseState.isAllowed(DOCUMENT_LEVEL_SECURITY_FEATURE)).thenReturn(true); ThreadPool threadPool = mock(ThreadPool.class); ThreadContext threadContext = new ThreadContext(Settings.EMPTY); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityMocks.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityMocks.java index 5fd697962ee17..1b31cdddb5988 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityMocks.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityMocks.java @@ -23,12 +23,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.license.XPackLicenseState.Feature; +import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.SecurityContext; +import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.authc.TokenService; import org.elasticsearch.xpack.security.authc.TokenServiceMock; import org.elasticsearch.xpack.security.support.SecurityIndexManager; @@ -164,8 +164,8 @@ public static TokenServiceMock tokenService(boolean enabled, ThreadPool threadPo final Clock clock = Clock.fixed(now, ESTestCase.randomZone()); final Client client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); - final XPackLicenseState licenseState = mock(XPackLicenseState.class); - when(licenseState.checkFeature(Feature.SECURITY_TOKEN_SERVICE)).thenReturn(true); + final MockLicenseState licenseState = mock(MockLicenseState.class); + when(licenseState.isAllowed(Security.TOKEN_SERVICE_FEATURE)).thenReturn(true); final ClusterService clusterService = mock(ClusterService.class); final SecurityContext securityContext = new SecurityContext(settings, threadPool.getThreadContext()); diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregator.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregator.java index dc5fb472d87a6..58eb7c1bd42e2 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregator.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregator.java @@ -85,7 +85,7 @@ public void collect(int doc, long bucket) throws IOException { @Override public InternalAggregation buildAggregation(long bucket) { - if (valuesSources == null) { + if (valuesSources == null || bucket >= counts.size()) { return buildEmptyAggregation(); } boolean complete = counts.get(bucket) <= size; @@ -98,7 +98,7 @@ public InternalAggregation buildAggregation(long bucket) { 
@Override public InternalAggregation buildEmptyAggregation() { - return new InternalGeoLine(name, null, null, metadata(), true, includeSorts, sortOrder, size); + return new InternalGeoLine(name, new long[0], new double[0], metadata(), true, includeSorts, sortOrder, size); } @Override diff --git a/x-pack/plugin/spatial/src/yamlRestTest/resources/rest-api-spec/test/60_geo_line.yml b/x-pack/plugin/spatial/src/yamlRestTest/resources/rest-api-spec/test/60_geo_line.yml index f156e4db9586f..f62eb6f10bec5 100644 --- a/x-pack/plugin/spatial/src/yamlRestTest/resources/rest-api-spec/test/60_geo_line.yml +++ b/x-pack/plugin/spatial/src/yamlRestTest/resources/rest-api-spec/test/60_geo_line.yml @@ -49,3 +49,137 @@ - match: { aggregations.trace.geometry.coordinates.1: [4.91235, 52.374081] } - match: { aggregations.trace.geometry.coordinates.2: [4.914722, 52.371667] } - is_true: aggregations.trace.properties.complete + +--- +"Test empty buckets": + - do: + indices.create: + index: test1 + body: + mappings: + properties: + location: + type: geo_point + date: + type: date + entity: + type: keyword + + - do: + indices.create: + index: test2 + body: + mappings: + properties: + location: + type: geo_point + date: + type: date + entity: + type: keyword + + - do: + bulk: + refresh: true + body: + - index: + _index: test1 + _id: 1 + - '{ "date" : "2020-01-01T01:00:00.0Z", "entity" : "e1", "location" : { "lat" : 50.3, "lon" : 0.13 }}' + - index: + _index: test1 + _id: 2 + - '{ "date" : "2020-01-01T01:00:01.0Z", "entity" : "e1", "location" : { "lat" : 50.4, "lon" : 0.13 } }' + - index: + _index: test1 + _id: 3 + - '{ "date" : "2020-01-01T01:00:03.0Z", "entity" : "e1", "location" : { "lat" : 50.5, "lon" : 0.13 }}' + - index: + _index: test2 + _id: 1 + - '{ "date" : "2020-01-02T02:00:01.0Z", "entity" : "e2", "location" : { "lat" : 51.3, "lon" : 0.13 }}' + - index: + _index: test2 + _id: 2 + - '{ "date" : "2020-01-02T02:00:02.0Z", "entity" : "e2", "location" : { "lat" : 51.4, "lon" : 0.13 }}' 
+ - index: + _index: test2 + _id: 3 + - '{ "date" : "2020-01-02T02:00:03.0Z", "entity" : "e2", "location" : { "lat" : 51.5, "lon" : 0.13 }}' + + - do: + search: + index: test1,test2 + body: + size: 6 + aggs: + tracks: + filters: + filters: + 1: + term: + entity: e3 + 2: + term: + entity: e4 + aggs: + path: + geo_line: + point: + field: location + sort: + field: date + + - match: { hits.total.value: 6 } + - match: { aggregations.tracks.buckets.1.doc_count: 0 } + - match: { aggregations.tracks.buckets.1.path.type: "Feature" } + - match: { aggregations.tracks.buckets.1.path.geometry.type: "LineString" } + - length: { aggregations.tracks.buckets.1.path.geometry.coordinates: 0 } + - match: { aggregations.tracks.buckets.1.path.properties.complete: true } + - match: { aggregations.tracks.buckets.2.doc_count: 0 } + - match: { aggregations.tracks.buckets.2.path.type: "Feature" } + - match: { aggregations.tracks.buckets.2.path.geometry.type: "LineString" } + - length: { aggregations.tracks.buckets.2.path.geometry.coordinates: 0 } + - match: { aggregations.tracks.buckets.2.path.properties.complete: true } + + + - do: + search: + index: test1,test2 + body: + size: 6 + aggs: + tracks: + filters: + filters: + 1: + term: + entity: e1 + 2: + term: + entity: e2 + aggs: + path: + geo_line: + point: + field: location + sort: + field: date + + - match: { hits.total.value: 6 } + - match: { aggregations.tracks.buckets.1.doc_count: 3 } + - match: { aggregations.tracks.buckets.1.path.type: "Feature" } + - match: { aggregations.tracks.buckets.1.path.geometry.type: "LineString" } + - length: { aggregations.tracks.buckets.1.path.geometry.coordinates: 3 } + - match: { aggregations.tracks.buckets.1.path.geometry.coordinates.0: [0.13,50.3] } + - match: { aggregations.tracks.buckets.1.path.geometry.coordinates.1: [0.13,50.4] } + - match: { aggregations.tracks.buckets.1.path.geometry.coordinates.2: [0.13,50.5] } + - match: { aggregations.tracks.buckets.1.path.properties.complete: true } + - 
match: { aggregations.tracks.buckets.2.doc_count: 3 } + - match: { aggregations.tracks.buckets.2.path.type: "Feature" } + - match: { aggregations.tracks.buckets.2.path.geometry.type: "LineString" } + - length: { aggregations.tracks.buckets.2.path.geometry.coordinates: 3 } + - match: { aggregations.tracks.buckets.2.path.geometry.coordinates.0: [0.13,51.3] } + - match: { aggregations.tracks.buckets.2.path.geometry.coordinates.1: [0.13,51.4] } + - match: { aggregations.tracks.buckets.2.path.geometry.coordinates.2: [0.13,51.5] } + - match: { aggregations.tracks.buckets.2.path.properties.complete: true } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/40_knn_search.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/40_knn_search.yml new file mode 100644 index 0000000000000..a83ec8c410978 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/40_knn_search.yml @@ -0,0 +1,90 @@ +setup: + - do: + indices.create: + index: test + body: + settings: + number_of_replicas: 0 + mappings: + properties: + name: + type: keyword + vector: + type: dense_vector + dims: 5 + index: true + similarity: l2_norm + - do: + index: + index: test + body: + name: cow.jpg + vector: [230.0, 300.33, -34.8988, 15.555, -200.0] + + - do: + index: + index: test + id: 2 + body: + name: moose.jpg + vector: [-0.5, 100.0, -13, 14.8, -156.0] + + - do: + index: + index: test + id: 3 + body: + name: rabbit.jpg + vector: [0.5, 111.3, -13.0, 14.8, -156.0] + + - do: + indices.refresh: {} + +--- +"Basic kNN search": + - do: + knn_search: + index: test + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 2 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - match: {hits.hits.0.fields.name.0: "moose.jpg"} + + - match: {hits.hits.1._id: "3"} + - match: {hits.hits.1.fields.name.0: "rabbit.jpg"} + +--- +"Test nonexistent field": + - do: + catch: bad_request + search: + 
rest_total_hits_as_int: true + index: test-index + body: + query: + knn: + field: nonexistent + query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] + num_candidates: 1 + - match: { error.root_cause.0.type: "illegal_argument_exception" } + +--- +"Direct knn queries are disallowed": + - do: + catch: bad_request + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + knn: + field: vector + query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] + num_candidates: 1 + - match: { error.root_cause.0.type: "illegal_argument_exception" } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndexTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndexTests.java index 050c5f6294239..b9c759783745c 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndexTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndexTests.java @@ -188,7 +188,7 @@ public void testCreateLatestVersionedIndexIfRequired_GivenShardInitializationPen doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; - listener.onResponse(new ClusterHealthResponse()); + listener.onResponse(new ClusterHealthResponse("", new String[]{}, ClusterState.EMPTY_STATE, false)); return null; }).when(clusterClient).health(any(), any()); @@ -272,7 +272,7 @@ public void testCreateLatestVersionedIndexIfRequired_GivenConcurrentCreationShar doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; - listener.onResponse(new ClusterHealthResponse()); + listener.onResponse(new ClusterHealthResponse("", new String[]{}, ClusterState.EMPTY_STATE, false)); return null; }).when(clusterClient).health(any(), any()); diff 
--git a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/feature/FeatureFactory.java b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/feature/FeatureFactory.java index e1e517231ba1e..a38e4bc706512 100644 --- a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/feature/FeatureFactory.java +++ b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/feature/FeatureFactory.java @@ -56,13 +56,19 @@ public class FeatureFactory { private final CoordinateSequenceFilter sequenceFilter; // pixel precision of the tile in the mercator projection. private final double pixelPrecision; + // size of the buffer in pixels for the clip envelope. we choose a value that makes sure + // we have values outside the tile for polygons crossing the tile so the outline of the + // tile is not part of the final result. + // TODO: consider exposing this parameter so users have control of the buffer's size. + private static final int BUFFER_SIZE_PIXELS = 5; public FeatureFactory(int z, int x, int y, int extent) { this.pixelPrecision = 2 * SphericalMercatorUtils.MERCATOR_BOUNDS / ((1L << z) * extent); final Rectangle r = SphericalMercatorUtils.recToSphericalMercator(GeoTileUtils.toBoundingBox(x, y, z)); final Envelope tileEnvelope = new Envelope(r.getMinX(), r.getMaxX(), r.getMinY(), r.getMaxY()); final Envelope clipEnvelope = new Envelope(tileEnvelope); - clipEnvelope.expandBy(this.pixelPrecision, this.pixelPrecision); + // expand the clip envelope enough to prevent visual artefacts + clipEnvelope.expandBy(BUFFER_SIZE_PIXELS * this.pixelPrecision, BUFFER_SIZE_PIXELS * this.pixelPrecision); final GeometryFactory geomFactory = new GeometryFactory(); this.builder = new JTSGeometryBuilder(geomFactory); this.clipTile = geomFactory.toGeometry(clipEnvelope); diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/DenseVectorPlugin.java
b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/DenseVectorPlugin.java index cdd921c79603f..201c06a76fda4 100644 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/DenseVectorPlugin.java +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/DenseVectorPlugin.java @@ -7,17 +7,31 @@ package org.elasticsearch.xpack.vectors; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.SearchPlugin; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.xpack.vectors.action.RestKnnSearchAction; import org.elasticsearch.xpack.vectors.mapper.DenseVectorFieldMapper; import org.elasticsearch.xpack.vectors.mapper.SparseVectorFieldMapper; +import org.elasticsearch.xpack.vectors.query.KnnVectorQueryBuilder; import java.util.Collections; import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; +import java.util.function.Supplier; -public class DenseVectorPlugin extends Plugin implements MapperPlugin { +public class DenseVectorPlugin extends Plugin implements ActionPlugin, MapperPlugin, SearchPlugin { public DenseVectorPlugin() { } @@ -28,4 +42,26 @@ public Map getMappers() { mappers.put(SparseVectorFieldMapper.CONTENT_TYPE, SparseVectorFieldMapper.PARSER); return Collections.unmodifiableMap(mappers); } + + @Override + public List getRestHandlers( + Settings settings, + RestController restController, + ClusterSettings 
clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster + ) { + return List.of(new RestKnnSearchAction()); + } + + @Override + public List> getQueries() { + // This query is only meant to be used internally, and not passed to the _search endpoint + return List.of(new QuerySpec<>(KnnVectorQueryBuilder.NAME, KnnVectorQueryBuilder::new, + parser -> { + throw new IllegalArgumentException("[knn] queries cannot be provided directly, use the [_knn_search] endpoint instead"); + })); + } } diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/action/KnnSearchRequestBuilder.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/action/KnnSearchRequestBuilder.java new file mode 100644 index 0000000000000..b69d2f3f2ed3b --- /dev/null +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/action/KnnSearchRequestBuilder.java @@ -0,0 +1,245 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.vectors.action; + +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.common.Strings; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.fetch.StoredFieldsContext; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FieldAndFormat; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.vectors.query.KnnVectorQueryBuilder; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +/** + * A builder used in {@link RestKnnSearchAction} to convert the kNN REST request + * into a {@link SearchRequestBuilder}. 
+ */ +class KnnSearchRequestBuilder { + static final String INDEX_PARAM = "index"; + static final String ROUTING_PARAM = "routing"; + + static final ParseField KNN_SECTION_FIELD = new ParseField("knn"); + private static final ObjectParser PARSER; + + static { + PARSER = new ObjectParser<>("knn-search"); + PARSER.declareField(KnnSearchRequestBuilder::knnSearch, KnnSearch::parse, + KNN_SECTION_FIELD, ObjectParser.ValueType.OBJECT); + PARSER.declareField((p, request, c) -> request.fetchSource(FetchSourceContext.fromXContent(p)), + SearchSourceBuilder._SOURCE_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING); + PARSER.declareFieldArray(KnnSearchRequestBuilder::fields, (p, c) -> FieldAndFormat.fromXContent(p), + SearchSourceBuilder.FETCH_FIELDS_FIELD, ObjectParser.ValueType.OBJECT_ARRAY); + PARSER.declareFieldArray(KnnSearchRequestBuilder::docValueFields, (p, c) -> FieldAndFormat.fromXContent(p), + SearchSourceBuilder.DOCVALUE_FIELDS_FIELD, ObjectParser.ValueType.OBJECT_ARRAY); + PARSER.declareField((p, request, c) -> request.storedFields( + StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), p)), + SearchSourceBuilder.STORED_FIELDS_FIELD, ObjectParser.ValueType.STRING_ARRAY); + } + + /** + * Parses a {@link RestRequest} representing a kNN search into a request builder. 
+ */ + static KnnSearchRequestBuilder parseRestRequest(RestRequest restRequest) throws IOException { + KnnSearchRequestBuilder builder = new KnnSearchRequestBuilder( + Strings.splitStringByCommaToArray(restRequest.param("index"))); + builder.routing(restRequest.param("routing")); + + if (restRequest.hasContentOrSourceParam()) { + try (XContentParser contentParser = restRequest.contentOrSourceParamParser()) { + PARSER.parse(contentParser, builder, null); + } + } + return builder; + } + + private final String[] indices; + private String routing; + private KnnSearch knnSearch; + + private FetchSourceContext fetchSource; + private List fields; + private List docValueFields; + private StoredFieldsContext storedFields; + + private KnnSearchRequestBuilder(String[] indices) { + this.indices = indices; + } + + /** + * Defines the kNN search to execute. + */ + private void knnSearch(KnnSearch knnSearch) { + this.knnSearch = knnSearch; + } + + /** + * A comma separated list of routing values to control the shards the search will be executed on. + */ + private void routing(String routing) { + this.routing = routing; + } + + /** + * Defines how the _source should be fetched. + */ + private void fetchSource(FetchSourceContext fetchSource) { + this.fetchSource = fetchSource; + } + + /** + * A list of fields to load and return. The fields must be present in the document _source. + */ + private void fields(List fields) { + this.fields = fields; + } + + /** + * A list of docvalue fields to load and return. + */ + private void docValueFields(List docValueFields) { + this.docValueFields = docValueFields; + } + + /** + * Defines the stored fields to load and return as part of the search request. To disable the stored + * fields entirely (source and metadata fields), use {@link StoredFieldsContext#_NONE_}. 
+ */ + private void storedFields(StoredFieldsContext storedFields) { + this.storedFields = storedFields; + } + + /** + * Adds all the request components to the given {@link SearchRequestBuilder}. + */ + public void build(SearchRequestBuilder builder) { + builder.setIndices(indices); + builder.setRouting(routing); + + SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); + sourceBuilder.trackTotalHitsUpTo(SearchContext.TRACK_TOTAL_HITS_ACCURATE); + + if (knnSearch == null) { + throw new IllegalArgumentException("missing required [" + KNN_SECTION_FIELD.getPreferredName() + "] section in search body"); + } + knnSearch.build(sourceBuilder); + + sourceBuilder.fetchSource(fetchSource); + sourceBuilder.storedFields(storedFields); + + if (fields != null) { + for (FieldAndFormat field : fields) { + sourceBuilder.fetchField(field); + } + } + + if (docValueFields != null) { + for (FieldAndFormat field : docValueFields) { + sourceBuilder.docValueField(field.field, field.format); + } + } + + builder.setSource(sourceBuilder); + } + + // visible for testing + static class KnnSearch { + private static final int NUM_CANDS_LIMIT = 10000; + static final ParseField FIELD_FIELD = new ParseField("field"); + static final ParseField K_FIELD = new ParseField("k"); + static final ParseField NUM_CANDS_FIELD = new ParseField("num_candidates"); + static final ParseField QUERY_VECTOR_FIELD = new ParseField("query_vector"); + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("knn", args -> { + @SuppressWarnings("unchecked") + List vector = (List) args[1]; + float[] vectorArray = new float[vector.size()]; + for (int i = 0; i < vector.size(); i++) { + vectorArray[i] = vector.get(i); + } + return new KnnSearch((String) args[0], vectorArray, (int) args[2], (int) args[3]); + }); + + static { + PARSER.declareString(constructorArg(), FIELD_FIELD); + PARSER.declareFloatArray(constructorArg(), QUERY_VECTOR_FIELD); + PARSER.declareInt(constructorArg(), 
K_FIELD); + PARSER.declareInt(constructorArg(), NUM_CANDS_FIELD); + } + + public static KnnSearch parse(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + final String field; + final float[] queryVector; + final int k; + final int numCands; + + /** + * Defines a kNN search. + * + * @param field the name of the vector field to search against + * @param queryVector the query vector + * @param k the final number of nearest neighbors to return as top hits + * @param numCands the number of nearest neighbor candidates to consider per shard + */ + KnnSearch(String field, float[] queryVector, int k, int numCands) { + this.field = field; + this.queryVector = queryVector; + this.k = k; + this.numCands = numCands; + } + + void build(SearchSourceBuilder builder) { + // We perform validation here instead of the constructor because it makes the errors + // much clearer. Otherwise, the error message is deeply nested under parsing exceptions. + if (k < 1) { + throw new IllegalArgumentException("[" + K_FIELD.getPreferredName() + "] must be greater than 0"); + } + if (numCands < k) { + throw new IllegalArgumentException("[" + NUM_CANDS_FIELD.getPreferredName() + "] cannot be less than " + + "[" + K_FIELD.getPreferredName() + "]"); + } + if (numCands > NUM_CANDS_LIMIT) { + throw new IllegalArgumentException("[" + NUM_CANDS_FIELD.getPreferredName() + "] cannot exceed [" + NUM_CANDS_LIMIT + "]"); + } + + builder.query(new KnnVectorQueryBuilder(field, queryVector, numCands)); + builder.size(k); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + KnnSearch that = (KnnSearch) o; + return k == that.k && numCands == that.numCands + && Objects.equals(field, that.field) && Arrays.equals(queryVector, that.queryVector); + } + + @Override + public int hashCode() { + int result = Objects.hash(field, k, numCands); + result = 31 * result + 
Arrays.hashCode(queryVector); + return result; + } + } +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/action/RestKnnSearchAction.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/action/RestKnnSearchAction.java new file mode 100644 index 0000000000000..befe73d225865 --- /dev/null +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/action/RestKnnSearchAction.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.vectors.action; + + +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestCancellableNodeClient; +import org.elasticsearch.rest.action.RestStatusToXContentListener; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.rest.RestRequest.Method.POST; + +/** + * The REST action for handling kNN searches. Currently, it just parses + * the REST request into a search request and calls the search action. 
+ */ +public class RestKnnSearchAction extends BaseRestHandler { + + public RestKnnSearchAction() {} + + @Override + public List routes() { + return List.of( + new Route(GET, "{index}/_knn_search"), + new Route(POST, "{index}/_knn_search")); + } + + @Override + public String getName() { + return "knn_search_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + // This will allow to cancel the search request if the http channel is closed + RestCancellableNodeClient cancellableNodeClient = new RestCancellableNodeClient(client, restRequest.getHttpChannel()); + KnnSearchRequestBuilder request = KnnSearchRequestBuilder.parseRestRequest(restRequest); + + SearchRequestBuilder searchRequestBuilder = cancellableNodeClient.prepareSearch(); + request.build(searchRequestBuilder); + + return channel -> searchRequestBuilder.execute(new RestStatusToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/mapper/DenseVectorFieldMapper.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/mapper/DenseVectorFieldMapper.java index 51f25db3fecc6..61b899f2c5fb6 100644 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/mapper/DenseVectorFieldMapper.java +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/mapper/DenseVectorFieldMapper.java @@ -204,6 +204,10 @@ public DenseVectorFieldType(String name, Version indexVersionCreated, int dims, this.indexVersionCreated = indexVersionCreated; } + public int dims() { + return dims; + } + @Override public String typeName() { return CONTENT_TYPE; diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnVectorQueryBuilder.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnVectorQueryBuilder.java new file mode 100644 index 0000000000000..226a017f0d8dc --- /dev/null +++ 
b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnVectorQueryBuilder.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.search.KnnVectorQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.vectors.mapper.DenseVectorFieldMapper; +import org.elasticsearch.xpack.vectors.mapper.DenseVectorFieldMapper.DenseVectorFieldType; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Objects; + +public class KnnVectorQueryBuilder extends AbstractQueryBuilder { + public static final String NAME = "knn"; + + private final String fieldName; + private final float[] queryVector; + private final int numCands; + + public KnnVectorQueryBuilder(String fieldName, float[] queryVector, int numCands) { + this.fieldName = fieldName; + this.queryVector = queryVector; + this.numCands = numCands; + } + + public KnnVectorQueryBuilder(StreamInput in) throws IOException { + super(in); + this.fieldName = in.readString(); + 
this.numCands = in.readVInt(); + this.queryVector = in.readFloatArray(); + } + + public String getFieldName() { + return fieldName; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + out.writeVInt(numCands); + out.writeFloatArray(queryVector); + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME) + .field("field", fieldName) + .field("vector", queryVector) + .field("num_candidates", numCands); + builder.endObject(); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected Query doToQuery(SearchExecutionContext context) { + MappedFieldType fieldType = context.getFieldType(fieldName); + if (fieldType == null) { + throw new IllegalArgumentException("field [" + fieldName + "] does not exist in the mapping"); + } + + if (fieldType instanceof DenseVectorFieldType == false) { + throw new IllegalArgumentException("[" + NAME + "] queries are only supported on [" + + DenseVectorFieldMapper.CONTENT_TYPE + "] fields"); + } + + DenseVectorFieldType vectorFieldType = (DenseVectorFieldType) fieldType; + if (queryVector.length != vectorFieldType.dims()) { + throw new IllegalArgumentException("the query vector has a different dimension [" + queryVector.length + "] " + + "than the index vectors [" + vectorFieldType.dims() + "]"); + } + if (vectorFieldType.isSearchable() == false) { + throw new IllegalArgumentException("[" + "[" + NAME + "] queries are not supported if [index] is disabled"); + } + return new KnnVectorQuery(fieldType.name(), queryVector, numCands); + } + + @Override + protected int doHashCode() { + return Objects.hash(fieldName, Arrays.hashCode(queryVector), numCands); + } + + @Override + protected boolean doEquals(KnnVectorQueryBuilder other) { + return Objects.equals(fieldName, other.fieldName) && + Arrays.equals(queryVector, other.queryVector) && + numCands == other.numCands; + } 
+} diff --git a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/action/KnnSearchRequestBuilderTests.java b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/action/KnnSearchRequestBuilderTests.java new file mode 100644 index 0000000000000..183393a9b2970 --- /dev/null +++ b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/action/KnnSearchRequestBuilderTests.java @@ -0,0 +1,254 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.vectors.action; + +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.search.fetch.subphase.FieldAndFormat; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.FakeRestRequest; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.vectors.action.KnnSearchRequestBuilder.KnnSearch; +import org.elasticsearch.xpack.vectors.query.KnnVectorQueryBuilder; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.search.RandomSearchRequestGenerator.randomSearchSourceBuilder; +import static org.hamcrest.Matchers.containsString; + +public class KnnSearchRequestBuilderTests extends ESTestCase { + + public void testBuildSearchRequest() throws IOException 
    {
        // Choose random REST parameters: target indices and (sometimes) a routing value.
        Map params = new HashMap<>();
        String[] indices = generateRandomStringArray(5, 10, false, true);
        params.put(KnnSearchRequestBuilder.INDEX_PARAM, String.join(",", indices));

        String routing = null;
        if (randomBoolean()) {
            routing = randomAlphaOfLengthBetween(3, 10);
            params.put(KnnSearchRequestBuilder.ROUTING_PARAM, routing);
        }

        // Create a random request body: a knn section plus a random fetch-related source.
        KnnSearch knnSearch = randomKnnSearch();
        SearchSourceBuilder searchSource = randomSearchSourceBuilder(
            () -> null,
            () -> null,
            () -> null,
            Collections::emptyList,
            () -> null,
            () -> null);
        XContentBuilder builder = createRequestBody(knnSearch, searchSource);

        // Convert the REST request to a search request and check the components
        SearchRequestBuilder searchRequestBuilder = buildSearchRequest(builder, params);
        SearchRequest searchRequest = searchRequestBuilder.request();

        // REST-level parameters must be copied straight onto the SearchRequest.
        assertArrayEquals(indices, searchRequest.indices());
        assertEquals(routing, searchRequest.routing());

        // The knn section becomes a KnnVectorQueryBuilder with size = k.
        KnnVectorQueryBuilder query = new KnnVectorQueryBuilder(knnSearch.field, knnSearch.queryVector, knnSearch.numCands);
        assertEquals(query, searchRequest.source().query());
        assertEquals(knnSearch.k, searchRequest.source().size());

        // Fetch-related sections are carried over unchanged.
        assertEquals(searchSource.fetchSource(), searchRequest.source().fetchSource());
        assertEquals(searchSource.fetchFields(), searchRequest.source().fetchFields());
        assertEquals(searchSource.docValueFields(), searchRequest.source().docValueFields());
        assertEquals(searchSource.storedFields(), searchRequest.source().storedFields());
    }

    // Checks that a string-valued _source entry is parsed as a single include pattern.
    public void testParseSourceString() throws IOException {
        // Create random request body
        XContentType xContentType = randomFrom(XContentType.values());
        XContentBuilder builder = XContentBuilder.builder(xContentType.xContent());

        KnnSearch knnSearch = randomKnnSearch();
        builder.startObject()
            .startObject(KnnSearchRequestBuilder.KNN_SECTION_FIELD.getPreferredName())
            .field(KnnSearch.FIELD_FIELD.getPreferredName(), knnSearch.field)
            .field(KnnSearch.K_FIELD.getPreferredName(), knnSearch.k)
            .field(KnnSearch.NUM_CANDS_FIELD.getPreferredName(), knnSearch.numCands)
            .field(KnnSearch.QUERY_VECTOR_FIELD.getPreferredName(), knnSearch.queryVector)
            .endObject();

        // _source given as a plain string rather than an array.
        builder.field(SearchSourceBuilder._SOURCE_FIELD.getPreferredName(), "some-field");
        builder.endObject();

        // Convert the REST request to a search request and check the components
        SearchRequestBuilder searchRequestBuilder = buildSearchRequest(builder);
        SearchRequest searchRequest = searchRequestBuilder.request();

        // The single string becomes one source-include pattern.
        FetchSourceContext fetchSource = searchRequest.source().fetchSource();
        assertTrue(fetchSource.fetchSource());
        assertArrayEquals(new String[]{"some-field"}, fetchSource.includes());
    }

    // Checks that an array-valued _source entry is parsed as multiple include patterns.
    public void testParseSourceArray() throws IOException {
        // Create random request body
        XContentType xContentType = randomFrom(XContentType.values());
        XContentBuilder builder = XContentBuilder.builder(xContentType.xContent());

        KnnSearch knnSearch = randomKnnSearch();
        builder.startObject()
            .startObject(KnnSearchRequestBuilder.KNN_SECTION_FIELD.getPreferredName())
            .field(KnnSearch.FIELD_FIELD.getPreferredName(), knnSearch.field)
            .field(KnnSearch.K_FIELD.getPreferredName(), knnSearch.k)
            .field(KnnSearch.NUM_CANDS_FIELD.getPreferredName(), knnSearch.numCands)
            .field(KnnSearch.QUERY_VECTOR_FIELD.getPreferredName(), knnSearch.queryVector)
            .endObject();

        // _source given as an array of field patterns.
        builder.array(SearchSourceBuilder._SOURCE_FIELD.getPreferredName(), "field1", "field2", "field3");
        builder.endObject();

        // Convert the REST request to a search request and check the components
        SearchRequestBuilder searchRequestBuilder = buildSearchRequest(builder);
        SearchRequest searchRequest = searchRequestBuilder.request();

        FetchSourceContext fetchSource = searchRequest.source().fetchSource();
        assertTrue(fetchSource.fetchSource());
        assertArrayEquals(new String[]{"field1",
            "field2", "field3"}, fetchSource.includes());
    }

    // A request body without a [knn] section must be rejected.
    public void testMissingKnnSection() throws IOException {
        XContentType xContentType = randomFrom(XContentType.values());
        XContentBuilder builder = XContentBuilder.builder(xContentType.xContent()).startObject()
            .array(SearchSourceBuilder.FETCH_FIELDS_FIELD.getPreferredName(), "field1", "field2")
            .endObject();

        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> buildSearchRequest(builder));
        assertThat(e.getMessage(), containsString("missing required [knn] section in search body"));
    }

    // num_candidates below k is invalid (k = 100 vs num_candidates = 80 here).
    public void testNumCandsLessThanK() throws IOException {
        XContentType xContentType = randomFrom(XContentType.values());
        XContentBuilder builder = XContentBuilder.builder(xContentType.xContent()).startObject()
            .startObject(KnnSearchRequestBuilder.KNN_SECTION_FIELD.getPreferredName())
            .field(KnnSearch.FIELD_FIELD.getPreferredName(), "field")
            .field(KnnSearch.K_FIELD.getPreferredName(), 100)
            .field(KnnSearch.NUM_CANDS_FIELD.getPreferredName(), 80)
            .field(KnnSearch.QUERY_VECTOR_FIELD.getPreferredName(), new float[]{1.0f, 2.0f, 3.0f})
            .endObject()
            .endObject();

        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> buildSearchRequest(builder));
        assertThat(e.getMessage(), containsString("[num_candidates] cannot be less than [k]"));
    }

    // num_candidates above the 10000 ceiling is invalid (10002 here).
    public void testNumCandsExceedsLimit() throws IOException {
        XContentType xContentType = randomFrom(XContentType.values());
        XContentBuilder builder = XContentBuilder.builder(xContentType.xContent()).startObject()
            .startObject(KnnSearchRequestBuilder.KNN_SECTION_FIELD.getPreferredName())
            .field(KnnSearch.FIELD_FIELD.getPreferredName(), "field")
            .field(KnnSearch.K_FIELD.getPreferredName(), 100)
            .field(KnnSearch.NUM_CANDS_FIELD.getPreferredName(), 10002)
            .field(KnnSearch.QUERY_VECTOR_FIELD.getPreferredName(), new float[]{1.0f, 2.0f, 3.0f})
            .endObject()
            .endObject();

        IllegalArgumentException e
            = expectThrows(IllegalArgumentException.class, () -> buildSearchRequest(builder));
        assertThat(e.getMessage(), containsString("[num_candidates] cannot exceed [10000]"));
    }

    // k must be strictly positive; 0 is rejected.
    public void testInvalidK() throws IOException {
        XContentType xContentType = randomFrom(XContentType.values());
        XContentBuilder builder = XContentBuilder.builder(xContentType.xContent()).startObject()
            .startObject(KnnSearchRequestBuilder.KNN_SECTION_FIELD.getPreferredName())
            .field(KnnSearch.FIELD_FIELD.getPreferredName(), "field")
            .field(KnnSearch.K_FIELD.getPreferredName(), 0)
            .field(KnnSearch.NUM_CANDS_FIELD.getPreferredName(), 10)
            .field(KnnSearch.QUERY_VECTOR_FIELD.getPreferredName(), new float[]{1.0f, 2.0f, 3.0f})
            .endObject()
            .endObject();

        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> buildSearchRequest(builder));
        assertThat(e.getMessage(), containsString("[k] must be greater than 0"));
    }

    // Convenience overload targeting a single fixed index.
    private SearchRequestBuilder buildSearchRequest(XContentBuilder builder) throws IOException {
        Map params = Map.of(KnnSearchRequestBuilder.INDEX_PARAM, "index");
        return buildSearchRequest(builder, params);
    }

    // Feeds the body and params through a fake REST request, then lets the
    // KnnSearchRequestBuilder populate a SearchRequestBuilder for inspection.
    private SearchRequestBuilder buildSearchRequest(XContentBuilder builder, Map params) throws IOException {
        KnnSearchRequestBuilder knnRequestBuilder = KnnSearchRequestBuilder.parseRestRequest(
            new FakeRestRequest.Builder(xContentRegistry())
                .withMethod(RestRequest.Method.POST)
                .withParams(params)
                .withContent(BytesReference.bytes(builder), builder.contentType())
                .build());
        // A null client is fine here: the request is built but never executed.
        SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(null, SearchAction.INSTANCE);
        knnRequestBuilder.build(searchRequestBuilder);
        return searchRequestBuilder;
    }

    // Builds a KnnSearch with a random field name, vector (dim 2-30), and valid k/num_candidates.
    private KnnSearch randomKnnSearch() {
        String field = randomAlphaOfLength(6);
        int dim = randomIntBetween(2, 30);
        float[] vector = new float[dim];
        for (int i = 0; i < vector.length; i++) {
            vector[i] = randomFloat();
        }

        // num_candidates is drawn from [k, 1000] so the pair is always valid.
        int k = randomIntBetween(1,
100); + int numCands = randomIntBetween(k, 1000); + return new KnnSearch(field, vector, k, numCands); + } + + private XContentBuilder createRequestBody(KnnSearch knnSearch, SearchSourceBuilder searchSource) throws IOException { + XContentType xContentType = randomFrom(XContentType.values()); + XContentBuilder builder = XContentBuilder.builder(xContentType.xContent()); + builder.startObject(); + + builder.startObject(KnnSearchRequestBuilder.KNN_SECTION_FIELD.getPreferredName()) + .field(KnnSearch.FIELD_FIELD.getPreferredName(), knnSearch.field) + .field(KnnSearch.K_FIELD.getPreferredName(), knnSearch.k) + .field(KnnSearch.NUM_CANDS_FIELD.getPreferredName(), knnSearch.numCands) + .field(KnnSearch.QUERY_VECTOR_FIELD.getPreferredName(), knnSearch.queryVector) + .endObject(); + + if (searchSource.fetchSource() != null) { + builder.field(SearchSourceBuilder._SOURCE_FIELD.getPreferredName()); + searchSource.fetchSource().toXContent(builder, ToXContent.EMPTY_PARAMS); + } + + if (searchSource.fetchFields() != null) { + builder.startArray(SearchSourceBuilder.FETCH_FIELDS_FIELD.getPreferredName()); + for (FieldAndFormat fieldAndFormat : searchSource.fetchFields()) { + fieldAndFormat.toXContent(builder, ToXContent.EMPTY_PARAMS); + } + builder.endArray(); + } + + if (searchSource.docValueFields() != null) { + builder.startArray(SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.getPreferredName()); + for (FieldAndFormat fieldAndFormat : searchSource.docValueFields()) { + fieldAndFormat.toXContent(builder, ToXContent.EMPTY_PARAMS); + } + builder.endArray(); + } + + if (searchSource.storedFields() != null) { + searchSource.storedFields().toXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), builder); + } + + builder.endObject(); + return builder; + } + +} diff --git a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/KnnSearchActionTests.java b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/KnnSearchActionTests.java new 
file mode 100644 index 0000000000000..0a8235204ce93 --- /dev/null +++ b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/KnnSearchActionTests.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.vectors.query; + +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xpack.vectors.DenseVectorPlugin; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; + +public class KnnSearchActionTests extends ESSingleNodeTestCase { + private static final int VECTOR_DIMENSION = 10; + + @Override + protected Collection> getPlugins() { + return List.of(DenseVectorPlugin.class); + } + + public void testTotalHits() throws IOException { + Settings indexSettings = Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .build(); + XContentBuilder builder = XContentFactory.jsonBuilder().startObject().startObject("properties") + .startObject("vector") + .field("type", "dense_vector") + .field("dims", VECTOR_DIMENSION) + .field("index", true) + .field("similarity", "l2_norm") + .endObject() + .endObject().endObject(); + createIndex("index1", indexSettings, builder); + createIndex("index2", indexSettings, builder); + + for (int doc = 0; doc < 10; doc++) { + client().prepareIndex("index1") + .setId(String.valueOf(doc)) + .setSource("vector", 
randomVector()) + .get(); + client().prepareIndex("index2") + .setId(String.valueOf(doc)) + .setSource("vector", randomVector()) + .get(); + } + + client().admin().indices().prepareForceMerge("index1", "index2").setMaxNumSegments(1).get(); + client().admin().indices().prepareRefresh("index1", "index2").get(); + + // Since there's no kNN search action at the transport layer, we just emulate + // how the action works (it builds a kNN query under the hood) + float[] queryVector = randomVector(); + SearchResponse response = client().prepareSearch("index1", "index2") + .setQuery(new KnnVectorQueryBuilder("vector", queryVector, 5)) + .setSize(2) + .get(); + + // The total hits is num_cands * num_shards, since the query gathers num_cands hits from each shard + assertHitCount(response, 5 * 2); + assertEquals(2, response.getHits().getHits().length); + } + + private float[] randomVector() { + float[] vector = new float[VECTOR_DIMENSION]; + for (int i = 0; i < vector.length; i++) { + vector[i] = randomFloat(); + } + return vector; + } +} diff --git a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/KnnVectorQueryBuilderTests.java b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/KnnVectorQueryBuilderTests.java new file mode 100644 index 0000000000000..548ae62410977 --- /dev/null +++ b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/KnnVectorQueryBuilderTests.java @@ -0,0 +1,143 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.search.KnnVectorQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.AbstractBuilderTestCase; +import org.elasticsearch.test.AbstractQueryTestCase; +import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xpack.vectors.DenseVectorPlugin; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; + +import static org.hamcrest.Matchers.containsString; + +public class KnnVectorQueryBuilderTests extends AbstractQueryTestCase { + private static final String VECTOR_FIELD = "vector"; + private static final String VECTOR_ALIAS_FIELD = "vector_alias"; + private static final String UNINDEXED_VECTOR_FIELD = "unindexed_vector"; + private static final int VECTOR_DIMENSION = 3; + + @Override + protected Collection> getPlugins() { + return Arrays.asList(DenseVectorPlugin.class, TestGeoShapeFieldMapperPlugin.class); + } + + @Override + protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject().startObject("properties") + .startObject(VECTOR_FIELD) + .field("type", "dense_vector") + .field("dims", VECTOR_DIMENSION) + .field("index", true) + .field("similarity", "l2_norm") + .endObject() + .startObject(VECTOR_ALIAS_FIELD) + .field("type", "alias") + .field("path", VECTOR_FIELD) + .endObject() + .startObject(UNINDEXED_VECTOR_FIELD) + .field("type", "dense_vector") + .field("dims", VECTOR_DIMENSION) + .endObject() + .endObject().endObject(); + 
        mapperService.merge(MapperService.SINGLE_MAPPING_NAME,
            new CompressedXContent(Strings.toString(builder)), MapperService.MergeReason.MAPPING_UPDATE);
    }

    @Override
    protected KnnVectorQueryBuilder doCreateTestQueryBuilder() {
        // Randomly target the vector field either directly or through its alias.
        String fieldName = randomBoolean() ? VECTOR_FIELD : VECTOR_ALIAS_FIELD;

        float[] vector = new float[VECTOR_DIMENSION];
        for (int i = 0; i < vector.length; i++) {
            vector[i] = randomFloat();
        }
        int numCands = randomIntBetween(1, 1000);
        return new KnnVectorQueryBuilder(fieldName, vector, numCands);
    }

    @Override
    protected void doAssertLuceneQuery(KnnVectorQueryBuilder queryBuilder, Query query, SearchExecutionContext context) {
        // TODO: expose getters on KnnVectorQuery and assert more here
        assertTrue(query instanceof KnnVectorQuery);
    }

    // A query vector whose length differs from the mapped dims must be rejected.
    public void testWrongDimension() {
        SearchExecutionContext context = createSearchExecutionContext();
        KnnVectorQueryBuilder query = new KnnVectorQueryBuilder(VECTOR_FIELD, new float[] {1.0f, 2.0f}, 10);
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> query.doToQuery(context));
        assertThat(e.getMessage(), containsString("the query vector has a different dimension [2] than the index vectors [3]"));
    }

    // Querying an unmapped field must fail with a clear message.
    public void testNonexistentField() {
        SearchExecutionContext context = createSearchExecutionContext();
        KnnVectorQueryBuilder query = new KnnVectorQueryBuilder("nonexistent",
            new float[]{1.0f, 1.0f, 1.0f}, 10);
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> query.doToQuery(context));
        assertThat(e.getMessage(), containsString("field [nonexistent] does not exist in the mapping"));
    }

    // Querying a non-dense_vector field (a keyword here) must be rejected.
    public void testWrongFieldType() {
        SearchExecutionContext context = createSearchExecutionContext();
        KnnVectorQueryBuilder query = new KnnVectorQueryBuilder(AbstractBuilderTestCase.KEYWORD_FIELD_NAME,
            new float[]{1.0f, 1.0f, 1.0f}, 10);
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
query.doToQuery(context)); + assertThat(e.getMessage(), containsString("[knn] queries are only supported on [dense_vector] fields")); + } + + public void testUnindexedField() { + SearchExecutionContext context = createSearchExecutionContext(); + KnnVectorQueryBuilder query = new KnnVectorQueryBuilder(UNINDEXED_VECTOR_FIELD, + new float[]{1.0f, 1.0f, 1.0f}, 10); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> query.doToQuery(context)); + assertThat(e.getMessage(), containsString("[knn] queries are not supported if [index] is disabled")); + } + + @Override + public void testValidOutput() { + KnnVectorQueryBuilder query = new KnnVectorQueryBuilder(VECTOR_FIELD, new float[] {1.0f, 2.0f, 3.0f}, 10); + String expected = "{\n" + + " \"knn\" : {\n" + + " \"field\" : \"vector\",\n" + + " \"vector\" : [\n" + + " 1.0,\n" + + " 2.0,\n" + + " 3.0\n" + + " ],\n" + + " \"num_candidates\" : 10\n" + + " }\n" + + "}"; + assertEquals(expected, query.toString()); + } + + @Override + public void testUnknownObjectException() throws IOException { + // Test isn't relevant, since query is never parsed from xContent + } + + @Override + public void testFromXContent() throws IOException { + // Test isn't relevant, since query is never parsed from xContent + } + + @Override + public void testUnknownField() throws IOException { + // Test isn't relevant, since query is never parsed from xContent + } +} diff --git a/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/ExampleSecurityExtension.java b/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/ExampleSecurityExtension.java index 7d02b82b60afa..d09dc73c7b119 100644 --- a/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/ExampleSecurityExtension.java +++ b/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/ExampleSecurityExtension.java @@ -42,6 +42,11 @@ public class ExampleSecurityExtension 
implements SecurityExtension { }); } + @Override + public String extensionName() { + return "example"; + } + @Override public Map getRealms(SecurityComponents components) { return Map.ofEntries(