Merge branch 'master' into create-sparse_new
tlrx committed Oct 19, 2021
2 parents 8b78ed2 + 3733583 commit 66b1e07
Showing 149 changed files with 2,835 additions and 608 deletions.
@@ -89,7 +89,6 @@ private Object[] getTargets(String projectPath) {
return new String[] {
"src/*/java/org/elasticsearch/action/admin/cluster/repositories/**/*.java",
"src/*/java/org/elasticsearch/action/admin/cluster/snapshots/**/*.java",
"src/test/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java",
"src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java",
"src/*/java/org/elasticsearch/index/IndexMode.java",
"src/*/java/org/elasticsearch/index/IndexRouting.java",
@@ -98,6 +97,10 @@ private Object[] getTargets(String projectPath) {
"src/*/java/org/elasticsearch/repositories/**/*.java",
"src/*/java/org/elasticsearch/search/aggregations/**/*.java",
"src/*/java/org/elasticsearch/snapshots/**/*.java" };
} else if (projectPath.equals(":test:framework")) {
return new String[] {
"src/test/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java",
};
} else {
// Normally this isn"t necessary, but we have Java sources in
// non-standard places
@@ -203,7 +206,6 @@ private Object[] getTargets(String projectPath) {
":test:fixtures:geoip-fixture",
":test:fixtures:krb5kdc-fixture",
":test:fixtures:old-elasticsearch",
":test:framework",
":test:logger-usage",
":x-pack:docs",
":x-pack:license-tools",
@@ -36,7 +36,7 @@
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;

import org.gradle.api.model.ObjectFactory;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.BufferedWriter;
@@ -51,6 +51,8 @@
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import javax.inject.Inject;
import java.io.Serializable;

/**
* Checks files for license headers.
@@ -95,17 +97,18 @@ public List<String> getExcludes() {
return excludes;
}

public Map<String, String> getAdditionalLicenses() {
return additionalLicenses;
}

public void setExcludes(List<String> excludes) {
this.excludes = excludes;
}

@OutputFile
private File reportFile = new File(getProject().getBuildDir(), "reports/licenseHeaders/rat.xml");

private static List<License> conventionalLicenses = Arrays.asList(
// Dual SSPLv1 and Elastic
new License("DUAL", "SSPL+Elastic License", "the Elastic License 2.0 or the Server")
);

/**
* Allowed license families for this project.
*/
@@ -118,13 +121,17 @@ public void setExcludes(List<String> excludes) {
*/
@Input
private List<String> excludes = new ArrayList<String>();

private ListProperty<License> additionalLicenses;

/**
* Additional license families that may be found. The key is the license category name (5 characters),
* followed by the family name and the value list of patterns to search for.
*/
@Input
protected Map<String, String> additionalLicenses = new HashMap<String, String>();

public ListProperty<License> getAdditionalLicenses() {
return additionalLicenses;
}
/**
* Add a new license type.
* <p>
@@ -139,7 +146,12 @@ public void additionalLicense(final String categoryName, String familyName, Stri
throw new IllegalArgumentException("License category name must be exactly 5 characters, got " + categoryName);
}

additionalLicenses.put(categoryName + familyName, pattern);
additionalLicenses.add(new License(categoryName, familyName, pattern));
}

@Inject
public LicenseHeadersTask(ObjectFactory objectFactory) {
additionalLicenses = objectFactory.listProperty(License.class).convention(conventionalLicenses);
}

@TaskAction
Expand All @@ -160,14 +172,10 @@ public void runRat() {
matchers.add(subStringMatcher("GEN ", "Generated", "ANTLR GENERATED CODE"));
// Vendored Code
matchers.add(subStringMatcher("VEN ", "Vendored", "@notice"));
// Dual SSPLv1 and Elastic
matchers.add(subStringMatcher("DUAL", "SSPL+Elastic License", "the Elastic License 2.0 or the Server"));

for (Map.Entry<String, String> additional : additionalLicenses.entrySet()) {
String category = additional.getKey().substring(0, 5);
String family = additional.getKey().substring(5);
matchers.add(subStringMatcher(category, family, additional.getValue()));
}
additionalLicenses.get().forEach(l ->
matchers.add(subStringMatcher(l.licenseFamilyCategory, l.licenseFamilyName, l.substringPattern))
);

reportConfiguration.setHeaderMatcher(new HeaderMatcherMultiplexer(matchers.toArray(IHeaderMatcher[]::new)));
reportConfiguration.setApprovedLicenseNames(approvedLicenses.stream().map(license -> {
@@ -190,7 +198,6 @@ private IHeaderMatcher subStringMatcher(String licenseFamilyCategory, String lic
SubstringLicenseMatcher substringLicenseMatcher = new SubstringLicenseMatcher();
substringLicenseMatcher.setLicenseFamilyCategory(licenseFamilyCategory);
substringLicenseMatcher.setLicenseFamilyName(licenseFamilyName);

SubstringLicenseMatcher.Pattern pattern = new SubstringLicenseMatcher.Pattern();
pattern.setSubstring(substringPattern);
substringLicenseMatcher.addConfiguredPattern(pattern);
@@ -249,4 +256,16 @@ private static List<Element> elementList(NodeList resourcesNodes) {
}
return nodeList;
}

static class License implements Serializable {
private String licenseFamilyCategory;
private String licenseFamilyName;
private String substringPattern;

public License(String licenseFamilyCategory, String licenseFamilyName, String substringPattern) {
this.licenseFamilyCategory = licenseFamilyCategory;
this.licenseFamilyName = licenseFamilyName;
this.substringPattern = substringPattern;
}
}
}
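
The core of the change above is the switch from an eagerly populated `Map<String, String>` to a constructor-injected `ListProperty<License>` whose convention carries the dual SSPL+Elastic matcher, so `runRat()` no longer hard-codes that matcher. A minimal, self-contained sketch of the same Gradle convention pattern follows; the class and property names are illustrative and not part of this commit:

import javax.inject.Inject;
import java.util.List;

import org.gradle.api.DefaultTask;
import org.gradle.api.model.ObjectFactory;
import org.gradle.api.provider.ListProperty;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.TaskAction;

// Illustrative sketch of the ObjectFactory/convention pattern used by LicenseHeadersTask:
// the property starts out holding the conventional value and only changes when the
// build script appends to it (for example via an additionalLicense(...)-style method).
public class ExampleConventionTask extends DefaultTask {
    private final ListProperty<String> licenses;

    @Inject
    public ExampleConventionTask(ObjectFactory objectFactory) {
        // convention() supplies the default; add() layers extra entries on top of it
        licenses = objectFactory.listProperty(String.class).convention(List.of("DUAL: SSPL+Elastic"));
    }

    @Input
    public ListProperty<String> getLicenses() {
        return licenses;
    }

    @TaskAction
    public void printLicenses() {
        // get() resolves the convention plus anything the build script added
        licenses.get().forEach(l -> getLogger().lifecycle("license matcher: {}", l));
    }
}

Configured from a build script, this behaves like the `additionalLicense` hook exercised by the functional test below: the convention stays in effect until an entry is added or the value is explicitly set.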
@@ -59,6 +59,45 @@ class LicenseHeadersPrecommitPluginFuncTest extends AbstractGradleFuncTest {
result.task(":licenseHeaders").outcome == TaskOutcome.SUCCESS
}

def "supports sspl by convention"() {
given:
buildFile << """
plugins {
id 'java'
id 'elasticsearch.internal-licenseheaders'
}
"""
dualLicensedFile()

when:
def result = gradleRunner("licenseHeaders").build()

then:
result.task(":licenseHeaders").outcome == TaskOutcome.SUCCESS
}

def "sspl default additional license can be overridden"() {
given:
buildFile << """
plugins {
id 'java'
id 'elasticsearch.internal-licenseheaders'
}
tasks.named("licenseHeaders").configure {
additionalLicense 'ELAST', 'Elastic License 2.0', '2.0; you may not use this file except in compliance with the Elastic License'
}
"""
elasticLicensed()
dualLicensedFile()

when:
def result = gradleRunner("licenseHeaders").buildAndFail()

then:
result.task(":licenseHeaders").outcome == TaskOutcome.FAILED
}

private File unapprovedSourceFile(String filePath = "src/main/java/org/acme/UnapprovedLicensed.java") {
File sourceFile = file(filePath);
sourceFile << """
@@ -115,6 +154,21 @@ class LicenseHeadersPrecommitPluginFuncTest extends AbstractGradleFuncTest {
"""
}

private File elasticLicensed() {
file("src/main/java/org/acme/ElasticLicensed.java") << """
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.acme;
public class ElasticLicensed {
}
"""
}

private String packageString(File sourceFile) {
String normalizedPath = normalized(sourceFile.getPath())
(normalizedPath.substring(normalizedPath.indexOf("src/main/java")) - "src/main/java/" - ("/" + sourceFile.getName())).replaceAll("/", ".")
4 changes: 2 additions & 2 deletions build.gradle
@@ -132,9 +132,9 @@ tasks.register("verifyVersions") {
* after the backport of the backcompat code is complete.
*/

boolean bwc_tests_enabled = true
boolean bwc_tests_enabled = false
// place a PR link here when committing bwc changes:
String bwc_tests_disabled_issue = ""
String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/79385"
/*
* FIPS 140-2 behavior was fixed in 7.11.0. Before that there is no way to run elasticsearch in a
* JVM that is properly configured to be in fips mode with BCFIPS. For now we need to disable
@@ -37,13 +37,13 @@
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.transport.RemoteClusterService;
import org.elasticsearch.transport.SniffConnectionStrategy;
import org.elasticsearch.xcontent.XContentType;

import java.io.IOException;
import java.util.HashMap;
@@ -316,7 +316,7 @@ public void testClusterHealthNotFoundIndex() throws IOException {
assertThat(response.getStatus(), equalTo(ClusterHealthStatus.RED));
assertNoIndices(response);
assertWarnings("The HTTP status code for a cluster health timeout will be changed from 408 to 200 in a " +
"future version. Set the [es.cluster_health.request_timeout_200] system property to [true] to suppress this message and " +
"future version. Set the [return_200_for_cluster_health_timeout] query parameter to [true] to suppress this message and " +
"opt in to the future behaviour now.");
}

2 changes: 1 addition & 1 deletion distribution/src/bin/elasticsearch
@@ -56,7 +56,7 @@ if [[ $ATTEMPT_SECURITY_AUTO_CONFIG = true ]]; then
if ES_MAIN_CLASS=org.elasticsearch.xpack.security.cli.ConfigInitialNode \
ES_ADDITIONAL_SOURCES="x-pack-env;x-pack-security-env" \
ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/security-cli \
"`dirname "$0"`"/elasticsearch-cli "$@" <<<"$KEYSTORE_PASSWORD"; then
bin/elasticsearch-cli "$@" <<<"$KEYSTORE_PASSWORD"; then
:
else
retval=$?
5 changes: 5 additions & 0 deletions docs/reference/cluster/health.asciidoc
@@ -97,6 +97,11 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms]
provided or better, i.e. `green` > `yellow` > `red`. By default, will not
wait for any status.

`return_200_for_cluster_health_timeout`::
(Optional, Boolean) Controls whether to return HTTP status code `200` instead of
`408` when the cluster health request times out on the server side. Defaults to
`false`.
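
For illustration only (not part of this commit), a client could opt in to the new behaviour with the query parameter documented above; the sketch below assumes the low-level Java REST client and a node reachable at localhost:9200:

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class ClusterHealthTimeoutExample {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request request = new Request("GET", "/_cluster/health");
            // Force a short wait so the request can time out, then opt in to the 200 response.
            request.addParameter("wait_for_status", "green");
            request.addParameter("timeout", "1s");
            request.addParameter("return_200_for_cluster_health_timeout", "true");
            Response response = client.performRequest(request);
            // With the parameter set, a timed-out health check returns HTTP 200 instead of 408.
            System.out.println(response.getStatusLine());
        }
    }
}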

[[cluster-health-api-response-body]]
==== {api-response-body-title}

2 changes: 1 addition & 1 deletion docs/reference/getting-started.asciidoc
@@ -507,7 +507,7 @@ include::{es-repo-dir}/tab-widgets/quick-start-cleanup-widget.asciidoc[]

* Use {fleet} and {agent} to collect logs and metrics directly from your data
sources and send them to {es}. See the
{fleet-guide}/fleet-quick-start.html[{fleet} quick start guide].
{observability-guide}/ingest-logs-metrics-uptime.html[Ingest logs, metrics, and uptime data with {agent}].

* Use {kib} to explore, visualize, and manage your {es} data. See the
{kibana-ref}/get-started.html[{kib} quick start guide].
3 changes: 1 addition & 2 deletions docs/reference/ingest.asciidoc
@@ -432,8 +432,7 @@ If you run {agent} standalone, you can apply pipelines using an
<<index-default-pipeline,`index.default_pipeline`>> or
<<index-final-pipeline,`index.final_pipeline`>> index setting. Alternatively,
you can specify the `pipeline` policy setting in your `elastic-agent.yml`
configuration. See {fleet-guide}/run-elastic-agent-standalone.html[Run {agent}
standalone].
configuration. See {fleet-guide}/install-standalone-elastic-agent.html[Install standalone {agent}s].

[discrete]
[[access-source-fields]]
7 changes: 7 additions & 0 deletions docs/reference/modules/indices/recovery.asciidoc
@@ -100,3 +100,10 @@ sent in parallel to the target node for each recovery. Defaults to `5`.
+
Do not increase this setting without carefully verifying that your cluster has
the resources available to handle the extra load that will result.

`indices.recovery.max_concurrent_snapshot_file_downloads_per_node`::
(<<cluster-update-settings,Dynamic>>, Expert) Number of snapshot file download requests
executed in parallel on the target node for all recoveries. Defaults to `25`.
+
Do not increase this setting without carefully verifying that your cluster has
the resources available to handle the extra load that will result.
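
As a hedged illustration (again assuming the low-level Java REST client and a local node, neither of which is part of this commit), the new dynamic setting could be adjusted through the cluster settings API like this:

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RestClient;

public class RecoverySnapshotDownloadSettingExample {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request request = new Request("PUT", "/_cluster/settings");
            // Lower the per-node cap on concurrent snapshot file downloads across all recoveries.
            request.setJsonEntity(
                "{\"persistent\":{\"indices.recovery.max_concurrent_snapshot_file_downloads_per_node\":20}}"
            );
            client.performRequest(request);
        }
    }
}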