Skip to content

Commit

Permalink
Merge branch 'master' into compat_rest_api
Browse files Browse the repository at this point in the history
  • Loading branch information
pgomulka committed Apr 28, 2020
2 parents 144dd89 + ca2f983 commit 7f54c45
Show file tree
Hide file tree
Showing 319 changed files with 5,781 additions and 2,927 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -182,60 +182,6 @@ class BuildPlugin implements Plugin<Project> {
}
}

/** Add a check before gradle execution phase which ensures java home for the given java version is set. */
static void requireJavaHome(Task task, int version) {
    // use root project for global accounting of required java versions
    Project rootProject = task.project.rootProject
    ExtraPropertiesExtension extraProperties = rootProject.extensions.extraProperties

    // hacky way (but the only way) to find if the task graph has already been populated:
    // getAllTasks() throws IllegalStateException until the graph is ready
    boolean taskGraphReady
    try {
        rootProject.gradle.taskGraph.getAllTasks()
        taskGraphReady = true
    } catch (IllegalStateException ignored) {
        // fix: the catch clause must name its exception variable; bare
        // `catch (IllegalStateException)` does not parse
        taskGraphReady = false
    }

    if (taskGraphReady) {
        // already executing: check directly whether the requested version is present
        if (BuildParams.javaVersions.find { it.version == version } == null) {
            throw new GradleException("JAVA${version}_HOME required to run task:\n${task}")
        }
    } else {
        // configuration time: record the requirement and validate all of them
        // in a single whenReady callback registered on first use
        if (extraProperties.has('requiredJavaVersions') == false) {
            extraProperties.set('requiredJavaVersions', [:])
            rootProject.gradle.taskGraph.whenReady { TaskExecutionGraph taskGraph ->
                List<String> messages = []
                Map<Integer, List<Task>> requiredJavaVersions = (Map<Integer, List<Task>>) extraProperties.get('requiredJavaVersions')
                for (Map.Entry<Integer, List<Task>> entry : requiredJavaVersions) {
                    if (BuildParams.javaVersions.any { it.version == entry.key }) {
                        continue
                    }
                    // only report tasks that are actually scheduled to run
                    List<String> tasks = entry.value.findAll { taskGraph.hasTask(it) }.collect { " ${it.path}".toString() }
                    if (tasks.isEmpty() == false) {
                        messages.add("JAVA${entry.key}_HOME required to run tasks:\n${tasks.join('\n')}".toString())
                    }
                }
                if (messages.isEmpty() == false) {
                    throw new GradleException(messages.join('\n'))
                }
            }
        }
        Map<Integer, List<Task>> requiredJavaVersions = (Map<Integer, List<Task>>) extraProperties.get('requiredJavaVersions')
        requiredJavaVersions.putIfAbsent(version, [])
        requiredJavaVersions.get(version).add(task)
    }
}

/**
 * Registers the given task's requirement on the requested major Java version and
 * returns the matching java home path, or null when that version is not configured.
 */
static String getJavaHome(final Task task, final int version) {
    requireJavaHome(task, version)
    JavaHome matched = BuildParams.javaVersions.find { candidate -> candidate.version == version }
    if (matched == null) {
        return null
    }
    return matched.javaHome.get().absolutePath
}

/**
* Makes dependencies non-transitive.
*
Expand Down
37 changes: 37 additions & 0 deletions buildSrc/src/main/java/org/elasticsearch/gradle/util/JavaUtil.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.gradle.util;

import org.elasticsearch.gradle.info.BuildParams;
import org.elasticsearch.gradle.info.JavaHome;
import org.gradle.api.GradleException;

import java.util.List;
import java.util.Optional;

/**
 * Utility for resolving the java home directory of a specific major Java version,
 * as captured by {@link BuildParams#getJavaVersions()} from JAVA&lt;N&gt;_HOME configuration.
 */
public class JavaUtil {

    private JavaUtil() {
        // static utility class; no instances
    }

    /** A convenience method for getting java home for a version of java and requiring that version for the given task to execute */
    public static String getJavaHome(final int version) {
        // NOTE: this method must be public — it is statically imported from build
        // scripts in other packages (e.g. distribution/bwc/build.gradle); the
        // original package-private access would not be visible there
        List<JavaHome> javaHomes = BuildParams.getJavaVersions();
        Optional<JavaHome> java = javaHomes.stream().filter(j -> j.getVersion() == version).findFirst();
        // fail the build eagerly with the exact env var name the user must set
        return java.orElseThrow(() -> new GradleException("JAVA" + version + "_HOME required")).getJavaHome().get().getAbsolutePath();
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ public class ModelSizeStats implements ToXContentObject {
public static final ParseField FREQUENT_CATEGORY_COUNT_FIELD = new ParseField("frequent_category_count");
public static final ParseField RARE_CATEGORY_COUNT_FIELD = new ParseField("rare_category_count");
public static final ParseField DEAD_CATEGORY_COUNT_FIELD = new ParseField("dead_category_count");
public static final ParseField FAILED_CATEGORY_COUNT_FIELD = new ParseField("failed_category_count");
public static final ParseField CATEGORIZATION_STATUS_FIELD = new ParseField("categorization_status");
public static final ParseField LOG_TIME_FIELD = new ParseField("log_time");
public static final ParseField TIMESTAMP_FIELD = new ParseField("timestamp");
Expand All @@ -81,6 +82,7 @@ public class ModelSizeStats implements ToXContentObject {
PARSER.declareLong(Builder::setFrequentCategoryCount, FREQUENT_CATEGORY_COUNT_FIELD);
PARSER.declareLong(Builder::setRareCategoryCount, RARE_CATEGORY_COUNT_FIELD);
PARSER.declareLong(Builder::setDeadCategoryCount, DEAD_CATEGORY_COUNT_FIELD);
PARSER.declareLong(Builder::setFailedCategoryCount, FAILED_CATEGORY_COUNT_FIELD);
PARSER.declareField(Builder::setCategorizationStatus,
p -> CategorizationStatus.fromString(p.text()), CATEGORIZATION_STATUS_FIELD, ValueType.STRING);
PARSER.declareField(Builder::setLogTime,
Expand Down Expand Up @@ -143,15 +145,16 @@ public String toString() {
private final long frequentCategoryCount;
private final long rareCategoryCount;
private final long deadCategoryCount;
private final long failedCategoryCount;
private final CategorizationStatus categorizationStatus;
private final Date timestamp;
private final Date logTime;

private ModelSizeStats(String jobId, long modelBytes, Long modelBytesExceeded, Long modelBytesMemoryLimit, long totalByFieldCount,
long totalOverFieldCount, long totalPartitionFieldCount, long bucketAllocationFailuresCount,
MemoryStatus memoryStatus, long categorizedDocCount, long totalCategoryCount, long frequentCategoryCount,
long rareCategoryCount, long deadCategoryCount, CategorizationStatus categorizationStatus,
Date timestamp, Date logTime) {
long rareCategoryCount, long deadCategoryCount, long failedCategoryCount,
CategorizationStatus categorizationStatus, Date timestamp, Date logTime) {
this.jobId = jobId;
this.modelBytes = modelBytes;
this.modelBytesExceeded = modelBytesExceeded;
Expand All @@ -166,6 +169,7 @@ private ModelSizeStats(String jobId, long modelBytes, Long modelBytesExceeded, L
this.frequentCategoryCount = frequentCategoryCount;
this.rareCategoryCount = rareCategoryCount;
this.deadCategoryCount = deadCategoryCount;
this.failedCategoryCount = failedCategoryCount;
this.categorizationStatus = categorizationStatus;
this.timestamp = timestamp;
this.logTime = logTime;
Expand Down Expand Up @@ -194,6 +198,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.field(FREQUENT_CATEGORY_COUNT_FIELD.getPreferredName(), frequentCategoryCount);
builder.field(RARE_CATEGORY_COUNT_FIELD.getPreferredName(), rareCategoryCount);
builder.field(DEAD_CATEGORY_COUNT_FIELD.getPreferredName(), deadCategoryCount);
builder.field(FAILED_CATEGORY_COUNT_FIELD.getPreferredName(), failedCategoryCount);
builder.field(CATEGORIZATION_STATUS_FIELD.getPreferredName(), categorizationStatus);
builder.timeField(LOG_TIME_FIELD.getPreferredName(), LOG_TIME_FIELD.getPreferredName() + "_string", logTime.getTime());
if (timestamp != null) {
Expand Down Expand Up @@ -260,6 +265,10 @@ public long getDeadCategoryCount() {
return deadCategoryCount;
}

/** Returns the value parsed from the {@code failed_category_count} field. */
public long getFailedCategoryCount() {
return failedCategoryCount;
}

/** Returns the value parsed from the {@code categorization_status} field. */
public CategorizationStatus getCategorizationStatus() {
return categorizationStatus;
}
Expand All @@ -286,7 +295,7 @@ public Date getLogTime() {
/**
 * Hashes every field compared in {@code equals} (including the newly added
 * failedCategoryCount), keeping the equals/hashCode contract intact.
 */
public int hashCode() {
return Objects.hash(jobId, modelBytes, modelBytesExceeded, modelBytesMemoryLimit, totalByFieldCount, totalOverFieldCount,
totalPartitionFieldCount, this.bucketAllocationFailuresCount, memoryStatus, categorizedDocCount, totalCategoryCount,
frequentCategoryCount, rareCategoryCount, deadCategoryCount, failedCategoryCount, categorizationStatus, timestamp, logTime);
}

/**
Expand Down Expand Up @@ -314,6 +323,7 @@ public boolean equals(Object other) {
&& this.frequentCategoryCount == that.frequentCategoryCount
&& this.rareCategoryCount == that.rareCategoryCount
&& this.deadCategoryCount == that.deadCategoryCount
&& this.failedCategoryCount == that.failedCategoryCount
&& Objects.equals(this.categorizationStatus, that.categorizationStatus)
&& Objects.equals(this.timestamp, that.timestamp)
&& Objects.equals(this.logTime, that.logTime)
Expand All @@ -336,6 +346,7 @@ public static class Builder {
private long frequentCategoryCount;
private long rareCategoryCount;
private long deadCategoryCount;
private long failedCategoryCount;
private CategorizationStatus categorizationStatus;
private Date timestamp;
private Date logTime;
Expand All @@ -362,6 +373,7 @@ public Builder(ModelSizeStats modelSizeStats) {
this.frequentCategoryCount = modelSizeStats.frequentCategoryCount;
this.rareCategoryCount = modelSizeStats.rareCategoryCount;
this.deadCategoryCount = modelSizeStats.deadCategoryCount;
this.failedCategoryCount = modelSizeStats.failedCategoryCount;
this.categorizationStatus = modelSizeStats.categorizationStatus;
this.timestamp = modelSizeStats.timestamp;
this.logTime = modelSizeStats.logTime;
Expand Down Expand Up @@ -433,6 +445,11 @@ public Builder setDeadCategoryCount(long deadCategoryCount) {
return this;
}

/** Sets the {@code failed_category_count} value; returns this builder for chaining. */
public Builder setFailedCategoryCount(long failedCategoryCount) {
this.failedCategoryCount = failedCategoryCount;
return this;
}

public Builder setCategorizationStatus(CategorizationStatus categorizationStatus) {
Objects.requireNonNull(categorizationStatus, "[" + CATEGORIZATION_STATUS_FIELD.getPreferredName() + "] must not be null");
this.categorizationStatus = categorizationStatus;
Expand All @@ -452,7 +469,7 @@ public Builder setLogTime(Date logTime) {
/**
 * Builds an immutable ModelSizeStats from the current builder state.
 * Argument order must match the private constructor exactly: failedCategoryCount
 * is passed between deadCategoryCount and categorizationStatus.
 */
public ModelSizeStats build() {
return new ModelSizeStats(jobId, modelBytes, modelBytesExceeded, modelBytesMemoryLimit, totalByFieldCount, totalOverFieldCount,
totalPartitionFieldCount, bucketAllocationFailuresCount, memoryStatus, categorizedDocCount, totalCategoryCount,
frequentCategoryCount, rareCategoryCount, deadCategoryCount, failedCategoryCount, categorizationStatus, timestamp, logTime);
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -200,11 +200,11 @@ public void testGetUsers() throws Exception {
List<User> users = new ArrayList<>(3);
users.addAll(response.getUsers());
assertNotNull(response);
// 9 users are expected to be returned
// 10 users are expected to be returned
// test_users (3): user1, user2, user3
// system_users (6): elastic, beats_system, apm_system, logstash_system, kibana, remote_monitoring_user
// system_users (7): elastic, beats_system, apm_system, logstash_system, kibana, kibana_system, remote_monitoring_user
logger.info(users);
assertThat(users.size(), equalTo(9));
assertThat(users.size(), equalTo(10));
}

{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ public void testDefaultConstructor() {
assertEquals(0, stats.getFrequentCategoryCount());
assertEquals(0, stats.getRareCategoryCount());
assertEquals(0, stats.getDeadCategoryCount());
assertEquals(0, stats.getFailedCategoryCount());
assertEquals(CategorizationStatus.OK, stats.getCategorizationStatus());
}

Expand Down Expand Up @@ -109,6 +110,9 @@ public static ModelSizeStats createRandomized() {
if (randomBoolean()) {
stats.setDeadCategoryCount(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setFailedCategoryCount(randomNonNegativeLong());
}
if (randomBoolean()) {
stats.setCategorizationStatus(randomFrom(CategorizationStatus.values()));
}
Expand Down
6 changes: 3 additions & 3 deletions distribution/bwc/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ import org.gradle.util.GradleVersion

import java.nio.charset.StandardCharsets

import static org.elasticsearch.gradle.BuildPlugin.getJavaHome
import static org.elasticsearch.gradle.util.JavaUtil.getJavaHome

/**
* We want to be able to do BWC tests for unreleased versions without relying on and waiting for snapshots.
Expand Down Expand Up @@ -166,7 +166,7 @@ bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInfo unreleased
List<String> lines = file("${checkoutDir}/.ci/java-versions.properties").readLines()
environment(
'JAVA_HOME',
getJavaHome(it, Integer.parseInt(
getJavaHome(Integer.parseInt(
lines
.findAll({ it.startsWith("ES_BUILD_JAVA=") })
.collect({ it.replace("ES_BUILD_JAVA=java", "").trim() })
Expand All @@ -176,7 +176,7 @@ bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInfo unreleased
)
environment(
'RUNTIME_JAVA_HOME',
getJavaHome(it, Integer.parseInt(
getJavaHome(Integer.parseInt(
lines
.findAll({ it.startsWith("ES_RUNTIME_JAVA=java") })
.collect({ it.replace("ES_RUNTIME_JAVA=java", "").trim() })
Expand Down
15 changes: 12 additions & 3 deletions distribution/docker/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -28,23 +28,32 @@ dependencies {
}

// Produces the template-substitution map for the Docker build context.
// Fixes: the rendered block contained both pre- and post-merge diff lines —
// a duplicate `classifier` declaration, a duplicate 'base_image' map entry and
// a stale 'tini_suffix' entry — plus a typo ("unrecongized") in the error message.
ext.expansions = { architecture, oss, local ->
    String base_image = null
    String tini_arch = null
    String classifier = null
    switch (architecture) {
        case "aarch64":
            base_image = "arm64v8/centos:7"
            tini_arch = "arm64"
            classifier = "linux-aarch64"
            break;
        case "x64":
            base_image = "amd64/centos:7"
            tini_arch = "amd64"
            classifier = "linux-x86_64"
            break;
        default:
            throw new IllegalArgumentException("unrecognized architecture [" + architecture + "], must be one of (aarch64|x64)")
    }
    final String elasticsearch = oss ? "elasticsearch-oss-${VersionProperties.elasticsearch}-${classifier}.tar.gz" : "elasticsearch-${VersionProperties.elasticsearch}-${classifier}.tar.gz"
    return [
        'base_image'          : base_image,
        'build_date'          : BuildParams.buildDate,
        'elasticsearch'       : elasticsearch,
        'git_revision'        : BuildParams.gitRevision,
        'license'             : oss ? 'Apache-2.0' : 'Elastic-License',
        'source_elasticsearch': local ? "COPY $elasticsearch /opt/" : "RUN cd /opt && curl --retry 8 -s -L -O https://artifacts.elastic.co/downloads/elasticsearch/${elasticsearch} && cd -",
        'tini_arch'           : tini_arch,
        'version'             : VersionProperties.elasticsearch
    ]
}
Expand Down
4 changes: 2 additions & 2 deletions distribution/docker/docker-aarch64-build-context/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@ apply plugin: 'base'
// Packages the aarch64 Docker build context as a tar.gz.
// Fix: the rendered block carried both the old and new diff lines, assigning
// archiveClassifier and archiveBaseName twice; only the post-merge values remain
// (classifier carries the architecture so artifacts do not collide).
task buildDockerBuildContext(type: Tar) {
    extension = 'tar.gz'
    compression = Compression.GZIP
    archiveClassifier = "docker-build-context-aarch64"
    archiveBaseName = "elasticsearch"
    with dockerBuildContext("aarch64", false, false)
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@ apply plugin: 'base'
// Packages the aarch64 OSS Docker build context as a tar.gz.
// Fix: duplicate archiveClassifier/archiveBaseName assignments (old + new diff
// lines rendered together) collapsed to the post-merge values only.
task buildOssDockerBuildContext(type: Tar) {
    extension = 'tar.gz'
    compression = Compression.GZIP
    archiveClassifier = "docker-build-context-aarch64"
    archiveBaseName = "elasticsearch-oss"
    with dockerBuildContext("aarch64", true, false)
}

Expand Down
25 changes: 13 additions & 12 deletions distribution/docker/src/docker/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,22 @@
FROM ${base_image} AS builder

# Install build-stage packages, retrying up to 10 times to ride out transient
# mirror failures. Fix: the rendered diff contained two consecutive `yum install`
# lines (pre- and post-merge); only the post-merge line (which adds wget, needed
# below for the tini download) is kept so the && chain is valid shell.
RUN for iter in {1..10}; do yum update --setopt=tsflags=nodocs -y && \
    yum install --setopt=tsflags=nodocs -y wget gzip shadow-utils tar && \
    yum clean all && exit_code=0 && break || exit_code=\$? && echo "yum error: retry \$iter in 10s" && sleep 10; done; \
    (exit \$exit_code)

# `tini` is a tiny but valid init for containers. This is used to cleanly
# control how ES and any child processes are shut down.
#
# The tini GitHub page gives instructions for verifying the binary using
# gpg, but the keyservers are slow to return the key and this can fail the
# build. Instead, we check the binary against a checksum that they provide.
# NOTE(review): the version is pinned to v0.19.0 — when bumping, update both
# download URLs together so the binary and its .sha256sum stay in sync.
RUN wget --no-cookies --quiet https://github.com/krallin/tini/releases/download/v0.19.0/tini-${tini_arch} \
&& wget --no-cookies --quiet https://github.com/krallin/tini/releases/download/v0.19.0/tini-${tini_arch}.sha256sum \
&& sha256sum -c tini-${tini_arch}.sha256sum \
&& mv tini-${tini_arch} /tini \
&& chmod +x /tini

ENV PATH /usr/share/elasticsearch/bin:\$PATH

RUN groupadd -g 1000 elasticsearch && \
Expand All @@ -35,17 +47,6 @@ RUN chmod 0775 config config/jvm.options.d data logs
COPY config/elasticsearch.yml config/log4j2.properties config/
RUN chmod 0660 config/elasticsearch.yml config/log4j2.properties

# `tini` is a tiny but valid init for containers. This is used to cleanly
# control how ES and any child processes are shut down.
#
# The tini GitHub page gives instructions for verifying the binary using
# gpg, but the keyservers are slow to return the key and this can fail the
# build. Instead, we check the binary against a checksum that we have
# computed.
ADD https://github.com/krallin/tini/releases/download/v0.18.0/tini${tini_suffix} /tini
COPY config/tini${tini_suffix}.sha512 /tini.sha512
RUN sha512sum -c /tini.sha512 && chmod +x /tini

################################################################################
# Build stage 1 (the actual elasticsearch image):
# Copy elasticsearch from stage 0
Expand Down
1 change: 0 additions & 1 deletion distribution/docker/src/docker/config/tini-arm64.sha512

This file was deleted.

1 change: 0 additions & 1 deletion distribution/docker/src/docker/config/tini.sha512

This file was deleted.

Loading

0 comments on commit 7f54c45

Please sign in to comment.