Commit 6b78ddc
Merge remote-tracking branch 'origin/main' into apm-downsample
danielmitterdorfer committed Jan 26, 2024
2 parents 995980c + 2a5cd78 commit 6b78ddc
Showing 822 changed files with 9,619 additions and 2,166 deletions.
9 changes: 9 additions & 0 deletions .buildkite/scripts/fixture-deploy.sh
@@ -0,0 +1,9 @@
#!/bin/bash

set -euo pipefail

echo "$DOCKER_REGISTRY_PASSWORD" | docker login -u "$DOCKER_REGISTRY_USERNAME" --password-stdin docker.elastic.co
unset DOCKER_REGISTRY_USERNAME DOCKER_REGISTRY_PASSWORD

docker buildx create --use
.ci/scripts/run-gradle.sh deployFixtureDockerImages
(next changed file)
@@ -30,11 +30,11 @@ public TaskProvider<? extends Task> createTask(Project project) {
.register("validate" + publicationName + "Pom", PomValidationTask.class);
validatePom.configure(t -> t.dependsOn(validateTask));
validateTask.configure(task -> {
GenerateMavenPom generateMavenPom = project.getTasks()
TaskProvider<GenerateMavenPom> generateMavenPom = project.getTasks()
.withType(GenerateMavenPom.class)
.getByName("generatePomFileFor" + publicationName + "Publication");
.named("generatePomFileFor" + publicationName + "Publication");
task.dependsOn(generateMavenPom);
task.getPomFile().fileValue(generateMavenPom.getDestination());
task.getPomFile().fileProvider(generateMavenPom.map(GenerateMavenPom::getDestination));
});
});

(next changed file: DockerBuildTask.java)
@@ -20,8 +20,10 @@
import org.gradle.api.provider.ListProperty;
import org.gradle.api.provider.MapProperty;
import org.gradle.api.provider.Property;
import org.gradle.api.provider.SetProperty;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputDirectory;
import org.gradle.api.tasks.Optional;
import org.gradle.api.tasks.OutputFile;
import org.gradle.api.tasks.PathSensitive;
import org.gradle.api.tasks.PathSensitivity;
@@ -36,14 +38,15 @@
import java.nio.file.Files;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import javax.inject.Inject;

/**
* This task wraps up the details of building a Docker image, including adding a pull
* mechanism that can retry, and emitting the image SHA as a task output.
*/
public class DockerBuildTask extends DefaultTask {
public abstract class DockerBuildTask extends DefaultTask {
private static final Logger LOGGER = Logging.getLogger(DockerBuildTask.class);

private final WorkerExecutor workerExecutor;
@@ -55,15 +58,13 @@ public class DockerBuildTask extends DefaultTask {
private boolean noCache = true;
private String[] baseImages;
private MapProperty<String, String> buildArgs;
private Property<String> platform;

@Inject
public DockerBuildTask(WorkerExecutor workerExecutor, ObjectFactory objectFactory, ProjectLayout projectLayout) {
this.workerExecutor = workerExecutor;
this.markerFile = objectFactory.fileProperty();
this.dockerContext = objectFactory.directoryProperty();
this.buildArgs = objectFactory.mapProperty(String.class, String.class);
this.platform = objectFactory.property(String.class).convention(Architecture.current().dockerPlatform);
this.markerFile.set(projectLayout.getBuildDirectory().file("markers/" + this.getName() + ".marker"));
}

@@ -75,9 +76,10 @@ public void build() {
params.getTags().set(Arrays.asList(tags));
params.getPull().set(pull);
params.getNoCache().set(noCache);
params.getPush().set(getPush().getOrElse(false));
params.getBaseImages().set(Arrays.asList(baseImages));
params.getBuildArgs().set(buildArgs);
params.getPlatform().set(platform);
params.getPlatforms().set(getPlatforms());
});
}

@@ -129,10 +131,16 @@ public MapProperty<String, String> getBuildArgs() {
}

@Input
public Property<String> getPlatform() {
return platform;
public abstract SetProperty<String> getPlatforms();

public void setPlatform(String platform) {
getPlatforms().set(Arrays.asList(platform));
}

@Input
@Optional
public abstract Property<Boolean> getPush();

@OutputFile
public RegularFileProperty getMarkerFile() {
return markerFile;
@@ -181,7 +189,7 @@ public void execute() {
}

final List<String> tags = parameters.getTags().get();
final boolean isCrossPlatform = parameters.getPlatform().get().equals(Architecture.current().dockerPlatform) == false;
final boolean isCrossPlatform = isCrossPlatform();

LoggedExec.exec(execOperations, spec -> {
spec.executable("docker");
@@ -193,7 +201,7 @@ public void execute() {
spec.args("build", parameters.getDockerContext().get().getAsFile().getAbsolutePath());

if (isCrossPlatform) {
spec.args("--platform", parameters.getPlatform().get());
spec.args("--platform", parameters.getPlatforms().get().stream().collect(Collectors.joining(",")));
}

if (parameters.getNoCache().get()) {
@@ -203,18 +211,34 @@
tags.forEach(tag -> spec.args("--tag", tag));

parameters.getBuildArgs().get().forEach((k, v) -> spec.args("--build-arg", k + "=" + v));

if (parameters.getPush().getOrElse(false)) {
spec.args("--push");
}
});

// Fetch the Docker image's hash, and write it to disk as the task's output. Doing this allows us
// to do proper up-to-date checks in Gradle.
try {
// multi-platform image builds do not end up in the local registry, so we need to pull the just-built image
// first to get the checksum; this also serves as a test that the image was pushed correctly
if (parameters.getPlatforms().get().size() > 1 && parameters.getPush().getOrElse(false)) {
pullBaseImage(tags.get(0));
}
final String checksum = getImageChecksum(tags.get(0));
Files.writeString(parameters.getMarkerFile().getAsFile().get().toPath(), checksum + "\n");
} catch (IOException e) {
throw new RuntimeException("Failed to write marker file", e);
}
}

private boolean isCrossPlatform() {
return getParameters().getPlatforms()
.get()
.stream()
.anyMatch(any -> any.equals(Architecture.current().dockerPlatform) == false);
}

private String getImageChecksum(String imageTag) {
final ByteArrayOutputStream stdout = new ByteArrayOutputStream();

@@ -243,6 +267,8 @@ interface Parameters extends WorkParameters {

MapProperty<String, String> getBuildArgs();

Property<String> getPlatform();
SetProperty<String> getPlatforms();

Property<Boolean> getPush();
}
}
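For orientation, here is a minimal usage sketch (not part of this commit) of the reworked task surface above; the plugin class, task name and platform values are illustrative assumptions, and DockerBuildTask is assumed to be importable from the plugin's classpath.

// Hypothetical wiring of the reworked DockerBuildTask (illustrative sketch, not Elasticsearch code).
import org.gradle.api.Plugin;
import org.gradle.api.Project;

public class ExampleDockerImagePlugin implements Plugin<Project> {
    @Override
    public void apply(Project project) {
        project.getTasks().register("buildExampleDockerImage", DockerBuildTask.class, task -> {
            // Several platforms can now be requested at once; setPlatform(String) remains as a
            // single-platform convenience that seeds the new SetProperty.
            task.getPlatforms().addAll("linux/amd64", "linux/arm64");
            // Pushing is optional. When a pushed build covers multiple platforms, the task pulls the
            // image back afterwards so it can record the checksum in its marker file, which is what
            // drives Gradle's up-to-date checks.
            task.getPush().set(true);
        });
    }
}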
(next changed file)
@@ -28,6 +28,7 @@

import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
@@ -187,7 +188,7 @@ public void checkDependencies() {
}
File licensesDirAsFile = licensesDir.get().getAsFile();
if (dependencies.isEmpty()) {
if (licensesDirAsFile.exists()) {
if (licensesDirAsFile.exists() && allIgnored() == false) {
throw new GradleException("Licenses dir " + licensesDirAsFile + " exists, but there are no dependencies");
}
return; // no dependencies to check
@@ -227,6 +228,10 @@ public void checkDependencies() {
sources.forEach((item, exists) -> failIfAnyMissing(item, exists, "sources"));
}

private boolean allIgnored() {
return Arrays.asList(getLicensesDir().listFiles()).stream().map(f -> f.getName()).allMatch(ignoreFiles::contains);
}

// This is just a marker output folder that allows this task to be up-to-date.
// The check logic is exception driven, so a failed task will not be defined
// by this output, but when successful we can safely mark the task as up-to-date.
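The comment above describes a common Gradle pattern: the task signals failure by throwing, and its only declared output is a marker used for up-to-date checking. A generic sketch of that pattern follows; the class and property names are hypothetical, not taken from this commit.

// Generic marker-output pattern (illustrative sketch, not Elasticsearch code).
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.gradle.api.DefaultTask;
import org.gradle.api.GradleException;
import org.gradle.api.file.DirectoryProperty;
import org.gradle.api.tasks.OutputDirectory;
import org.gradle.api.tasks.TaskAction;

public abstract class ExampleCheckTask extends DefaultTask {

    // Never read by anything; it only exists so Gradle has an output to fingerprint.
    @OutputDirectory
    public abstract DirectoryProperty getMarkerDir();

    @TaskAction
    public void check() throws IOException {
        if (somethingIsWrong()) {
            // Failure is signalled by throwing, not by writing an output.
            throw new GradleException("check failed");
        }
        // On success, materialise the marker so the task can be up-to-date on the next run.
        Path markerDir = getMarkerDir().get().getAsFile().toPath();
        Files.createDirectories(markerDir);
    }

    private boolean somethingIsWrong() {
        return false; // placeholder for the real validation logic
    }
}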
(next changed file: UpdateVersionsTask.java)
@@ -8,13 +8,17 @@

package org.elasticsearch.gradle.internal.release;

import com.github.javaparser.GeneratedJavaParserConstants;
import com.github.javaparser.StaticJavaParser;
import com.github.javaparser.ast.CompilationUnit;
import com.github.javaparser.ast.NodeList;
import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration;
import com.github.javaparser.ast.body.FieldDeclaration;
import com.github.javaparser.ast.body.VariableDeclarator;
import com.github.javaparser.ast.expr.NameExpr;
import com.github.javaparser.ast.observer.ObservableProperty;
import com.github.javaparser.printer.ConcreteSyntaxModel;
import com.github.javaparser.printer.concretesyntaxmodel.CsmElement;
import com.github.javaparser.printer.lexicalpreservation.LexicalPreservingPrinter;
import com.google.common.annotations.VisibleForTesting;

@@ -27,6 +31,7 @@
import org.gradle.initialization.layout.BuildLayout;

import java.io.IOException;
import java.lang.reflect.Field;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
@@ -42,7 +47,84 @@
import javax.annotation.Nullable;
import javax.inject.Inject;

import static com.github.javaparser.ast.observer.ObservableProperty.TYPE_PARAMETERS;
import static com.github.javaparser.printer.concretesyntaxmodel.CsmConditional.Condition.FLAG;
import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.block;
import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.child;
import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.comma;
import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.comment;
import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.conditional;
import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.list;
import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.newline;
import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.none;
import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.sequence;
import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.space;
import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.string;
import static com.github.javaparser.printer.concretesyntaxmodel.CsmElement.token;

public class UpdateVersionsTask extends DefaultTask {

static {
replaceDefaultJavaParserClassCsm();
}

/*
* The default JavaParser CSM, which JavaParser uses to format any new declarations added to a class,
* inserts two newlines after each declaration. Our version classes only have one newline.
* In order to get the JavaParser lexical printer to use our format, we have to completely replace
* the statically declared CSM pattern, using hacky reflection to access the static map where these
* patterns are stored and inserting a replacement that is identical apart from having only one
* newline at the end of each member declaration, rather than two.
*/
private static void replaceDefaultJavaParserClassCsm() {
try {
Field classCsms = ConcreteSyntaxModel.class.getDeclaredField("concreteSyntaxModelByClass");
classCsms.setAccessible(true);
@SuppressWarnings({ "unchecked", "rawtypes" })
Map<Class, CsmElement> csms = (Map) classCsms.get(null);

// copied from the static initializer in ConcreteSyntaxModel
csms.put(
ClassOrInterfaceDeclaration.class,
sequence(
comment(),
list(ObservableProperty.ANNOTATIONS, newline(), none(), newline()),
list(ObservableProperty.MODIFIERS, space(), none(), space()),
conditional(
ObservableProperty.INTERFACE,
FLAG,
token(GeneratedJavaParserConstants.INTERFACE),
token(GeneratedJavaParserConstants.CLASS)
),
space(),
child(ObservableProperty.NAME),
list(
TYPE_PARAMETERS,
sequence(comma(), space()),
string(GeneratedJavaParserConstants.LT),
string(GeneratedJavaParserConstants.GT)
),
list(
ObservableProperty.EXTENDED_TYPES,
sequence(string(GeneratedJavaParserConstants.COMMA), space()),
sequence(space(), token(GeneratedJavaParserConstants.EXTENDS), space()),
none()
),
list(
ObservableProperty.IMPLEMENTED_TYPES,
sequence(string(GeneratedJavaParserConstants.COMMA), space()),
sequence(space(), token(GeneratedJavaParserConstants.IMPLEMENTS), space()),
none()
),
space(),
block(sequence(newline(), list(ObservableProperty.MEMBERS, sequence(newline()/*, newline()*/), newline(), newline())))
)
);
} catch (ReflectiveOperationException e) {
throw new AssertionError(e);
}
}

private static final Logger LOGGER = Logging.getLogger(UpdateVersionsTask.class);

static final String SERVER_MODULE_PATH = "server/src/main/java/";
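To see why the replacement above matters: members added to a parsed class after LexicalPreservingPrinter.setup(...) are printed through the statically registered ConcreteSyntaxModel, so the patched pattern controls how many blank lines follow each new declaration. A small standalone sketch, assuming stock JavaParser APIs (not Elasticsearch code):

// Demo: a field added after lexical-preservation setup is formatted by the class CSM.
import com.github.javaparser.StaticJavaParser;
import com.github.javaparser.ast.CompilationUnit;
import com.github.javaparser.ast.Modifier.Keyword;
import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration;
import com.github.javaparser.printer.lexicalpreservation.LexicalPreservingPrinter;

public class CsmNewlineDemo {
    public static void main(String[] args) {
        CompilationUnit unit = StaticJavaParser.parse(
            "public class Versions {\n    public static final int V_1 = 1;\n}\n"
        );
        LexicalPreservingPrinter.setup(unit);
        ClassOrInterfaceDeclaration versions = unit.getClassByName("Versions").orElseThrow();
        // With the default CSM this new field is followed by two newlines; the patched CSM emits one.
        versions.addField("int", "V_2", Keyword.PUBLIC, Keyword.STATIC, Keyword.FINAL);
        System.out.println(LexicalPreservingPrinter.print(unit));
    }
}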
(next changed file)
@@ -63,8 +63,17 @@ public void execute(DependencyLicensesTask dependencyLicensesTask) {
public void givenProjectWithLicensesDirButNoDependenciesThenShouldThrowException() throws Exception {
expectedException.expect(GradleException.class);
expectedException.expectMessage(containsString("exists, but there are no dependencies"));
getLicensesDir(project).mkdir();
createFileIn(getLicensesDir(project), "groovy-LICENSE.txt", PERMISSIVE_LICENSE_TEXT);
task.get().checkDependencies();
}

@Test
public void givenProjectWithLicensesDirButAllIgnoreFileAndNoDependencies() throws Exception {
getLicensesDir(project).mkdir();
String licenseFileName = "cloudcarbonfootprint-LICENSE.txt";
createFileIn(getLicensesDir(project), licenseFileName, PERMISSIVE_LICENSE_TEXT);
task.get().ignoreFile(licenseFileName);
task.get().checkDependencies();
}

(next changed file: NoopPlugin.java)
@@ -34,8 +34,8 @@

public class NoopPlugin extends Plugin implements ActionPlugin {

public static final ActionType<SearchResponse> NOOP_SEARCH_ACTION = new ActionType<>("mock:data/read/search", SearchResponse::new);
public static final ActionType<BulkResponse> NOOP_BULK_ACTION = new ActionType<>("mock:data/write/bulk", BulkResponse::new);
public static final ActionType<SearchResponse> NOOP_SEARCH_ACTION = new ActionType<>("mock:data/read/search");
public static final ActionType<BulkResponse> NOOP_BULK_ACTION = new ActionType<>("mock:data/write/bulk");

@Override
public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
4 changes: 2 additions & 2 deletions distribution/docker/build.gradle
@@ -398,7 +398,7 @@ void addBuildDockerImageTask(Architecture architecture, DockerBase base) {

noCache = BuildParams.isCi
tags = generateTags(base, architecture)
platform = architecture.dockerPlatform
platforms.add(architecture.dockerPlatform)

// We don't build the Iron Bank image when we release Elasticsearch, as there's
// a separate process for submitting new releases. However, for testing we do a
@@ -468,7 +468,7 @@ void addBuildEssDockerImageTask(Architecture architecture) {
noCache = BuildParams.isCi
baseImages = []
tags = generateTags(base, architecture)
platform = architecture.dockerPlatform
platforms.add(architecture.dockerPlatform)

onlyIf("$architecture supported") { isArchitectureSupported(architecture) }
}
5 changes: 5 additions & 0 deletions docs/changelog/104559.yaml
@@ -0,0 +1,5 @@
pr: 104559
summary: Adding support for Cohere inference service
area: Machine Learning
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/104591.yaml
@@ -0,0 +1,5 @@
pr: 104591
summary: Avoid execute ESQL planning on refresh thread
area: ES|QL
type: bug
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/104643.yaml
@@ -0,0 +1,5 @@
pr: 104643
summary: "[Connectors API] Implement update service type action"
area: Application
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/104654.yaml
@@ -0,0 +1,5 @@
pr: 104654
summary: "[Connectors API] Implement update native action endpoint"
area: Application
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/104674.yaml
@@ -0,0 +1,5 @@
pr: 104674
summary: "[Profiling] Speed up processing of stacktraces"
area: Application
type: enhancement
issues: []
6 changes: 6 additions & 0 deletions docs/changelog/104718.yaml
@@ -0,0 +1,6 @@
pr: 104718
summary: "ESQL: Fix replacement of nested expressions in aggs with multiple parameters"
area: ES|QL
type: bug
issues:
- 104706
6 changes: 6 additions & 0 deletions docs/changelog/104722.yaml
@@ -0,0 +1,6 @@
pr: 104722
summary: Avoid possible datafeed infinite loop with filtering aggregations
area: Machine Learning
type: bug
issues:
- 104699
(The remaining changed files are not shown.)

0 comments on commit 6b78ddc
