Skip to content

Commit

Permalink
Merge branch 'main' into serverless/cat_filtered_actions
Browse files Browse the repository at this point in the history
  • Loading branch information
rjernst committed Sep 14, 2023
2 parents 7b68411 + c84f20b commit a499edc
Show file tree
Hide file tree
Showing 552 changed files with 9,379 additions and 4,203 deletions.
36 changes: 22 additions & 14 deletions .buildkite/scripts/periodic.trigger.sh
100644 → 100755
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,6 @@

set -euo pipefail

exit 0

echo "steps:"

source .buildkite/scripts/branches.sh
Expand All @@ -14,23 +12,33 @@ for BRANCH in "${BRANCHES[@]}"; do
LAST_GOOD_COMMIT=$(echo "${BUILD_JSON}" | jq -r '.commit')

cat <<EOF
- trigger: elasticsearch-periodic
label: Trigger periodic pipeline for $BRANCH
async: true
build:
branch: "$BRANCH"
commit: "$LAST_GOOD_COMMIT"
- trigger: elasticsearch-periodic-packaging
label: Trigger periodic-packaging pipeline for $BRANCH
async: true
build:
branch: "$BRANCH"
commit: "$LAST_GOOD_COMMIT"
- trigger: elasticsearch-periodic-platform-support
label: Trigger periodic-platform-support pipeline for $BRANCH
async: true
build:
branch: "$BRANCH"
commit: "$LAST_GOOD_COMMIT"
EOF

### Only platform-support enabled for right now
# cat <<EOF
# - trigger: elasticsearch-periodic
# label: Trigger periodic pipeline for $BRANCH
# async: true
# build:
# branch: "$BRANCH"
# commit: "$LAST_GOOD_COMMIT"
# - trigger: elasticsearch-periodic-packaging
# label: Trigger periodic-packaging pipeline for $BRANCH
# async: true
# build:
# branch: "$BRANCH"
# commit: "$LAST_GOOD_COMMIT"
# - trigger: elasticsearch-periodic-platform-support
# label: Trigger periodic-platform-support pipeline for $BRANCH
# async: true
# build:
# branch: "$BRANCH"
# commit: "$LAST_GOOD_COMMIT"
# EOF
done
20 changes: 7 additions & 13 deletions CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -659,11 +659,11 @@ numbering scheme separate to release version. The main ones are
`TransportVersion` and `IndexVersion`, representing the version of the
inter-node binary protocol and index data + metadata respectively.

Separated version numbers are comprised of a simple incrementing number,
with no semantic versioning information. There is no direct mapping between
separated version numbers and the release version. The versions used by any
particular instance of Elasticsearch can be obtained by querying `/`
on the node.
Separated version numbers are comprised of an integer number. The semantic
meaning of a version number is defined within each `*Version` class. There
is no direct mapping between separated version numbers and the release version.
The versions used by any particular instance of Elasticsearch can be obtained
by querying `/_nodes/info` on the node.

#### Using separated version numbers

Expand All @@ -674,14 +674,8 @@ number, there are a few rules that need to be followed:
and should not be modified once it is defined. Each version is immutable
once merged into `main`.
2. To create a new component version, add a new constant to the respective class
using the preceding version number +1, modify the version id string to a new
unique string (normally a UUID), and set that constant as the new current
version.

The version ID string in the constant definition is not used in the executing
code; it is there to ensure that if two concurrent pull requests add the same
version constant, there will be a git conflict on those lines. This is to ensure
two PRs don't accidentally use the same version constant.
with a descriptive name of the change being made. Increment the integer
number according to the particular `*Version` class.

If your pull request has a conflict around your new version constant,
you need to update your PR from `main` and change your PR to use the next
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import org.elasticsearch.compute.data.LongBlock;
import org.elasticsearch.compute.data.LongVector;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.DriverContext;
import org.elasticsearch.compute.operator.EvalOperator;
import org.elasticsearch.compute.operator.Operator;
import org.elasticsearch.core.TimeValue;
Expand Down Expand Up @@ -79,14 +80,14 @@ private static EvalOperator.ExpressionEvaluator evaluator(String operation) {
return switch (operation) {
case "abs" -> {
FieldAttribute longField = longField();
yield EvalMapper.toEvaluator(new Abs(Source.EMPTY, longField), layout(longField)).get();
yield EvalMapper.toEvaluator(new Abs(Source.EMPTY, longField), layout(longField)).get(new DriverContext());
}
case "add" -> {
FieldAttribute longField = longField();
yield EvalMapper.toEvaluator(
new Add(Source.EMPTY, longField, new Literal(Source.EMPTY, 1L, DataTypes.LONG)),
layout(longField)
).get();
).get(new DriverContext());
}
case "date_trunc" -> {
FieldAttribute timestamp = new FieldAttribute(
Expand All @@ -97,28 +98,28 @@ private static EvalOperator.ExpressionEvaluator evaluator(String operation) {
yield EvalMapper.toEvaluator(
new DateTrunc(Source.EMPTY, new Literal(Source.EMPTY, Duration.ofHours(24), EsqlDataTypes.TIME_DURATION), timestamp),
layout(timestamp)
).get();
).get(new DriverContext());
}
case "equal_to_const" -> {
FieldAttribute longField = longField();
yield EvalMapper.toEvaluator(
new Equals(Source.EMPTY, longField, new Literal(Source.EMPTY, 100_000L, DataTypes.LONG)),
layout(longField)
).get();
).get(new DriverContext());
}
case "long_equal_to_long" -> {
FieldAttribute lhs = longField();
FieldAttribute rhs = longField();
yield EvalMapper.toEvaluator(new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get();
yield EvalMapper.toEvaluator(new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get(new DriverContext());
}
case "long_equal_to_int" -> {
FieldAttribute lhs = longField();
FieldAttribute rhs = intField();
yield EvalMapper.toEvaluator(new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get();
yield EvalMapper.toEvaluator(new Equals(Source.EMPTY, lhs, rhs), layout(lhs, rhs)).get(new DriverContext());
}
case "mv_min", "mv_min_ascending" -> {
FieldAttribute longField = longField();
yield EvalMapper.toEvaluator(new MvMin(Source.EMPTY, longField), layout(longField)).get();
yield EvalMapper.toEvaluator(new MvMin(Source.EMPTY, longField), layout(longField)).get(new DriverContext());
}
default -> throw new UnsupportedOperationException();
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,13 @@
import org.elasticsearch.compute.data.BooleanBlock;
import org.elasticsearch.compute.data.BytesRefBlock;
import org.elasticsearch.compute.data.DoubleBlock;
import org.elasticsearch.compute.data.ElementType;
import org.elasticsearch.compute.data.IntBlock;
import org.elasticsearch.compute.data.LongBlock;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.Operator;
import org.elasticsearch.compute.operator.TopNOperator;
import org.elasticsearch.compute.operator.topn.TopNEncoder;
import org.elasticsearch.compute.operator.topn.TopNOperator;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
Expand Down Expand Up @@ -77,8 +79,27 @@ private static Operator operator(String data, int topCount) {
case TWO_LONGS, LONGS_AND_BYTES_REFS -> 2;
default -> throw new IllegalArgumentException("unsupported data type [" + data + "]");
};
List<ElementType> elementTypes = switch (data) {
case LONGS -> List.of(ElementType.LONG);
case INTS -> List.of(ElementType.INT);
case DOUBLES -> List.of(ElementType.DOUBLE);
case BOOLEANS -> List.of(ElementType.BOOLEAN);
case BYTES_REFS -> List.of(ElementType.BYTES_REF);
case TWO_LONGS -> List.of(ElementType.INT, ElementType.INT);
case LONGS_AND_BYTES_REFS -> List.of(ElementType.INT, ElementType.BYTES_REF);
default -> throw new IllegalArgumentException("unsupported data type [" + data + "]");
};
List<TopNEncoder> encoders = switch (data) {
case LONGS, INTS, DOUBLES, BOOLEANS -> List.of(TopNEncoder.DEFAULT_SORTABLE);
case BYTES_REFS -> List.of(TopNEncoder.UTF8);
case TWO_LONGS -> List.of(TopNEncoder.DEFAULT_SORTABLE, TopNEncoder.DEFAULT_SORTABLE);
case LONGS_AND_BYTES_REFS -> List.of(TopNEncoder.DEFAULT_SORTABLE, TopNEncoder.UTF8);
default -> throw new IllegalArgumentException("unsupported data type [" + data + "]");
};
return new TopNOperator(
topCount,
elementTypes,
encoders,
IntStream.range(0, count).mapToObj(c -> new TopNOperator.SortOrder(c, false, false)).toList(),
16 * 1024
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@
import org.elasticsearch.compute.lucene.LuceneSourceOperator;
import org.elasticsearch.compute.lucene.ValueSourceInfo;
import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator;
import org.elasticsearch.compute.operator.TopNOperator;
import org.elasticsearch.compute.operator.topn.TopNOperator;
import org.elasticsearch.core.IOUtils;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
Expand Down
3 changes: 0 additions & 3 deletions branches.json
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,6 @@
{
"branch": "8.10"
},
{
"branch": "8.9"
},
{
"branch": "7.17"
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,10 @@ public class InternalTestArtifactPlugin implements Plugin<Project> {
public void apply(Project project) {
project.getPlugins().apply(InternalTestArtifactBasePlugin.class);
InternalTestArtifactExtension testArtifactExtension = project.getExtensions().getByType(InternalTestArtifactExtension.class);
SourceSet testSourceSet = project.getExtensions().getByType(SourceSetContainer.class).getByName("test");
testArtifactExtension.registerTestArtifactFromSourceSet(testSourceSet);
project.getExtensions().getByType(SourceSetContainer.class).all(sourceSet -> {
if (sourceSet.getName().equals(SourceSet.MAIN_SOURCE_SET_NAME) == false) {
testArtifactExtension.registerTestArtifactFromSourceSet(sourceSet);
}
});
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
import org.gradle.api.artifacts.Dependency;
import org.gradle.api.artifacts.ProjectDependency;
import org.gradle.api.artifacts.type.ArtifactTypeDefinition;
import org.gradle.api.attributes.Attribute;
import org.gradle.api.file.FileTree;
import org.gradle.api.provider.ProviderFactory;
import org.gradle.api.tasks.ClasspathNormalizer;
Expand Down Expand Up @@ -69,6 +70,7 @@ public class RestTestBasePlugin implements Plugin<Project> {
private static final String MODULES_CONFIGURATION = "clusterModules";
private static final String PLUGINS_CONFIGURATION = "clusterPlugins";
private static final String EXTRACTED_PLUGINS_CONFIGURATION = "extractedPlugins";
private static final Attribute<String> CONFIGURATION_ATTRIBUTE = Attribute.of("test-cluster-artifacts", String.class);

private final ProviderFactory providerFactory;

Expand Down Expand Up @@ -249,6 +251,7 @@ private Optional<String> findModulePath(Project project, String pluginName) {

private Configuration createPluginConfiguration(Project project, String name, boolean useExploded, boolean isExtended) {
return project.getConfigurations().create(name, c -> {
c.attributes(a -> a.attribute(CONFIGURATION_ATTRIBUTE, name));
if (useExploded) {
c.attributes(a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE));
} else {
Expand Down
39 changes: 0 additions & 39 deletions catalog-info.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -98,45 +98,6 @@ spec:
build_pull_requests: false
publish_commit_status: false
trigger_mode: none
# ---
# # yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json
# apiVersion: backstage.io/v1alpha1
# kind: Resource
# metadata:
# name: buildkite-pipeline-elasticsearch-periodic-trigger
# description: Triggers periodic pipelines for all required branches
# links:
# - title: Pipeline
# url: https://buildkite.com/elastic/elasticsearch-periodic-trigger
# spec:
# type: buildkite-pipeline
# system: buildkite
# owner: group:elasticsearch-team
# implementation:
# apiVersion: buildkite.elastic.dev/v1
# kind: Pipeline
# metadata:
# description: ":elasticsearch: Triggers periodic pipelines for all required branches"
# name: elasticsearch / periodic / trigger
# spec:
# repository: elastic/elasticsearch
# pipeline_file: .buildkite/scripts/periodic.trigger.sh
# branch_configuration: main
# teams:
# elasticsearch-team: {}
# ml-core: {}
# everyone:
# access_level: BUILD_AND_READ
# provider_settings:
# build_branches: false
# build_pull_requests: false
# publish_commit_status: false
# trigger_mode: none
# schedules:
# Periodically on main:
# branch: main
# cronline: "0 0,8,16 * * * America/New_York"
# message: "Triggers pipelines 3x daily"
---
# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json
apiVersion: backstage.io/v1alpha1
Expand Down
6 changes: 6 additions & 0 deletions docs/changelog/97317.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
pr: 97317
summary: "Fix merges of mappings with `subobjects: false` for composable index templates"
area: Mapping
type: bug
issues:
- 96768
6 changes: 6 additions & 0 deletions docs/changelog/98244.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
pr: 98244
summary: Optimize ContentPath#pathAsText
area: Search
type: enhancement
issues:
- 94544
5 changes: 5 additions & 0 deletions docs/changelog/99054.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
pr: 99054
summary: "ESQL: Mark counter fields as unsupported"
area: ES|QL
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/99316.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
pr: 99316
summary: "ESQL: Compact topn"
area: ES|QL
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/99346.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
pr: 99346
summary: Automatically disable `ignore_malformed` on datastream `@timestamp` fields
area: Mapping
type: bug
issues: []
6 changes: 6 additions & 0 deletions docs/changelog/99491.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
pr: 99491
summary: Use long in Centroid count
area: Aggregations
type: bug
issues:
- 80153
Loading

0 comments on commit a499edc

Please sign in to comment.