Skip to content

Commit

Permalink
Merge branch '8.x' into lucene_9_12_1
Browse files — browse the repository at this point in the history
  • Loading branch information
ChrisHegarty authored Dec 11, 2024
2 parents 033d3fd + 3491312 commit 389e6c3
Show file tree
Hide file tree
Showing 119 changed files with 1,510 additions and 464 deletions.
2 changes: 1 addition & 1 deletion build-tools-internal/version.properties
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ commonscodec = 1.15
protobuf = 3.25.5

# test dependencies
randomizedrunner = 2.8.0
randomizedrunner = 2.8.2
junit = 4.13.2
junit5 = 5.7.1
hamcrest = 2.1
Expand Down
6 changes: 6 additions & 0 deletions docs/changelog/118177.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
pr: 118177
summary: Fixing bedrock event executor terminated cache issue
area: Machine Learning
type: bug
issues:
- 117916
5 changes: 5 additions & 0 deletions docs/changelog/118267.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
pr: 118267
summary: Adding get migration reindex status
area: Data streams
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/118354.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
pr: 118354
summary: Fix log message format bugs
area: Ingest Node
type: bug
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/118369.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
pr: 118369
summary: "[8.x] Update sparse text embeddings API route for Inference Service"
area: Inference
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/118378.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
pr: 118378
summary: Opt into extra data stream resolution
area: ES|QL
type: bug
issues: []
2 changes: 1 addition & 1 deletion docs/plugins/analysis-nori.asciidoc
Original file line number Diff line number Diff line change
Expand Up @@ -475,7 +475,7 @@ The input is untokenized text and the result is the single term attribute emitte
- 영영칠 -> 7
- 일영영영 -> 1000
- 삼천2백2십삼 -> 3223
- 조육백만오천일 -> 1000006005001
- 일조육백만오천일 -> 1000006005001
- 3.2천 -> 3200
- 1.2만345.67 -> 12345.67
- 4,647.100 -> 4647.1
Expand Down
15 changes: 10 additions & 5 deletions gradle/verification-metadata.xml
Original file line number Diff line number Diff line change
Expand Up @@ -179,6 +179,11 @@
<sha256 value="3180617871494fe5585e93d0986fc3eb556ade2e64076730917d3a67e3928a24" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="com.carrotsearch.randomizedtesting" name="randomizedtesting-runner" version="2.8.2">
<artifact name="randomizedtesting-runner-2.8.2.jar">
<sha256 value="01a3551f40b56265ba117125311b4fe6865355c179f2dfc354d67f09839bc7a9" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="com.cedarsoftware" name="java-util" version="1.9.0">
<artifact name="java-util-1.9.0.jar">
<sha256 value="b81bdfc4ba11bbc88742ea14e8832d0a1031dd628868f5abbdabb8f8b98705dc" origin="Generated by Gradle"/>
Expand Down Expand Up @@ -4473,11 +4478,11 @@
<sha256 value="6e24913b021ffacfe8e7e053d6e0ccc731941148cfa078d4f1ed3d96904530f8" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="org.ow2.asm" name="asm-util" version="9.7.1">
<artifact name="asm-util-9.7.1.jar">
<sha256 value="f885be71b5c90556f5f1ad1c4f9276b29b96057c497d46666fe4ddbec3cb43c6" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="org.ow2.asm" name="asm-util" version="9.7.1">
<artifact name="asm-util-9.7.1.jar">
<sha256 value="f885be71b5c90556f5f1ad1c4f9276b29b96057c497d46666fe4ddbec3cb43c6" origin="Generated by Gradle"/>
</artifact>
</component>
<component group="org.reactivestreams" name="reactive-streams" version="1.0.4">
<artifact name="reactive-streams-1.0.4.jar">
<sha256 value="f75ca597789b3dac58f61857b9ac2e1034a68fa672db35055a8fb4509e325f28" origin="Generated by Gradle"/>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@

package org.elasticsearch.nativeaccess.jdk;

import org.elasticsearch.logging.LogManager;
import org.elasticsearch.logging.Logger;
import org.elasticsearch.nativeaccess.VectorSimilarityFunctions;
import org.elasticsearch.nativeaccess.lib.LoaderHelper;
import org.elasticsearch.nativeaccess.lib.VectorLibrary;
Expand All @@ -25,6 +27,8 @@

public final class JdkVectorLibrary implements VectorLibrary {

static final Logger logger = LogManager.getLogger(JdkVectorLibrary.class);

static final MethodHandle dot7u$mh;
static final MethodHandle sqr7u$mh;

Expand All @@ -36,6 +40,7 @@ public final class JdkVectorLibrary implements VectorLibrary {

try {
int caps = (int) vecCaps$mh.invokeExact();
logger.info("vec_caps=" + caps);
if (caps != 0) {
if (caps == 2) {
dot7u$mh = downcallHandle(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -987,7 +987,7 @@ public void onResponse(Void unused) {
// should be no other processes interacting with the repository.
logger.warn(
Strings.format(
"failed to clean up multipart upload [{}] of blob [{}][{}][{}]",
"failed to clean up multipart upload [%s] of blob [%s][%s][%s]",
abortMultipartUploadRequest.getUploadId(),
blobStore.getRepositoryMetadata().name(),
abortMultipartUploadRequest.getBucketName(),
Expand Down
6 changes: 6 additions & 0 deletions muted-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -441,3 +441,9 @@ tests:
- class: org.elasticsearch.xpack.application.HuggingFaceServiceUpgradeIT
method: testHFEmbeddings {upgradedNodes=1}
issue: https://github.com/elastic/elasticsearch/issues/118197
- class: org.elasticsearch.reservedstate.service.RepositoriesFileSettingsIT
method: testSettingsApplied
issue: https://github.com/elastic/elasticsearch/issues/116694
- class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT
method: test {lookup-join.LookupMessageFromIndexKeep ASYNC}
issue: https://github.com/elastic/elasticsearch/issues/118399
Original file line number Diff line number Diff line change
Expand Up @@ -264,7 +264,7 @@ private String getRollupIndexName() throws IOException {
if (asMap.size() == 1) {
return (String) asMap.keySet().toArray()[0];
}
logger.warn("--> No matching rollup name for path [%s]", endpoint);
logger.warn("--> No matching rollup name for path [{}]", endpoint);
return null;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -239,7 +239,7 @@ private String getRollupIndexName() throws IOException {
if (asMap.size() == 1) {
return (String) asMap.keySet().toArray()[0];
}
logger.warn("--> No matching rollup name for path [%s]", endpoint);
logger.warn("--> No matching rollup name for path [{}]", endpoint);
return null;
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
{
"migrate.get_reindex_status":{
"documentation":{
"url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex.html",
"description":"This API returns the status of a migration reindex attempt for a data stream or index"
},
"stability":"experimental",
"visibility":"private",
"headers":{
"accept": [ "application/json"],
"content_type": ["application/json"]
},
"url":{
"paths":[
{
"path":"/_migration/reindex/{index}/_status",
"methods":[
"GET"
],
"parts":{
"index":{
"type":"string",
"description":"The index or data stream name"
}
}
}
]
}
}
}

Original file line number Diff line number Diff line change
Expand Up @@ -336,7 +336,7 @@ public void testRetentionLeasesSyncOnRecovery() throws Exception {
.getShardOrNull(new ShardId(resolveIndex("index"), 0));
final int length = randomIntBetween(1, 8);
final Map<String, RetentionLease> currentRetentionLeases = new LinkedHashMap<>();
logger.info("adding retention [{}}] leases", length);
logger.info("adding retention [{}] leases", length);
for (int i = 0; i < length; i++) {
final String id = randomValueOtherThanMany(currentRetentionLeases.keySet()::contains, () -> randomAlphaOfLength(8));
final long retainingSequenceNumber = randomLongBetween(0, Long.MAX_VALUE);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.routing.ShardIterator;
Expand Down Expand Up @@ -84,7 +85,7 @@ protected void masterOperation(
String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, request);
Map<String, Set<String>> routingMap = indexNameExpressionResolver.resolveSearchRouting(state, request.routing(), request.indices());
Map<String, AliasFilter> indicesAndFilters = new HashMap<>();
Set<String> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices());
Set<ResolvedExpression> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices());
for (String index : concreteIndices) {
final AliasFilter aliasFilter = indicesService.buildAliasFilter(clusterState, index, indicesAndAliases);
final String[] aliases = indexNameExpressionResolver.indexAliases(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import org.elasticsearch.cluster.metadata.IndexAbstraction;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
Expand Down Expand Up @@ -565,8 +566,8 @@ static void resolveIndices(
if (names.length == 1 && (Metadata.ALL.equals(names[0]) || Regex.isMatchAllPattern(names[0]))) {
names = new String[] { "**" };
}
Set<String> resolvedIndexAbstractions = resolver.resolveExpressions(clusterState, indicesOptions, true, names);
for (String s : resolvedIndexAbstractions) {
Set<ResolvedExpression> resolvedIndexAbstractions = resolver.resolveExpressions(clusterState, indicesOptions, true, names);
for (ResolvedExpression s : resolvedIndexAbstractions) {
enrichIndexAbstraction(clusterState, s, indices, aliases, dataStreams);
}
indices.sort(Comparator.comparing(ResolvedIndexAbstraction::getName));
Expand Down Expand Up @@ -597,12 +598,12 @@ private static void mergeResults(

private static void enrichIndexAbstraction(
ClusterState clusterState,
String indexAbstraction,
ResolvedExpression indexAbstraction,
List<ResolvedIndex> indices,
List<ResolvedAlias> aliases,
List<ResolvedDataStream> dataStreams
) {
IndexAbstraction ia = clusterState.metadata().getIndicesLookup().get(indexAbstraction);
IndexAbstraction ia = clusterState.metadata().getIndicesLookup().get(indexAbstraction.resource());
if (ia != null) {
switch (ia.getType()) {
case CONCRETE_INDEX -> {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
Expand Down Expand Up @@ -133,7 +134,7 @@ protected void doExecute(Task task, ValidateQueryRequest request, ActionListener
@Override
protected ShardValidateQueryRequest newShardRequest(int numShards, ShardRouting shard, ValidateQueryRequest request) {
final ClusterState clusterState = clusterService.state();
final Set<String> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices());
final Set<ResolvedExpression> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices());
final AliasFilter aliasFilter = searchService.buildAliasFilter(clusterState, shard.getIndexName(), indicesAndAliases);
return new ShardValidateQueryRequest(shard.shardId(), aliasFilter, request);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.io.stream.Writeable;
Expand Down Expand Up @@ -109,7 +110,7 @@ protected boolean resolveIndex(ExplainRequest request) {

@Override
protected void resolveRequest(ClusterState state, InternalRequest request) {
final Set<String> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(state, request.request().index());
final Set<ResolvedExpression> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(state, request.request().index());
final AliasFilter aliasFilter = searchService.buildAliasFilter(state, request.concreteIndex(), indicesAndAliases);
request.request().filteringAlias(aliasFilter);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
import org.elasticsearch.cluster.metadata.IndexAbstraction;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
Expand Down Expand Up @@ -110,6 +111,7 @@
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.LongSupplier;
import java.util.stream.Collectors;

import static org.elasticsearch.action.search.SearchType.DFS_QUERY_THEN_FETCH;
import static org.elasticsearch.action.search.SearchType.QUERY_THEN_FETCH;
Expand Down Expand Up @@ -202,14 +204,17 @@ public TransportSearchAction(

private Map<String, OriginalIndices> buildPerIndexOriginalIndices(
ClusterState clusterState,
Set<String> indicesAndAliases,
Set<ResolvedExpression> indicesAndAliases,
String[] indices,
IndicesOptions indicesOptions
) {
Map<String, OriginalIndices> res = Maps.newMapWithExpectedSize(indices.length);
var blocks = clusterState.blocks();
// optimization: mostly we do not have any blocks so there's no point in the expensive per-index checking
boolean hasBlocks = blocks.global().isEmpty() == false || blocks.indices().isEmpty() == false;
// Get a distinct set of index abstraction names present from the resolved expressions to help with the reverse resolution from
// concrete index to the expression that produced it.
Set<String> indicesAndAliasesResources = indicesAndAliases.stream().map(ResolvedExpression::resource).collect(Collectors.toSet());
for (String index : indices) {
if (hasBlocks) {
blocks.indexBlockedRaiseException(ClusterBlockLevel.READ, index);
Expand All @@ -226,8 +231,8 @@ private Map<String, OriginalIndices> buildPerIndexOriginalIndices(
String[] finalIndices = Strings.EMPTY_ARRAY;
if (aliases == null
|| aliases.length == 0
|| indicesAndAliases.contains(index)
|| hasDataStreamRef(clusterState, indicesAndAliases, index)) {
|| indicesAndAliasesResources.contains(index)
|| hasDataStreamRef(clusterState, indicesAndAliasesResources, index)) {
finalIndices = new String[] { index };
}
if (aliases != null) {
Expand All @@ -246,7 +251,11 @@ private static boolean hasDataStreamRef(ClusterState clusterState, Set<String> i
return indicesAndAliases.contains(ret.getParentDataStream().getName());
}

Map<String, AliasFilter> buildIndexAliasFilters(ClusterState clusterState, Set<String> indicesAndAliases, Index[] concreteIndices) {
Map<String, AliasFilter> buildIndexAliasFilters(
ClusterState clusterState,
Set<ResolvedExpression> indicesAndAliases,
Index[] concreteIndices
) {
final Map<String, AliasFilter> aliasFilterMap = new HashMap<>();
for (Index index : concreteIndices) {
clusterState.blocks().indexBlockedRaiseException(ClusterBlockLevel.READ, index.getName());
Expand Down Expand Up @@ -1236,7 +1245,10 @@ private void executeSearch(
} else {
final Index[] indices = resolvedIndices.getConcreteLocalIndices();
concreteLocalIndices = Arrays.stream(indices).map(Index::getName).toArray(String[]::new);
final Set<String> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, searchRequest.indices());
final Set<ResolvedExpression> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(
clusterState,
searchRequest.indices()
);
aliasFilter = buildIndexAliasFilters(clusterState, indicesAndAliases, indices);
aliasFilter.putAll(remoteAliasMap);
localShardIterators = getLocalShardsIterator(
Expand Down Expand Up @@ -1834,7 +1846,7 @@ List<SearchShardIterator> getLocalShardsIterator(
ClusterState clusterState,
SearchRequest searchRequest,
String clusterAlias,
Set<String> indicesAndAliases,
Set<ResolvedExpression> indicesAndAliases,
String[] concreteIndices
) {
var routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(), searchRequest.indices());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.index.Index;
Expand Down Expand Up @@ -127,7 +128,10 @@ public void searchShards(Task task, SearchShardsRequest searchShardsRequest, Act
searchService.getRewriteContext(timeProvider::absoluteStartMillis, resolvedIndices, null),
listener.delegateFailureAndWrap((delegate, searchRequest) -> {
Index[] concreteIndices = resolvedIndices.getConcreteLocalIndices();
final Set<String> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, searchRequest.indices());
final Set<ResolvedExpression> indicesAndAliases = indexNameExpressionResolver.resolveExpressions(
clusterState,
searchRequest.indices()
);
final Map<String, AliasFilter> aliasFilters = transportSearchAction.buildIndexAliasFilters(
clusterState,
indicesAndAliases,
Expand Down
Loading

0 comments on commit 389e6c3

Please sign in to comment.