From 205df2ae8e89ce37ba1620162710668890f1f03f Mon Sep 17 00:00:00 2001
From: David Turner
Date: Mon, 8 Jan 2024 15:49:06 +0000
Subject: [PATCH 01/47] Track hot threads as bytes not strings (#103812)

No need to allocate a single enormous string for each node-level response;
we can leave the hot threads as bytes and process them in a streaming
fashion as needed.
---
 .../transport/netty4/ESLoggingHandlerIT.java |  10 +-
 .../action/admin/HotThreadsIT.java           |  93 +++++++++++--------
 .../org/elasticsearch/TransportVersions.java |   1 +
 .../node/hotthreads/NodeHotThreads.java      |  56 +++++++++--
 .../hotthreads/NodesHotThreadsResponse.java  |  49 ++++++++--
 .../TransportNodesHotThreadsAction.java      |  26 +++++-
 .../cluster/coordination/LagDetector.java    |  19 +++-
 .../cluster/RestNodesHotThreadsAction.java   |   3 +-
 .../NodesHotThreadsResponseTests.java        |  44 ++++++---
 .../coordination/LagDetectorTests.java       |   2 +-
 10 files changed, 220 insertions(+), 83 deletions(-)

diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java
index 3c869a89cfaa9..9202db6f49a8e 100644
--- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java
+++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java
@@ -11,9 +11,8 @@
 import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.LogManager;
 import org.elasticsearch.ESNetty4IntegTestCase;
-import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequest;
-import org.elasticsearch.action.admin.cluster.node.hotthreads.TransportNodesHotThreadsAction;
 import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.MockLogAppender;
 import org.elasticsearch.test.junit.annotations.TestLogging;
@@ -21,7 +20,6 @@
 import org.elasticsearch.transport.TransportLogger;
 
 import java.io.IOException;
-import java.util.concurrent.TimeUnit;
 
 @ESIntegTestCase.ClusterScope(numDataNodes = 2, scope = ESIntegTestCase.Scope.TEST)
 public class ESLoggingHandlerIT extends ESNetty4IntegTestCase {
@@ -54,7 +52,7 @@ public void testLoggingHandler() {
             + ", request id: \\d+"
             + ", type: request"
             + ", version: .*"
-            + ", action: cluster:monitor/nodes/hot_threads\\[n\\]\\]"
+            + ", action: cluster:monitor/nodes/stats\\[n\\]\\]"
             + " WRITE: \\d+B";
         final MockLogAppender.LoggingExpectation writeExpectation = new MockLogAppender.PatternSeenEventExpectation(
             "hot threads request",
@@ -74,7 +72,7 @@ public void testLoggingHandler() {
             + ", request id: \\d+"
             + ", type: request"
             + ", version: .*"
-            + ", action: cluster:monitor/nodes/hot_threads\\[n\\]\\]"
+            + ", action: cluster:monitor/nodes/stats\\[n\\]\\]"
             + " READ: \\d+B";
 
         final MockLogAppender.LoggingExpectation readExpectation = new MockLogAppender.PatternSeenEventExpectation(
@@ -87,7 +85,7 @@ public void testLoggingHandler() {
         appender.addExpectation(writeExpectation);
         appender.addExpectation(flushExpectation);
         appender.addExpectation(readExpectation);
-        client().execute(TransportNodesHotThreadsAction.TYPE, new NodesHotThreadsRequest()).actionGet(10, TimeUnit.SECONDS);
+        client().admin().cluster().prepareNodesStats().get(TimeValue.timeValueSeconds(10));
         appender.assertAllExpectationsMatched();
     }
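A note on the approach before the remaining hunks: the idea is to hold the hot-threads dump as UTF-8 bytes and hand consumers a java.io.Reader over those bytes, so the text is processed line by line and no single enormous String is ever materialised. A minimal self-contained sketch of that pattern — the class name BytesBackedText and its methods are illustrative assumptions, not code from this patch:

    import java.io.BufferedReader;
    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.io.Reader;
    import java.nio.charset.StandardCharsets;
    import java.util.function.Consumer;

    // Sketch only: the real patch wraps a ref-counted ReleasableBytesReference
    // rather than a plain byte[], so the buffer can be released when no response
    // object needs it any more.
    class BytesBackedText {
        private final byte[] utf8;

        BytesBackedText(byte[] utf8) {
            this.utf8 = utf8;
        }

        // Streaming access: callers read characters on demand instead of
        // forcing one giant String allocation up front.
        Reader reader() {
            return new InputStreamReader(new ByteArrayInputStream(utf8), StandardCharsets.UTF_8);
        }

        // Consumers process the dump one line at a time, e.g. to indent each
        // line when rendering the node-level output.
        void forEachLine(Consumer<String> consumer) throws IOException {
            try (BufferedReader bufferedReader = new BufferedReader(reader())) {
                String line;
                while ((line = bufferedReader.readLine()) != null) {
                    consumer.accept(line);
                }
            }
        }
    }

This is the shape of NodeHotThreads.getHotThreadsReader() and the LinesIterator in the hunks below; the extra wrinkle in the real change is the reference counting, which keeps the bytes alive exactly as long as some response object still holds a ref.

diff --git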
a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java index 5fa63aaed0508..45865ddd35ced 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequest; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsResponse; import org.elasticsearch.action.admin.cluster.node.hotthreads.TransportNodesHotThreadsAction; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.logging.ChunkedLoggingStreamTests; import org.elasticsearch.core.TimeValue; @@ -24,7 +25,6 @@ import java.util.Map; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; @@ -116,61 +116,72 @@ public void onFailure(Exception e) { public void testIgnoreIdleThreads() { assumeTrue("no support for hot_threads on FreeBSD", Constants.FREE_BSD == false); - // First time, don't ignore idle threads: - final NodesHotThreadsResponse firstResponse = client().execute( - TransportNodesHotThreadsAction.TYPE, - new NodesHotThreadsRequest().ignoreIdleThreads(false).threads(Integer.MAX_VALUE) - ).actionGet(10, TimeUnit.SECONDS); - final Matcher containsCachedTimeThreadRunMethod = containsString( "org.elasticsearch.threadpool.ThreadPool$CachedTimeThread.run" ); - int totSizeAll = 0; - for (NodeHotThreads node : firstResponse.getNodesMap().values()) { - totSizeAll += node.getHotThreads().length(); - assertThat(node.getHotThreads(), containsCachedTimeThreadRunMethod); - } + // First time, don't ignore idle threads: + final var totSizeAll = safeAwait( + SubscribableListener.newForked( + l -> client().execute( + TransportNodesHotThreadsAction.TYPE, + new NodesHotThreadsRequest().ignoreIdleThreads(false).threads(Integer.MAX_VALUE), + l.map(response -> { + int length = 0; + for (NodeHotThreads node : response.getNodesMap().values()) { + length += node.getHotThreads().length(); + assertThat(node.getHotThreads(), containsCachedTimeThreadRunMethod); + } + return length; + }) + ) + ) + ); // Second time, do ignore idle threads: final var request = new NodesHotThreadsRequest().threads(Integer.MAX_VALUE); // Make sure default is true: assertTrue(request.ignoreIdleThreads()); - final NodesHotThreadsResponse secondResponse = client().execute(TransportNodesHotThreadsAction.TYPE, request) - .actionGet(10, TimeUnit.SECONDS); - - int totSizeIgnoreIdle = 0; - for (NodeHotThreads node : secondResponse.getNodesMap().values()) { - totSizeIgnoreIdle += node.getHotThreads().length(); - assertThat(node.getHotThreads(), not(containsCachedTimeThreadRunMethod)); - } + final var totSizeIgnoreIdle = safeAwait( + SubscribableListener.newForked(l -> client().execute(TransportNodesHotThreadsAction.TYPE, request, l.map(response -> { + int length = 0; + for (NodeHotThreads node : response.getNodesMap().values()) { + length += node.getHotThreads().length(); + assertThat(node.getHotThreads(), not(containsCachedTimeThreadRunMethod)); + } + return length; + }))) + ); // The filtered stacks should be smaller than unfiltered ones: assertThat(totSizeIgnoreIdle, lessThan(totSizeAll)); } public 
void testTimestampAndParams() { - - final NodesHotThreadsResponse response = client().execute(TransportNodesHotThreadsAction.TYPE, new NodesHotThreadsRequest()) - .actionGet(10, TimeUnit.SECONDS); - - if (Constants.FREE_BSD) { - for (NodeHotThreads node : response.getNodesMap().values()) { - assertThat(node.getHotThreads(), containsString("hot_threads is not supported")); - } - } else { - for (NodeHotThreads node : response.getNodesMap().values()) { - assertThat( - node.getHotThreads(), - allOf( - containsString("Hot threads at"), - containsString("interval=500ms"), - containsString("busiestThreads=3"), - containsString("ignoreIdleThreads=true") - ) - ); - } - } + safeAwait( + SubscribableListener.newForked( + l -> client().execute(TransportNodesHotThreadsAction.TYPE, new NodesHotThreadsRequest(), l.map(response -> { + if (Constants.FREE_BSD) { + for (NodeHotThreads node : response.getNodesMap().values()) { + assertThat(node.getHotThreads(), containsString("hot_threads is not supported")); + } + } else { + for (NodeHotThreads node : response.getNodesMap().values()) { + assertThat( + node.getHotThreads(), + allOf( + containsString("Hot threads at"), + containsString("interval=500ms"), + containsString("busiestThreads=3"), + containsString("ignoreIdleThreads=true") + ) + ); + } + } + return null; + })) + ) + ); } @TestLogging(reason = "testing logging at various levels", value = "org.elasticsearch.action.admin.HotThreadsIT:TRACE") diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index d0365446ded9d..76fd9d077e2e7 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -180,6 +180,7 @@ static TransportVersion def(int id) { public static final TransportVersion UPDATE_API_KEY_EXPIRATION_TIME_ADDED = def(8_568_00_0); public static final TransportVersion LAZY_ROLLOVER_ADDED = def(8_569_00_0); public static final TransportVersion ESQL_PLAN_POINT_LITERAL_WKB = def(8_570_00_0); + public static final TransportVersion HOT_THREADS_AS_BYTES = def(8_571_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java index 918bfbca8d304..1118a6318ddf7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java @@ -8,34 +8,78 @@ package org.elasticsearch.action.admin.cluster.node.hotthreads; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.io.InputStreamReader; +import java.io.StringReader; +import java.nio.charset.StandardCharsets; public class NodeHotThreads extends BaseNodeResponse { - private String hotThreads; + private final ReleasableBytesReference bytes; NodeHotThreads(StreamInput in) throws IOException { super(in); - hotThreads = in.readString(); + if (in.getTransportVersion().onOrAfter(TransportVersions.HOT_THREADS_AS_BYTES)) { + bytes = in.readReleasableBytesReference(); + } else { + bytes = ReleasableBytesReference.wrap(new BytesArray(in.readString().getBytes(StandardCharsets.UTF_8))); + } } - public NodeHotThreads(DiscoveryNode node, String hotThreads) { + public NodeHotThreads(DiscoveryNode node, ReleasableBytesReference hotThreadsUtf8Bytes) { super(node); - this.hotThreads = hotThreads; + assert hotThreadsUtf8Bytes.hasReferences(); + bytes = hotThreadsUtf8Bytes; // takes ownership of the original ref, no need to .retain() } public String getHotThreads() { - return this.hotThreads; + return bytes.utf8ToString(); + } + + public java.io.Reader getHotThreadsReader() { + try { + return new InputStreamReader(bytes.streamInput(), StandardCharsets.UTF_8); + } catch (IOException e) { + assert false : e; // all in-memory, no IO takes place + return new StringReader("ERROR:" + e.toString()); + } } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeString(hotThreads); + if (out.getTransportVersion().onOrAfter(TransportVersions.HOT_THREADS_AS_BYTES)) { + out.writeBytesReference(bytes); + } else { + out.writeString(bytes.utf8ToString()); + } + } + + @Override + public void incRef() { + bytes.incRef(); + } + + @Override + public boolean tryIncRef() { + return bytes.tryIncRef(); + } + + @Override + public boolean decRef() { + return bytes.decRef(); + } + + @Override + public boolean hasReferences() { + return bytes.hasReferences(); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java index 59307009f785b..c0ceef18ca462 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java @@ -15,20 +15,29 @@ import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import 
org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.RefCounted; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.transport.LeakTracker; import java.io.BufferedReader; import java.io.IOException; -import java.io.StringReader; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; -import java.util.Objects; public class NodesHotThreadsResponse extends BaseNodesResponse { + private final RefCounted refs = LeakTracker.wrap( + AbstractRefCounted.of(() -> Releasables.wrap(Iterators.map(getNodes().iterator(), n -> n::decRef)).close()) + ); + public NodesHotThreadsResponse(ClusterName clusterName, List nodes, List failures) { super(clusterName, nodes, failures); + for (NodeHotThreads nodeHotThreads : getNodes()) { + nodeHotThreads.mustIncRef(); + } } public Iterator> getTextChunks() { @@ -36,15 +45,21 @@ public Iterator> getTextChunks() { getNodes().iterator(), node -> Iterators.concat( Iterators.single(writer -> writer.append("::: ").append(node.getNode().toString()).append('\n')), - Iterators.map(new LinesIterator(node.getHotThreads()), line -> writer -> writer.append(" ").append(line).append('\n')), - Iterators.single(writer -> writer.append('\n')) + Iterators.map( + new LinesIterator(node.getHotThreadsReader()), + line -> writer -> writer.append(" ").append(line).append('\n') + ), + Iterators.single(writer -> { + assert hasReferences(); + writer.append('\n'); + }) ) ); } @Override protected List readNodesFrom(StreamInput in) throws IOException { - return in.readCollectionAsList(NodeHotThreads::new); + return TransportAction.localOnly(); } @Override @@ -56,8 +71,8 @@ private static class LinesIterator implements Iterator { final BufferedReader reader; String nextLine; - private LinesIterator(String input) { - reader = new BufferedReader(new StringReader(Objects.requireNonNull(input))); + private LinesIterator(java.io.Reader reader) { + this.reader = new BufferedReader(reader); advance(); } @@ -86,4 +101,24 @@ public String next() { } } } + + @Override + public void incRef() { + refs.incRef(); + } + + @Override + public boolean tryIncRef() { + return refs.tryIncRef(); + } + + @Override + public boolean decRef() { + return refs.decRef(); + } + + @Override + public boolean hasReferences() { + return refs.hasReferences(); + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java index ea56c85e36a3a..e731d951493f0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java @@ -15,17 +15,22 @@ import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.IOUtils; import org.elasticsearch.monitor.jvm.HotThreads; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; +import 
org.elasticsearch.transport.LeakTracker; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.io.StringWriter; +import java.io.OutputStreamWriter; +import java.nio.charset.StandardCharsets; import java.util.List; public class TransportNodesHotThreadsAction extends TransportNodesAction< @@ -74,17 +79,28 @@ protected NodeHotThreads newNodeResponse(StreamInput in, DiscoveryNode node) thr @Override protected NodeHotThreads nodeOperation(NodeRequest request, Task task) { - HotThreads hotThreads = new HotThreads().busiestThreads(request.request.threads) + final var hotThreads = new HotThreads().busiestThreads(request.request.threads) .type(request.request.type) .sortOrder(request.request.sortOrder) .interval(request.request.interval) .threadElementsSnapshotCount(request.request.snapshots) .ignoreIdleThreads(request.request.ignoreIdleThreads); - try (var writer = new StringWriter()) { - hotThreads.detect(writer); - return new NodeHotThreads(clusterService.localNode(), writer.toString()); + final var out = transportService.newNetworkBytesStream(); + final var trackedResource = LeakTracker.wrap(out); + var success = false; + try { + try (var writer = new OutputStreamWriter(Streams.flushOnCloseStream(out), StandardCharsets.UTF_8)) { + hotThreads.detect(writer); + } + final var result = new NodeHotThreads(clusterService.localNode(), new ReleasableBytesReference(out.bytes(), trackedResource)); + success = true; + return result; } catch (Exception e) { throw new ElasticsearchException("failed to detect hot threads", e); + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(trackedResource); + } } } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/LagDetector.java b/server/src/main/java/org/elasticsearch/cluster/coordination/LagDetector.java index 7a9c7c84d0f00..4f6b938b3745e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/LagDetector.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/LagDetector.java @@ -237,12 +237,14 @@ public void onResponse(NodesHotThreadsResponse nodesHotThreadsResponse) { return; } + nodesHotThreadsResponse.mustIncRef(); loggingTaskRunner.enqueueTask( new HotThreadsLoggingTask( discoveryNode, appliedVersion, expectedVersion, - nodesHotThreadsResponse.getNodes().get(0).getHotThreads() + nodesHotThreadsResponse.getNodes().get(0).getHotThreads(), + Releasables.assertOnce(nodesHotThreadsResponse::decRef) ) ); } @@ -298,10 +300,18 @@ public void onFailure(Exception e) { static class HotThreadsLoggingTask extends AbstractRunnable implements Comparable { private final String nodeHotThreads; + private final Releasable releasable; private final String prefix; - HotThreadsLoggingTask(DiscoveryNode discoveryNode, long appliedVersion, long expectedVersion, String nodeHotThreads) { + HotThreadsLoggingTask( + DiscoveryNode discoveryNode, + long appliedVersion, + long expectedVersion, + String nodeHotThreads, + Releasable releasable + ) { this.nodeHotThreads = nodeHotThreads; + this.releasable = releasable; this.prefix = Strings.format( "hot threads from node [%s] lagging at version [%d] despite commit of cluster state version [%d]", discoveryNode.descriptionWithoutAttributes(), @@ -327,6 +337,11 @@ protected void doRun() throws Exception { } } + @Override + public void onAfter() { + Releasables.closeExpectNoException(releasable); + } + @Override public int compareTo(HotThreadsLoggingTask o) { return 0; diff 
--git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java index 76df8af1889a7..2942e59aa1bfd 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java @@ -116,7 +116,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC return channel -> client.execute(TransportNodesHotThreadsAction.TYPE, nodesHotThreadsRequest, new RestResponseListener<>(channel) { @Override public RestResponse buildResponse(NodesHotThreadsResponse response) { - return RestResponse.chunked(RestStatus.OK, fromTextChunks(TEXT_CONTENT_TYPE, response.getTextChunks(), null)); + response.mustIncRef(); + return RestResponse.chunked(RestStatus.OK, fromTextChunks(TEXT_CONTENT_TYPE, response.getTextChunks(), response::decRef)); } }); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/hotthreads/NodesHotThreadsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/hotthreads/NodesHotThreadsResponseTests.java index b6c7b591cbaa5..33676489b04d5 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/hotthreads/NodesHotThreadsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/hotthreads/NodesHotThreadsResponseTests.java @@ -13,6 +13,8 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.test.ESTestCase; import java.util.List; @@ -23,19 +25,33 @@ public class NodesHotThreadsResponseTests extends ESTestCase { public void testGetTextChunks() { final var node0 = DiscoveryNodeUtils.create("node-0"); final var node1 = DiscoveryNodeUtils.create("node-1"); - assertEquals(Strings.format(""" - ::: %s - node 0 line 1 - node 0 line 2 - - ::: %s - node 1 line 1 - node 1 line 2 - - """, node0, node1), getTextBodyContent(new NodesHotThreadsResponse(ClusterName.DEFAULT, List.of(new NodeHotThreads(node0, """ - node 0 line 1 - node 0 line 2"""), new NodeHotThreads(node1, """ - node 1 line 1 - node 1 line 2""")), List.of()).getTextChunks())); + final var response = new NodesHotThreadsResponse( + ClusterName.DEFAULT, + List.of( + + new NodeHotThreads(node0, ReleasableBytesReference.wrap(new BytesArray(""" + node 0 line 1 + node 0 line 2"""))), + + new NodeHotThreads(node1, ReleasableBytesReference.wrap(new BytesArray(""" + node 1 line 1 + node 1 line 2"""))) + ), + List.of() + ); + try { + assertEquals(Strings.format(""" + ::: %s + node 0 line 1 + node 0 line 2 + + ::: %s + node 1 line 1 + node 1 line 2 + + """, node0, node1), getTextBodyContent(response.getTextChunks())); + } finally { + response.decRef(); + } } } diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/LagDetectorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/LagDetectorTests.java index 9a8aface0990c..f69596be8ce65 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/LagDetectorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/LagDetectorTests.java @@ -266,7 +266,7 @@ public void testHotThreadsChunkedLoggingEncoding() { + 
node.descriptionWithoutAttributes() + "] lagging at version [1] despite commit of cluster state version [2]", ReferenceDocs.LAGGING_NODE_TROUBLESHOOTING, - new LagDetector.HotThreadsLoggingTask(node, 1, 2, expectedBody)::run + new LagDetector.HotThreadsLoggingTask(node, 1, 2, expectedBody, () -> {})::run ).utf8ToString() ); } From a8d19a5166c3c46b5af699dfc499d6fe8c69b692 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 8 Jan 2024 17:33:08 +0100 Subject: [PATCH 02/47] ESQL: Simplify how we transform WKB to WKT in response (#104053) --- .../compute/data/BasicBlockTests.java | 4 +- .../elasticsearch/xpack/esql/CsvAssert.java | 4 +- .../xpack/esql/action/ColumnInfo.java | 44 ++++++------------- .../xpack/esql/action/ResponseValueUtils.java | 17 +------ .../xpack/esql/formatter/TextFormat.java | 12 +---- .../xpack/esql/formatter/TextFormatter.java | 13 +----- .../xpack/esql/type/EsqlDataTypes.java | 9 ---- .../xpack/esql/formatter/TextFormatTests.java | 27 +++++++----- .../esql/formatter/TextFormatterTests.java | 27 ++++++++---- .../xpack/ql/util/SpatialCoordinateTypes.java | 42 +++++------------- 10 files changed, 66 insertions(+), 133 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java index 6ebc1550af517..c75bf583dbf36 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java @@ -445,11 +445,11 @@ public void testBytesRefBlock() { } public void testBytesRefBlockOnGeoPoints() { - testBytesRefBlock(() -> GEO.pointAsWKB(randomGeoPoint()), false, GEO::wkbAsPoint); + testBytesRefBlock(() -> GEO.pointAsWKB(randomGeoPoint()), false, GEO::wkbAsString); } public void testBytesRefBlockOnCartesianPoints() { - testBytesRefBlock(() -> CARTESIAN.pointAsWKB(randomCartesianPoint()), false, CARTESIAN::wkbAsPoint); + testBytesRefBlock(() -> CARTESIAN.pointAsWKB(randomCartesianPoint()), false, CARTESIAN::wkbAsString); } public void testBytesRefBlockBuilderWithNulls() { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java index 797ae51d06b49..49dc585c01753 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java @@ -202,9 +202,9 @@ public static void assertData( if (expectedType == Type.DATETIME) { expectedValue = rebuildExpected(expectedValue, Long.class, x -> UTC_DATE_TIME_FORMATTER.formatMillis((long) x)); } else if (expectedType == Type.GEO_POINT) { - expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> GEO.wkbAsPoint((BytesRef) x)); + expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> GEO.wkbAsString((BytesRef) x)); } else if (expectedType == Type.CARTESIAN_POINT) { - expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> CARTESIAN.wkbAsPoint((BytesRef) x)); + expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> CARTESIAN.wkbAsString((BytesRef) x)); } else if (expectedType == Type.IP) { // convert BytesRef-packed IP to String, allowing subsequent comparison with what's expected expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> 
DocValueFormat.IP.format((BytesRef) x)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java index 71ce36e997300..673ec0bc4a184 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java @@ -28,7 +28,6 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; import org.elasticsearch.xpack.versionfield.Version; import java.io.IOException; @@ -163,8 +162,20 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa return builder.value(UTC_DATE_TIME_FORMATTER.formatMillis(longVal)); } }; - case "geo_point" -> new PointPositionToXContent(block, GEO, scratch); - case "cartesian_point" -> new PointPositionToXContent(block, CARTESIAN, scratch); + case "geo_point" -> new PositionToXContent(block) { + @Override + protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) + throws IOException { + return builder.value(GEO.wkbAsString(((BytesRefBlock) block).getBytesRef(valueIndex, scratch))); + } + }; + case "cartesian_point" -> new PositionToXContent(block) { + @Override + protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) + throws IOException { + return builder.value(CARTESIAN.wkbAsString(((BytesRefBlock) block).getBytesRef(valueIndex, scratch))); + } + }; case "boolean" -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) @@ -208,31 +219,4 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa default -> throw new IllegalArgumentException("can't convert values of type [" + type + "]"); }; } - - private class PointPositionToXContent extends PositionToXContent { - private final SpatialCoordinateTypes spatial; - private final BytesRef scratch; - - private PointPositionToXContent(Block block, SpatialCoordinateTypes spatial, BytesRef scratch) { - super(block); - this.spatial = spatial; - this.scratch = scratch; - } - - @Override - protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { - if (block instanceof LongBlock longBlock) { - long encoded = longBlock.getLong(valueIndex); - String wkt = spatial.pointAsString(spatial.longAsPoint(encoded)); - return builder.value(wkt); - } else if (block instanceof BytesRefBlock wkbBlock) { - // This block only converts WKB to WKT, so does not need CRS, so we could remove this class if WKB was the only block type - BytesRef wkb = wkbBlock.getBytesRef(valueIndex, scratch); - String wkt = spatial.wkbAsString(wkb); - return builder.value(wkt); - } else { - throw new IllegalArgumentException("Unrecognized block type " + block.getWriteableName() + " for type " + type); - } - } - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java index d68245aa3296e..625b488b1e857 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; @@ -30,7 +29,6 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; -import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; import org.elasticsearch.xpack.versionfield.Version; import java.io.IOException; @@ -103,8 +101,8 @@ private static Object valueAt(String dataType, Block block, int offset, BytesRef } case "boolean" -> ((BooleanBlock) block).getBoolean(offset); case "version" -> new Version(((BytesRefBlock) block).getBytesRef(offset, scratch)).toString(); - case "geo_point" -> pointValueAt(GEO, dataType, block, offset, scratch); - case "cartesian_point" -> pointValueAt(CARTESIAN, dataType, block, offset, scratch); + case "geo_point" -> GEO.wkbAsString(((BytesRefBlock) block).getBytesRef(offset, scratch)); + case "cartesian_point" -> CARTESIAN.wkbAsString(((BytesRefBlock) block).getBytesRef(offset, scratch)); case "unsupported" -> UnsupportedValueSource.UNSUPPORTED_OUTPUT; case "_source" -> { BytesRef val = ((BytesRefBlock) block).getBytesRef(offset, scratch); @@ -121,17 +119,6 @@ private static Object valueAt(String dataType, Block block, int offset, BytesRef }; } - private static SpatialPoint pointValueAt(SpatialCoordinateTypes spatial, String dataType, Block block, int offset, BytesRef scratch) { - // TODO: Should we already return the WKT String here - if this is only used to display results? - if (block instanceof LongBlock longBlock) { - return spatial.longAsPoint(longBlock.getLong(offset)); - } else if (block instanceof BytesRefBlock wkbBlock) { - return spatial.wkbAsPoint(wkbBlock.getBytesRef(offset, scratch)); - } else { - throw new IllegalArgumentException("Unsupported block type for " + dataType + ": " + block.getWriteableName()); - } - } - /** * Converts a list of values to Pages so that we can parse from xcontent. It's not * super efficient, but it doesn't really have to be. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/formatter/TextFormat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/formatter/TextFormat.java index 10f5e3c3da3c5..ac8f9560074f5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/formatter/TextFormat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/formatter/TextFormat.java @@ -8,7 +8,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.xcontent.MediaType; @@ -291,19 +290,10 @@ public Iterator> format(RestRequest request hasHeader(request) && esqlResponse.columns() != null ? 
Iterators.single(writer -> row(writer, esqlResponse.columns().iterator(), ColumnInfo::name, delimiter)) : Collections.emptyIterator(), - Iterators.map(esqlResponse.values(), row -> writer -> row(writer, row, TextFormat::formatEsqlResultObject, delimiter)) + Iterators.map(esqlResponse.values(), row -> writer -> row(writer, row, f -> Objects.toString(f, StringUtils.EMPTY), delimiter)) ); } - private static String formatEsqlResultObject(Object obj) { - // TODO: It would be nicer to override GeoPoint.toString() but that has consequences - if (obj instanceof SpatialPoint point) { - // SpatialPoint.toWKT maintains max precision, while ´%f´ conveniently simplifies for display purposes - return String.format(Locale.ROOT, "POINT (%f %f)", point.getX(), point.getY()); - } - return Objects.toString(obj, StringUtils.EMPTY); - } - boolean hasHeader(RestRequest request) { return true; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/formatter/TextFormatter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/formatter/TextFormatter.java index 9ac9482e2a59d..0535e4adfe346 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/formatter/TextFormatter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/formatter/TextFormatter.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.formatter; import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; @@ -16,7 +15,6 @@ import java.io.Writer; import java.util.Collections; import java.util.Iterator; -import java.util.Locale; import java.util.Objects; import java.util.function.Function; @@ -31,7 +29,7 @@ public class TextFormatter { private final EsqlQueryResponse response; private final int[] width; - private final Function FORMATTER = TextFormatter::formatEsqlResultObject; + private final Function FORMATTER = Objects::toString; /** * Create a new {@linkplain TextFormatter} for formatting responses. 
@@ -130,13 +128,4 @@ private static void writePadding(int padding, Writer writer) throws IOException writer.append(PADDING_64, 0, padding); } } - - private static String formatEsqlResultObject(Object obj) { - // TODO: It would be nicer to override GeoPoint.toString() but that has consequences - if (obj instanceof SpatialPoint point) { - // SpatialPoint.toWKT maintains max precision, while ´%f´ conveniently simplifies for display purposes - return String.format(Locale.ROOT, "POINT (%f %f)", point.getX(), point.getY()); - } - return Objects.toString(obj); - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java index bffe7544de3c7..eae808abb5037 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.esql.type; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -125,13 +123,6 @@ public static DataType fromJava(Object value) { if (value instanceof String || value instanceof Character || value instanceof BytesRef) { return KEYWORD; } - if (value instanceof GeoPoint) { - return GEO_POINT; - } - if (value instanceof SpatialPoint) { - // TODO: we have no access to CartesianPoint, but since it implements SpatialPoint we can use that here for now - return CARTESIAN_POINT; - } return null; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java index 8f66438a6f4c2..bbe32350a0465 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.esql.formatter; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.geometry.Point; @@ -122,16 +124,16 @@ public void testCsvFormatWithRegularData() { String text = format(CSV, req(), regularData()); assertEquals(""" string,number,location,location2\r - Along The River Bank,708,POINT (12.000000 56.000000),POINT (1234.000000 5678.000000)\r - Mind Train,280,POINT (-97.000000 26.000000),POINT (-9753.000000 2611.000000)\r + Along The River Bank,708,POINT (12.0 56.0),POINT (1234.0 5678.0)\r + Mind Train,280,POINT (-97.0 26.0),POINT (-9753.0 2611.0)\r """, text); } public void testCsvFormatNoHeaderWithRegularData() { String text = format(CSV, reqWithParam("header", "absent"), regularData()); assertEquals(""" - Along The River Bank,708,POINT (12.000000 56.000000),POINT (1234.000000 5678.000000)\r - Mind Train,280,POINT (-97.000000 26.000000),POINT (-9753.000000 2611.000000)\r + Along The River Bank,708,POINT (12.0 56.0),POINT (1234.0 5678.0)\r + Mind Train,280,POINT (-97.0 26.0),POINT (-9753.0 2611.0)\r """, text); } @@ -146,12 +148,12 @@ public void testCsvFormatWithCustomDelimiterRegularData() { "location2", "Along The River Bank", 
"708", - "POINT (12.000000 56.000000)", - "POINT (1234.000000 5678.000000)", + "POINT (12.0 56.0)", + "POINT (1234.0 5678.0)", "Mind Train", "280", - "POINT (-97.000000 26.000000)", - "POINT (-9753.000000 2611.000000)" + "POINT (-97.0 26.0)", + "POINT (-9753.0 2611.0)" ); List expectedTerms = terms.stream() .map(x -> x.contains(String.valueOf(delim)) ? '"' + x + '"' : x) @@ -174,8 +176,8 @@ public void testTsvFormatWithRegularData() { String text = format(TSV, req(), regularData()); assertEquals(""" string\tnumber\tlocation\tlocation2 - Along The River Bank\t708\tPOINT (12.000000 56.000000)\tPOINT (1234.000000 5678.000000) - Mind Train\t280\tPOINT (-97.000000 26.000000)\tPOINT (-9753.000000 2611.000000) + Along The River Bank\t708\tPOINT (12.0 56.0)\tPOINT (1234.0 5678.0) + Mind Train\t280\tPOINT (-97.0 26.0)\tPOINT (-9753.0 2611.0) """, text); } @@ -257,6 +259,9 @@ private static EsqlQueryResponse regularData() { new ColumnInfo("location2", "cartesian_point") ); + BytesRefArray geoPoints = new BytesRefArray(2, BigArrays.NON_RECYCLING_INSTANCE); + geoPoints.append(GEO.pointAsWKB(new Point(12, 56))); + geoPoints.append(GEO.pointAsWKB(new Point(-97, 26))); // values List values = List.of( new Page( @@ -265,7 +270,7 @@ private static EsqlQueryResponse regularData() { .appendBytesRef(new BytesRef("Mind Train")) .build(), blockFactory.newIntArrayVector(new int[] { 11 * 60 + 48, 4 * 60 + 40 }, 2).asBlock(), - blockFactory.newLongArrayVector(new long[] { GEO.pointAsLong(12, 56), GEO.pointAsLong(-97, 26) }, 2).asBlock(), + blockFactory.newBytesRefArrayVector(geoPoints, 2).asBlock(), blockFactory.newBytesRefBlockBuilder(2) .appendBytesRef(CARTESIAN.pointAsWKB(new Point(1234, 5678))) .appendBytesRef(CARTESIAN.pointAsWKB(new Point(-9753, 2611))) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java index 99cfa2fa45a8d..b8800713eca89 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.geometry.Point; @@ -42,6 +44,13 @@ public class TextFormatterTests extends ESTestCase { new ColumnInfo("location2", "cartesian_point"), new ColumnInfo("null_field2", "keyword") ); + + private static final BytesRefArray geoPoints = new BytesRefArray(2, BigArrays.NON_RECYCLING_INSTANCE); + static { + geoPoints.append(GEO.pointAsWKB(new Point(12, 56))); + geoPoints.append(GEO.pointAsWKB(new Point(-97, 26))); + } + EsqlQueryResponse esqlResponse = new EsqlQueryResponse( columns, List.of( @@ -61,7 +70,7 @@ public class TextFormatterTests extends ESTestCase { UTC_DATE_TIME_FORMATTER.parseMillis("2000-03-15T21:34:37.443Z") }, 2 ).asBlock(), - blockFactory.newLongArrayVector(new long[] { GEO.pointAsLong(12, 56), GEO.pointAsLong(-97, 26) }, 2).asBlock(), + blockFactory.newBytesRefArrayVector(geoPoints, 2).asBlock(), blockFactory.newBytesRefBlockBuilder(2) .appendBytesRef(CARTESIAN.pointAsWKB(new Point(1234, 5678))) .appendBytesRef(CARTESIAN.pointAsWKB(new Point(-9753, 2611))) @@ -88,22 +97,22 @@ public void 
testFormatWithHeader() { assertThat(result, arrayWithSize(4)); assertEquals( " foo | bar |15charwidename!| null_field1 |superduperwidename!!!| baz |" - + " date | location | location2 | null_field2 ", + + " date | location | location2 | null_field2 ", result[0] ); assertEquals( - "---------------+---------------+---------------+---------------+---------------------+---------------+" - + "------------------------+----------------------------+--------------------------------+---------------", + "---------------+---------------+---------------+---------------+---------------------+---------------+-------" + + "-----------------+------------------+----------------------+---------------", result[1] ); assertEquals( "15charwidedata!|1 |6.888 |null |12.0 |rabbit |" - + "1953-09-02T00:00:00.000Z|POINT (12.000000 56.000000) |POINT (1234.000000 5678.000000) |null ", + + "1953-09-02T00:00:00.000Z|POINT (12.0 56.0) |POINT (1234.0 5678.0) |null ", result[2] ); assertEquals( "dog |2 |123124.888 |null |9912.0 |goat |" - + "2000-03-15T21:34:37.443Z|POINT (-97.000000 26.000000)|POINT (-9753.000000 2611.000000)|null ", + + "2000-03-15T21:34:37.443Z|POINT (-97.0 26.0)|POINT (-9753.0 2611.0)|null ", result[3] ); } @@ -135,7 +144,7 @@ public void testFormatWithoutHeader() { UTC_DATE_TIME_FORMATTER.parseMillis("2231-12-31T23:59:59.999Z") }, 2 ).asBlock(), - blockFactory.newLongArrayVector(new long[] { GEO.pointAsLong(12, 56), GEO.pointAsLong(-97, 26) }, 2).asBlock(), + blockFactory.newBytesRefArrayVector(geoPoints, 2).asBlock(), blockFactory.newBytesRefBlockBuilder(2) .appendBytesRef(CARTESIAN.pointAsWKB(new Point(1234, 5678))) .appendBytesRef(CARTESIAN.pointAsWKB(new Point(-9753, 2611))) @@ -152,12 +161,12 @@ public void testFormatWithoutHeader() { assertThat(result, arrayWithSize(2)); assertEquals( "doggie |4 |1.0 |null |77.0 |wombat |" - + "1955-01-21T01:02:03.342Z|POINT (12.000000 56.000000) |POINT (1234.000000 5678.000000) |null ", + + "1955-01-21T01:02:03.342Z|POINT (12.0 56.0) |POINT (1234.0 5678.0) |null ", result[0] ); assertEquals( "dog |2 |123124.888 |null |9912.0 |goat |" - + "2231-12-31T23:59:59.999Z|POINT (-97.000000 26.000000)|POINT (-9753.000000 2611.000000)|null ", + + "2231-12-31T23:59:59.999Z|POINT (-97.0 26.0)|POINT (-9753.0 2611.0)|null ", result[1] ); } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java index 86e65c03d9162..e1fdd6f364258 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java @@ -34,10 +34,6 @@ public long pointAsLong(double x, double y) { int longitudeEncoded = encodeLongitude(x); return (((long) latitudeEncoded) << 32) | (longitudeEncoded & 0xFFFFFFFFL); } - - public SpatialPoint pointAsPoint(Point point) { - return new GeoPoint(point.getY(), point.getX()); - } }, CARTESIAN { public SpatialPoint longAsPoint(long encoded) { @@ -56,10 +52,6 @@ public long pointAsLong(double x, double y) { return (yi & 0xFFFFFFFFL) | xi << 32; } - public SpatialPoint pointAsPoint(Point point) { - return makePoint(point.getX(), point.getY()); - } - private SpatialPoint makePoint(double x, double y) { return new SpatialPoint() { @Override @@ -121,17 +113,6 @@ public Point stringAsPoint(String string) { } } - protected abstract SpatialPoint pointAsPoint(Point point); - - public BytesRef stringAsWKB(String 
string) { - try { - Geometry geometry = WellKnownText.fromWKT(GeometryValidator.NOOP, false, string); - return new BytesRef(WellKnownBinary.toWKB(geometry, ByteOrder.LITTLE_ENDIAN)); - } catch (Exception e) { - throw new IllegalArgumentException("Failed to parse WKT: " + e.getMessage(), e); - } - } - public BytesRef pointAsWKB(SpatialPoint point) { return pointAsWKB(new Point(point.getX(), point.getY())); } @@ -153,21 +134,18 @@ public long wkbAsLong(BytesRef wkb) { } } - public String wkbAsString(BytesRef wkb) { - Geometry geometry = WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, wkb.bytes, wkb.offset, wkb.length); - if (geometry instanceof Point point) { - // WellKnownText.toWKT renders points slightly differently to SpatialPoint.toWKT - return pointAsString(pointAsPoint(point)); + public BytesRef stringAsWKB(String string) { + // TODO: we should be able to transform WKT to WKB without building the geometry + // we should as well use different validator for cartesian and geo? + try { + Geometry geometry = WellKnownText.fromWKT(GeometryValidator.NOOP, false, string); + return new BytesRef(WellKnownBinary.toWKB(geometry, ByteOrder.LITTLE_ENDIAN)); + } catch (Exception e) { + throw new IllegalArgumentException("Failed to parse WKT: " + e.getMessage(), e); } - return WellKnownText.toWKT(geometry); } - public SpatialPoint wkbAsPoint(BytesRef wkb) { - Geometry geometry = WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, wkb.bytes, wkb.offset, wkb.length); - if (geometry instanceof Point point) { - return pointAsPoint(point); - } else { - throw new IllegalArgumentException("Unsupported geometry: " + geometry.type()); - } + public String wkbAsString(BytesRef wkb) { + return WellKnownText.fromWKB(wkb.bytes, wkb.offset, wkb.length); } } From 62d466d2e179f7c7de46458e0442354596298107 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Mon, 8 Jan 2024 16:48:52 +0000 Subject: [PATCH 03/47] Add test matchers for the return values of predicates, and migrate existing uses (#104062) --- .../index/shard/IndexShardIT.java | 12 ++-- .../node/DiscoveryNodeRoleSettingTests.java | 6 +- .../common/regex/RegexTests.java | 42 +++++------ .../query/SearchIndexNameMatcherTests.java | 38 +++++----- .../indices/IndicesModuleTests.java | 22 +++--- .../SniffConnectionStrategyTests.java | 38 +++++----- .../common/inject/ModuleTestCase.java | 6 +- .../elasticsearch/test/LambdaMatchers.java | 70 +++++++++++++++++++ .../test/LambdaMatchersTests.java | 17 +++++ .../action/RollupSearchActionTests.java | 4 +- .../xpack/security/SecurityTests.java | 8 ++- .../support/ApiKeyBoolQueryBuilderTests.java | 6 +- 12 files changed, 188 insertions(+), 81 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java index 52bb5159c9b7d..c0263e273354f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -95,6 +95,8 @@ import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting; import static org.elasticsearch.index.shard.IndexShardTestCase.getTranslog; import static org.elasticsearch.index.shard.IndexShardTestCase.recoverFromStore; +import static org.elasticsearch.test.LambdaMatchers.falseWith; +import static org.elasticsearch.test.LambdaMatchers.trueWith; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; 
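Aside on the trueWith/falseWith matchers these imports pull in: the LambdaMatchers implementation itself is outside this excerpt, so here is a minimal sketch of the idea — class and member names below are assumptions for illustration, not the actual LambdaMatchers code:

    import java.util.function.Predicate;

    import org.hamcrest.Description;
    import org.hamcrest.Matcher;
    import org.hamcrest.TypeSafeMatcher;

    public class PredicateMatchers {
        // Matches a Predicate<T> that returns the expected result for the given argument.
        private static class PredicateMatcher<T> extends TypeSafeMatcher<Predicate<T>> {
            private final T argument;
            private final boolean expected;

            PredicateMatcher(T argument, boolean expected) {
                this.argument = argument;
                this.expected = expected;
            }

            @Override
            protected boolean matchesSafely(Predicate<T> predicate) {
                return predicate.test(argument) == expected;
            }

            @Override
            public void describeTo(Description description) {
                description.appendText("a predicate returning " + expected + " for ").appendValue(argument);
            }
        }

        public static <T> Matcher<Predicate<T>> trueWith(T argument) {
            return new PredicateMatcher<>(argument, true);
        }

        public static <T> Matcher<Predicate<T>> falseWith(T argument) {
            return new PredicateMatcher<>(argument, false);
        }
    }

The payoff of assertThat(predicate, trueWith(x)) over assertTrue(predicate.test(x)) is the failure output: Hamcrest describes the expected and actual behaviour of the predicate instead of throwing a bare AssertionError, which is what motivates the mechanical migration in the hunks that follow.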
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; @@ -167,13 +169,13 @@ public void testDurableFlagHasEffect() { } }; setDurability(shard, Translog.Durability.REQUEST); - assertFalse(needsSync.test(translog)); + assertThat(needsSync, falseWith(translog)); setDurability(shard, Translog.Durability.ASYNC); prepareIndex("test").setId("2").setSource("{}", XContentType.JSON).get(); - assertTrue(needsSync.test(translog)); + assertThat(needsSync, trueWith(translog)); setDurability(shard, Translog.Durability.REQUEST); client().prepareDelete("test", "1").get(); - assertFalse(needsSync.test(translog)); + assertThat(needsSync, falseWith(translog)); setDurability(shard, Translog.Durability.ASYNC); client().prepareDelete("test", "2").get(); @@ -185,7 +187,7 @@ public void testDurableFlagHasEffect() { .add(client().prepareDelete("test", "1")) .get() ); - assertFalse(needsSync.test(translog)); + assertThat(needsSync, falseWith(translog)); setDurability(shard, Translog.Durability.ASYNC); assertNoFailures( @@ -195,7 +197,7 @@ public void testDurableFlagHasEffect() { .get() ); setDurability(shard, Translog.Durability.REQUEST); - assertTrue(needsSync.test(translog)); + assertThat(needsSync, trueWith(translog)); } private void setDurability(IndexShard shard, Translog.Durability durability) { diff --git a/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeRoleSettingTests.java b/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeRoleSettingTests.java index cbd07f3410910..6389f94df0be1 100644 --- a/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeRoleSettingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeRoleSettingTests.java @@ -14,6 +14,8 @@ import java.util.Set; import java.util.function.Predicate; +import static org.elasticsearch.test.LambdaMatchers.falseWith; +import static org.elasticsearch.test.LambdaMatchers.trueWith; import static org.elasticsearch.test.NodeRoles.addRoles; import static org.elasticsearch.test.NodeRoles.nonDataNode; import static org.elasticsearch.test.NodeRoles.onlyRole; @@ -40,10 +42,10 @@ public void testIsRemoteClusterClient() { } private void runRoleTest(final Predicate predicate, final DiscoveryNodeRole role) { - assertTrue(predicate.test(onlyRole(role))); + assertThat(predicate, trueWith(onlyRole(role))); assertThat(DiscoveryNode.getRolesFromSettings(onlyRole(role)), hasItem(role)); - assertFalse(predicate.test(removeRoles(Set.of(role)))); + assertThat(predicate, falseWith(removeRoles(Set.of(role)))); assertThat(DiscoveryNode.getRolesFromSettings(removeRoles(Set.of(role))), not(hasItem(role))); } diff --git a/server/src/test/java/org/elasticsearch/common/regex/RegexTests.java b/server/src/test/java/org/elasticsearch/common/regex/RegexTests.java index f7667a948c048..f010b233f40c2 100644 --- a/server/src/test/java/org/elasticsearch/common/regex/RegexTests.java +++ b/server/src/test/java/org/elasticsearch/common/regex/RegexTests.java @@ -16,6 +16,8 @@ import java.util.Random; import java.util.regex.Pattern; +import static org.elasticsearch.test.LambdaMatchers.falseWith; +import static org.elasticsearch.test.LambdaMatchers.trueWith; import static org.hamcrest.Matchers.equalTo; public class RegexTests extends ESTestCase { @@ -213,25 +215,25 @@ private void assertMatchesNone(Automaton automaton, String... 
strings) { } public void testSimpleMatcher() { - assertFalse(Regex.simpleMatcher((String[]) null).test("abc")); - assertFalse(Regex.simpleMatcher().test("abc")); - assertTrue(Regex.simpleMatcher("abc").test("abc")); - assertFalse(Regex.simpleMatcher("abc").test("abd")); - - assertTrue(Regex.simpleMatcher("abc", "xyz").test("abc")); - assertTrue(Regex.simpleMatcher("abc", "xyz").test("xyz")); - assertFalse(Regex.simpleMatcher("abc", "xyz").test("abd")); - assertFalse(Regex.simpleMatcher("abc", "xyz").test("xyy")); - - assertTrue(Regex.simpleMatcher("abc", "*").test("abc")); - assertTrue(Regex.simpleMatcher("abc", "*").test("abd")); - - assertTrue(Regex.simpleMatcher("a*c").test("abc")); - assertFalse(Regex.simpleMatcher("a*c").test("abd")); - - assertTrue(Regex.simpleMatcher("a*c", "x*z").test("abc")); - assertTrue(Regex.simpleMatcher("a*c", "x*z").test("xyz")); - assertFalse(Regex.simpleMatcher("a*c", "x*z").test("abd")); - assertFalse(Regex.simpleMatcher("a*c", "x*z").test("xyy")); + assertThat(Regex.simpleMatcher((String[]) null), falseWith("abc")); + assertThat(Regex.simpleMatcher(), falseWith("abc")); + assertThat(Regex.simpleMatcher("abc"), trueWith("abc")); + assertThat(Regex.simpleMatcher("abc"), falseWith("abd")); + + assertThat(Regex.simpleMatcher("abc", "xyz"), trueWith("abc")); + assertThat(Regex.simpleMatcher("abc", "xyz"), trueWith("xyz")); + assertThat(Regex.simpleMatcher("abc", "xyz"), falseWith("abd")); + assertThat(Regex.simpleMatcher("abc", "xyz"), falseWith("xyy")); + + assertThat(Regex.simpleMatcher("abc", "*"), trueWith("abc")); + assertThat(Regex.simpleMatcher("abc", "*"), trueWith("abd")); + + assertThat(Regex.simpleMatcher("a*c"), trueWith("abc")); + assertThat(Regex.simpleMatcher("a*c"), falseWith("abd")); + + assertThat(Regex.simpleMatcher("a*c", "x*z"), trueWith("abc")); + assertThat(Regex.simpleMatcher("a*c", "x*z"), trueWith("xyz")); + assertThat(Regex.simpleMatcher("a*c", "x*z"), falseWith("abd")); + assertThat(Regex.simpleMatcher("a*c", "x*z"), falseWith("xyy")); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchIndexNameMatcherTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchIndexNameMatcherTests.java index bf66f4c93d6ef..b038fbb911f00 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchIndexNameMatcherTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchIndexNameMatcherTests.java @@ -19,6 +19,8 @@ import org.elasticsearch.test.ESTestCase; import org.junit.Before; +import static org.elasticsearch.test.LambdaMatchers.falseWith; +import static org.elasticsearch.test.LambdaMatchers.trueWith; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -46,31 +48,31 @@ private static IndexMetadata.Builder indexBuilder(String index) { } public void testLocalIndex() { - assertTrue(matcher.test("index1")); - assertTrue(matcher.test("ind*x1")); - assertFalse(matcher.test("index2")); + assertThat(matcher, trueWith("index1")); + assertThat(matcher, trueWith("ind*x1")); + assertThat(matcher, falseWith("index2")); - assertTrue(matcher.test("alias")); - assertTrue(matcher.test("*lias")); + assertThat(matcher, trueWith("alias")); + assertThat(matcher, trueWith("*lias")); - assertFalse(matcher.test("cluster:index1")); + assertThat(matcher, falseWith("cluster:index1")); } public void testRemoteIndex() { - assertTrue(remoteMatcher.test("cluster:index1")); - assertTrue(remoteMatcher.test("cluster:ind*x1")); - assertTrue(remoteMatcher.test("*luster:ind*x1")); - 
assertFalse(remoteMatcher.test("cluster:index2")); + assertThat(remoteMatcher, trueWith("cluster:index1")); + assertThat(remoteMatcher, trueWith("cluster:ind*x1")); + assertThat(remoteMatcher, trueWith("*luster:ind*x1")); + assertThat(remoteMatcher, falseWith("cluster:index2")); - assertTrue(remoteMatcher.test("cluster:alias")); - assertTrue(remoteMatcher.test("cluster:*lias")); + assertThat(remoteMatcher, trueWith("cluster:alias")); + assertThat(remoteMatcher, trueWith("cluster:*lias")); - assertFalse(remoteMatcher.test("index1")); - assertFalse(remoteMatcher.test("alias")); + assertThat(remoteMatcher, falseWith("index1")); + assertThat(remoteMatcher, falseWith("alias")); - assertFalse(remoteMatcher.test("*index1")); - assertFalse(remoteMatcher.test("*alias")); - assertFalse(remoteMatcher.test("cluster*")); - assertFalse(remoteMatcher.test("cluster*index1")); + assertThat(remoteMatcher, falseWith("*index1")); + assertThat(remoteMatcher, falseWith("*alias")); + assertThat(remoteMatcher, falseWith("cluster*")); + assertThat(remoteMatcher, falseWith("cluster*index1")); } } diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java index 2ce85a598541e..1648e38a3f0b9 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java @@ -45,6 +45,8 @@ import java.util.function.Function; import java.util.function.Predicate; +import static org.elasticsearch.test.LambdaMatchers.falseWith; +import static org.elasticsearch.test.LambdaMatchers.trueWith; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.instanceOf; @@ -239,7 +241,7 @@ public void testFieldNamesIsLastWithPlugins() { } public void testGetFieldFilter() { - List mapperPlugins = Arrays.asList(new MapperPlugin() { + List mapperPlugins = List.of(new MapperPlugin() { }, new MapperPlugin() { @Override public Function> getFieldFilter() { @@ -262,16 +264,16 @@ public Function> getFieldFilter() { Function> fieldFilter = mapperRegistry.getFieldFilter(); assertNotSame(MapperPlugin.NOOP_FIELD_FILTER, fieldFilter); - assertFalse(fieldFilter.apply("hidden_index").test(randomAlphaOfLengthBetween(3, 5))); - assertTrue(fieldFilter.apply(randomAlphaOfLengthBetween(3, 5)).test(randomAlphaOfLengthBetween(3, 5))); + assertThat(fieldFilter.apply("hidden_index"), falseWith(randomAlphaOfLengthBetween(3, 5))); + assertThat(fieldFilter.apply(randomAlphaOfLengthBetween(3, 5)), trueWith(randomAlphaOfLengthBetween(3, 5))); - assertFalse(fieldFilter.apply(randomAlphaOfLengthBetween(3, 5)).test("hidden_field")); - assertFalse(fieldFilter.apply("filtered").test(randomAlphaOfLengthBetween(3, 5))); - assertFalse(fieldFilter.apply("filtered").test("hidden_field")); - assertTrue(fieldFilter.apply("filtered").test("visible")); - assertFalse(fieldFilter.apply("hidden_index").test("visible")); - assertTrue(fieldFilter.apply(randomAlphaOfLengthBetween(3, 5)).test("visible")); - assertFalse(fieldFilter.apply("hidden_index").test("hidden_field")); + assertThat(fieldFilter.apply(randomAlphaOfLengthBetween(3, 5)), falseWith("hidden_field")); + assertThat(fieldFilter.apply("filtered"), falseWith(randomAlphaOfLengthBetween(3, 5))); + assertThat(fieldFilter.apply("filtered"), falseWith("hidden_field")); + assertThat(fieldFilter.apply("filtered"), trueWith("visible")); + 
assertThat(fieldFilter.apply("hidden_index"), falseWith("visible")); + assertThat(fieldFilter.apply(randomAlphaOfLengthBetween(3, 5)), trueWith("visible")); + assertThat(fieldFilter.apply("hidden_index"), falseWith("hidden_field")); } public void testDefaultFieldFilterIsNoOp() { diff --git a/server/src/test/java/org/elasticsearch/transport/SniffConnectionStrategyTests.java b/server/src/test/java/org/elasticsearch/transport/SniffConnectionStrategyTests.java index ddee1ff4d690a..121e3ec1d35de 100644 --- a/server/src/test/java/org/elasticsearch/transport/SniffConnectionStrategyTests.java +++ b/server/src/test/java/org/elasticsearch/transport/SniffConnectionStrategyTests.java @@ -55,6 +55,8 @@ import java.util.function.Predicate; import java.util.function.Supplier; +import static org.elasticsearch.test.LambdaMatchers.falseWith; +import static org.elasticsearch.test.LambdaMatchers.trueWith; import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.endsWith; @@ -1063,7 +1065,7 @@ public void testGetNodePredicateNodeRoles() { Predicate nodePredicate = SniffConnectionStrategy.getNodePredicate(Settings.EMPTY); { DiscoveryNode all = DiscoveryNodeUtils.create("id", address); - assertTrue(nodePredicate.test(all)); + assertThat(nodePredicate, trueWith(all)); } { DiscoveryNode dataMaster = DiscoveryNodeUtils.create( @@ -1072,7 +1074,7 @@ public void testGetNodePredicateNodeRoles() { Collections.emptyMap(), Set.of(DiscoveryNodeRole.DATA_ROLE, DiscoveryNodeRole.MASTER_ROLE) ); - assertTrue(nodePredicate.test(dataMaster)); + assertThat(nodePredicate, trueWith(dataMaster)); } { DiscoveryNode dedicatedMaster = DiscoveryNodeUtils.create( @@ -1081,7 +1083,7 @@ public void testGetNodePredicateNodeRoles() { Collections.emptyMap(), Set.of(DiscoveryNodeRole.MASTER_ROLE) ); - assertFalse(nodePredicate.test(dedicatedMaster)); + assertThat(nodePredicate, falseWith(dedicatedMaster)); } { DiscoveryNode dedicatedIngest = DiscoveryNodeUtils.create( @@ -1090,7 +1092,7 @@ public void testGetNodePredicateNodeRoles() { Collections.emptyMap(), Set.of(DiscoveryNodeRole.INGEST_ROLE) ); - assertTrue(nodePredicate.test(dedicatedIngest)); + assertThat(nodePredicate, trueWith(dedicatedIngest)); } { DiscoveryNode masterIngest = DiscoveryNodeUtils.create( @@ -1099,7 +1101,7 @@ public void testGetNodePredicateNodeRoles() { Collections.emptyMap(), Set.of(DiscoveryNodeRole.INGEST_ROLE, DiscoveryNodeRole.MASTER_ROLE) ); - assertTrue(nodePredicate.test(masterIngest)); + assertThat(nodePredicate, trueWith(masterIngest)); } { DiscoveryNode dedicatedData = DiscoveryNodeUtils.create( @@ -1108,7 +1110,7 @@ public void testGetNodePredicateNodeRoles() { Collections.emptyMap(), Set.of(DiscoveryNodeRole.DATA_ROLE) ); - assertTrue(nodePredicate.test(dedicatedData)); + assertThat(nodePredicate, trueWith(dedicatedData)); } { DiscoveryNode ingestData = DiscoveryNodeUtils.create( @@ -1117,11 +1119,11 @@ public void testGetNodePredicateNodeRoles() { Collections.emptyMap(), Set.of(DiscoveryNodeRole.DATA_ROLE, DiscoveryNodeRole.INGEST_ROLE) ); - assertTrue(nodePredicate.test(ingestData)); + assertThat(nodePredicate, trueWith(ingestData)); } { DiscoveryNode coordOnly = DiscoveryNodeUtils.create("id", address, Collections.emptyMap(), Set.of()); - assertTrue(nodePredicate.test(coordOnly)); + assertThat(nodePredicate, trueWith(coordOnly)); } } @@ -1143,18 +1145,18 @@ public void testGetNodePredicateNodeAttrs() { Predicate nodePredicate = 
SniffConnectionStrategy.getNodePredicate(settings); { DiscoveryNode nonGatewayNode = DiscoveryNodeUtils.create("id", address, Collections.singletonMap("gateway", "false"), roles); - assertFalse(nodePredicate.test(nonGatewayNode)); - assertTrue(SniffConnectionStrategy.getNodePredicate(Settings.EMPTY).test(nonGatewayNode)); + assertThat(nodePredicate, falseWith(nonGatewayNode)); + assertThat(SniffConnectionStrategy.getNodePredicate(Settings.EMPTY), trueWith(nonGatewayNode)); } { DiscoveryNode gatewayNode = DiscoveryNodeUtils.create("id", address, Collections.singletonMap("gateway", "true"), roles); - assertTrue(nodePredicate.test(gatewayNode)); - assertTrue(SniffConnectionStrategy.getNodePredicate(Settings.EMPTY).test(gatewayNode)); + assertThat(nodePredicate, trueWith(gatewayNode)); + assertThat(SniffConnectionStrategy.getNodePredicate(Settings.EMPTY), trueWith(gatewayNode)); } { DiscoveryNode noAttrNode = DiscoveryNodeUtils.create("id", address, Collections.emptyMap(), roles); - assertFalse(nodePredicate.test(noAttrNode)); - assertTrue(SniffConnectionStrategy.getNodePredicate(Settings.EMPTY).test(noAttrNode)); + assertThat(nodePredicate, falseWith(noAttrNode)); + assertThat(SniffConnectionStrategy.getNodePredicate(Settings.EMPTY), trueWith(noAttrNode)); } } @@ -1171,7 +1173,7 @@ public void testGetNodePredicatesCombination() { Collections.singletonMap("gateway", "true"), dedicatedMasterRoles ); - assertFalse(nodePredicate.test(node)); + assertThat(nodePredicate, falseWith(node)); } { DiscoveryNode node = DiscoveryNodeUtils.create( @@ -1180,7 +1182,7 @@ public void testGetNodePredicatesCombination() { Collections.singletonMap("gateway", "false"), dedicatedMasterRoles ); - assertFalse(nodePredicate.test(node)); + assertThat(nodePredicate, falseWith(node)); } { DiscoveryNode node = DiscoveryNodeUtils.create( @@ -1189,11 +1191,11 @@ public void testGetNodePredicatesCombination() { Collections.singletonMap("gateway", "false"), dedicatedMasterRoles ); - assertFalse(nodePredicate.test(node)); + assertThat(nodePredicate, falseWith(node)); } { DiscoveryNode node = DiscoveryNodeUtils.create("id", address, Collections.singletonMap("gateway", "true"), allRoles); - assertTrue(nodePredicate.test(node)); + assertThat(nodePredicate, trueWith(node)); } } diff --git a/test/framework/src/main/java/org/elasticsearch/common/inject/ModuleTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/inject/ModuleTestCase.java index df8c0e9ad4b32..6f378f263490c 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/inject/ModuleTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/common/inject/ModuleTestCase.java @@ -17,6 +17,8 @@ import java.util.List; import java.util.function.Predicate; +import static org.elasticsearch.test.LambdaMatchers.trueWith; + /** * Base testcase for testing {@link Module} implementations. 
 */
@@ -44,13 +46,13 @@ private static void assertInstanceBindingWithAnnotation(
         if (element instanceof InstanceBinding binding) {
             if (to.equals(binding.getKey().getTypeLiteral().getType())) {
                 if (annotation == null || annotation.equals(binding.getKey().getAnnotationType())) {
-                    assertTrue(tester.test(to.cast(binding.getInstance())));
+                    assertThat(tester, trueWith(to.cast(binding.getInstance())));
                     return;
                 }
             }
         } else if (element instanceof ProviderInstanceBinding binding) {
             if (to.equals(binding.getKey().getTypeLiteral().getType())) {
-                assertTrue(tester.test(to.cast(binding.getProviderInstance().get())));
+                assertThat(tester, trueWith(to.cast(binding.getProviderInstance().get())));
                 return;
             }
         }
diff --git a/test/framework/src/main/java/org/elasticsearch/test/LambdaMatchers.java b/test/framework/src/main/java/org/elasticsearch/test/LambdaMatchers.java
index c41668c6541f2..674d3e87a008b 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/LambdaMatchers.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/LambdaMatchers.java
@@ -18,6 +18,7 @@
 import java.util.Iterator;
 import java.util.List;
 import java.util.function.Function;
+import java.util.function.Predicate;
 
 public class LambdaMatchers {
 
@@ -188,4 +189,73 @@ public void describeTo(Description description) {
     public static <T, U> Matcher<T[]> transformedArrayItemsMatch(Function<T, U> function, Matcher<U[]> matcher) {
         return new ArrayTransformMatcher<>(matcher, function);
     }
+
+    private static class PredicateMatcher<T> extends BaseMatcher<Predicate<T>> {
+        final T item;
+
+        private PredicateMatcher(T item) {
+            this.item = item;
+        }
+
+        @Override
+        @SuppressWarnings({ "rawtypes" })
+        public boolean matches(Object actual) {
+            Predicate p = (Predicate) actual;
+            try {
+                return predicateMatches(p);
+            } catch (ClassCastException e) {
+                return false;
+            }
+        }
+
+        @SuppressWarnings({ "rawtypes", "unchecked" })
+        protected boolean predicateMatches(Predicate predicate) {
+            return predicate.test(item);
+        }
+
+        @Override
+        @SuppressWarnings({ "rawtypes", "unchecked" })
+        public void describeMismatch(Object item, Description description) {
+            Predicate p = (Predicate) item;
+            try {
+                boolean result = p.test(this.item);
+                description.appendText("predicate with argument ").appendValue(this.item).appendText(" evaluated to ").appendValue(result);
+            } catch (ClassCastException e) {
+                description.appendText("predicate did not accept argument of type ")
+                    .appendValue(this.item.getClass())
+                    .appendText(" (")
+                    .appendText(e.getMessage())
+                    .appendText(")");
+            }
+        }
+
+        @Override
+        public void describeTo(Description description) {
+            description.appendText("predicate evaluates to <true> with argument ").appendValue(item);
+        }
+    }
+
+    public static <T> Matcher<Predicate<T>> trueWith(T item) {
+        return new PredicateMatcher<>(item);
+    }
+
+    private static class PredicateFalseMatcher<T> extends PredicateMatcher<T> {
+        private PredicateFalseMatcher(T item) {
+            super(item);
+        }
+
+        @SuppressWarnings({ "rawtypes", "unchecked" })
+        protected boolean predicateMatches(Predicate predicate) {
+            return predicate.test(item) == false;
+        }
+
+        @Override
+        public void describeTo(Description description) {
+            description.appendText("predicate evaluates to <false> with argument ").appendValue(item);
+        }
+    }
+
+    public static <T> Matcher<Predicate<T>> falseWith(T item) {
+        return new PredicateFalseMatcher<>(item);
+    }
 }
diff --git a/test/framework/src/test/java/org/elasticsearch/test/LambdaMatchersTests.java b/test/framework/src/test/java/org/elasticsearch/test/LambdaMatchersTests.java
index 5259a6bd6bbd1..ec9f6c507a972 100644
--- a/test/framework/src/test/java/org/elasticsearch/test/LambdaMatchersTests.java
+++ b/test/framework/src/test/java/org/elasticsearch/test/LambdaMatchersTests.java
@@ -13,9 +13,11 @@
 
 import java.util.List;
 
+import static org.elasticsearch.test.LambdaMatchers.falseWith;
 import static org.elasticsearch.test.LambdaMatchers.transformedArrayItemsMatch;
 import static org.elasticsearch.test.LambdaMatchers.transformedItemsMatch;
 import static org.elasticsearch.test.LambdaMatchers.transformedMatch;
+import static org.elasticsearch.test.LambdaMatchers.trueWith;
 import static org.hamcrest.Matchers.arrayContaining;
 import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
 import static org.hamcrest.Matchers.containsInAnyOrder;
@@ -97,6 +99,21 @@ public void testArrayTransformDescription() {
         );
     }
 
+    public void testPredicateMatcher() {
+        assertThat(t -> true, trueWith(new Object()));
+        assertThat(t -> true, trueWith(null));
+        assertThat(t -> false, falseWith(new Object()));
+        assertThat(t -> false, falseWith(null));
+
+        assertMismatch(t -> false, trueWith("obj"), equalTo("predicate with argument \"obj\" evaluated to <false>"));
+        assertMismatch(t -> true, falseWith("obj"), equalTo("predicate with argument \"obj\" evaluated to <true>"));
+    }
+
+    public void testPredicateMatcherDescription() {
+        assertDescribeTo(trueWith("obj"), equalTo("predicate evaluates to <true> with argument \"obj\""));
+        assertDescribeTo(falseWith("obj"), equalTo("predicate evaluates to <false> with argument \"obj\""));
+    }
+
     static <T> void assertMismatch(T v, Matcher<? super T> matcher, Matcher<String> mismatchDescriptionMatcher) {
         assertThat(v, not(matcher));
         StringDescription description = new StringDescription();
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchActionTests.java
index 830f0c09506e6..91ed19cb5389c 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchActionTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchActionTests.java
@@ -11,9 +11,11 @@
 import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege;
 import org.elasticsearch.xpack.core.security.support.Automatons;
 
+import static org.elasticsearch.test.LambdaMatchers.trueWith;
+
 public class RollupSearchActionTests extends ESTestCase {
     public void testIndexReadPrivilegeCanPerformRollupSearchAction() {
-        assertTrue(Automatons.predicate(IndexPrivilege.READ.getAutomaton()).test(RollupSearchAction.NAME));
+        assertThat(Automatons.predicate(IndexPrivilege.READ.getAutomaton()), trueWith(RollupSearchAction.NAME));
    }
 }
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java
index 18929c70cbe7d..022f7228a056f 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java
@@ -123,6 +123,8 @@
 import java.util.stream.Stream;
 
 import static java.util.Collections.emptyMap;
+import static org.elasticsearch.test.LambdaMatchers.falseWith;
+import static org.elasticsearch.test.LambdaMatchers.trueWith;
 import static org.elasticsearch.xpack.core.security.authc.RealmSettings.getFullSettingKey;
 import static
org.elasticsearch.xpack.security.operator.OperatorPrivileges.NOOP_OPERATOR_PRIVILEGES_SERVICE; import static org.elasticsearch.xpack.security.operator.OperatorPrivileges.OPERATOR_PRIVILEGES_ENABLED; @@ -483,10 +485,10 @@ public void testGetFieldFilterSecurityEnabled() throws Exception { IndicesAccessControl indicesAccessControl = new IndicesAccessControl(true, permissionsMap); securityContext.putIndicesAccessControl(indicesAccessControl); - assertTrue(fieldFilter.apply("index_granted").test("field_granted")); - assertFalse(fieldFilter.apply("index_granted").test(randomAlphaOfLengthBetween(3, 10))); + assertThat(fieldFilter.apply("index_granted"), trueWith("field_granted")); + assertThat(fieldFilter.apply("index_granted"), falseWith(randomAlphaOfLengthBetween(3, 10))); assertEquals(MapperPlugin.NOOP_FIELD_PREDICATE, fieldFilter.apply("index_granted_all_permissions")); - assertTrue(fieldFilter.apply("index_granted_all_permissions").test(randomAlphaOfLengthBetween(3, 10))); + assertThat(fieldFilter.apply("index_granted_all_permissions"), trueWith(randomAlphaOfLengthBetween(3, 10))); assertEquals(MapperPlugin.NOOP_FIELD_PREDICATE, fieldFilter.apply("index_other")); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java index d7f994d7499fe..477409f22369f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java @@ -37,6 +37,8 @@ import java.util.List; import java.util.function.Predicate; +import static org.elasticsearch.test.LambdaMatchers.falseWith; +import static org.elasticsearch.test.LambdaMatchers.trueWith; import static org.elasticsearch.xpack.security.support.ApiKeyFieldNameTranslators.FIELD_NAME_TRANSLATORS; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; @@ -318,10 +320,10 @@ private void testAllowedIndexFieldName(Predicate predicate) { "metadata_flattened." + randomAlphaOfLengthBetween(1, 10), "creator." + randomAlphaOfLengthBetween(1, 10) ); - assertTrue(predicate.test(allowedField)); + assertThat(predicate, trueWith(allowedField)); final String disallowedField = randomBoolean() ? (randomAlphaOfLengthBetween(1, 3) + allowedField) : (allowedField.substring(1)); - assertFalse(predicate.test(disallowedField)); + assertThat(predicate, falseWith(disallowedField)); } private void assertCommonFilterQueries(ApiKeyBoolQueryBuilder qb, Authentication authentication) { From 22934b8063f14fe6363318d653bb7524f71ca2ce Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 8 Jan 2024 08:52:51 -0800 Subject: [PATCH 04/47] AsyncOperator#isFinished must never return true on failure (#104029) Enrich IT tests can return OK with some missing results instead of Failure when the enrich lookup hits circuit breakers. This is due to a race condition in isFinished and onFailure within the AsyncOperator. When an async lookup fails, we set the exception and then discard pages. Unfortunately, in the isFinished method, we perform the checks in the same order: first, we check for failure, and then we check for outstanding pages. If there is a long pause between these steps, isFinished might not detect the failure but see no outstanding pages, leading it to return true despite the presence of a failure. 
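Roughly, the bad interleaving looks like this (an illustrative Java sketch, not the operator's actual code; noOutstandingPages() stands in for the real persisted-checkpoint comparison):

    // Failure path, simplified: record the exception first, then discard pages.
    public void onFailure(Exception e) {
        failure.compareAndSet(null, e);  // step 1: remember the failure
        discardPages();                  // step 2: outstanding pages drop to zero
    }

    // Old isFinished(), checking in the same order:
    public boolean isFinished() {
        checkFailure();                           // runs before step 1: sees no failure
        // ... onFailure() completes both steps here ...
        return finished && noOutstandingPages();  // sees zero pages: returns true
    }

Checking completion first closes the window: by the time the outstanding pages are gone, the failure has already been recorded, so a completion check followed by checkFailure() cannot miss it.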
This change swaps the order of the checks. --- docs/changelog/104029.yaml | 5 ++ .../compute/operator/AsyncOperator.java | 8 ++- .../compute/operator/AsyncOperatorTests.java | 49 +++++++++++++++++++ 3 files changed, 60 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/104029.yaml diff --git a/docs/changelog/104029.yaml b/docs/changelog/104029.yaml new file mode 100644 index 0000000000000..2b74d3b634dba --- /dev/null +++ b/docs/changelog/104029.yaml @@ -0,0 +1,5 @@ +pr: 104029 +summary: '`AsyncOperator#isFinished` must never return true on failure' +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java index 98ba37e3f32d1..bcab6a39496fd 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java @@ -181,8 +181,12 @@ public void finish() { @Override public boolean isFinished() { - checkFailure(); - return finished && checkpoint.getPersistedCheckpoint() == checkpoint.getMaxSeqNo(); + if (finished && checkpoint.getPersistedCheckpoint() == checkpoint.getMaxSeqNo()) { + checkFailure(); + return true; + } else { + return false; + } } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java index c127ac2cf9507..a4370face45ad 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java @@ -43,6 +43,8 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.LongStream; @@ -270,6 +272,53 @@ protected void doClose() { } } + public void testIsFinished() { + int iters = iterations(10, 10_000); + BlockFactory blockFactory = blockFactory(); + for (int i = 0; i < iters; i++) { + DriverContext driverContext = new DriverContext(blockFactory.bigArrays(), blockFactory); + CyclicBarrier barrier = new CyclicBarrier(2); + AsyncOperator asyncOperator = new AsyncOperator(driverContext, between(1, 10)) { + @Override + protected void performAsync(Page inputPage, ActionListener listener) { + ActionRunnable command = new ActionRunnable<>(listener) { + @Override + protected void doRun() { + try { + barrier.await(10, TimeUnit.SECONDS); + } catch (Exception e) { + throw new AssertionError(e); + } + listener.onFailure(new ElasticsearchException("simulated")); + } + }; + threadPool.executor(ESQL_TEST_EXECUTOR).execute(command); + } + + @Override + protected void doClose() { + + } + }; + asyncOperator.addInput(new Page(blockFactory.newConstantIntBlockWith(randomInt(), between(1, 10)))); + asyncOperator.finish(); + try { + barrier.await(10, TimeUnit.SECONDS); + } catch (Exception e) { + throw new AssertionError(e); + } + int numChecks = between(10, 100); + while (--numChecks >= 0) { + try { + assertFalse("must not finished or failed", asyncOperator.isFinished()); + } catch (ElasticsearchException e) { + assertThat(e.getMessage(), 
equalTo("simulated")); + break; + } + } + } + } + static class LookupService { private final ThreadPool threadPool; private final Map dict; From 4cfeb4aa8dcfdbe0ea319a0170b53920805cf6fa Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 8 Jan 2024 18:14:44 +0100 Subject: [PATCH 05/47] Fix more SearchResponse leaks (#104069) Some more obvious leaks in tests fixed. --- .../action/search/ExpandSearchPhaseTests.java | 295 +++++++++------- .../search/FetchLookupFieldsPhaseTests.java | 324 +++++++++--------- .../action/search/FetchSearchPhaseTests.java | 24 ++ .../action/search/MockSearchPhaseContext.java | 5 +- .../test/seektracker/SeekTrackerPluginIT.java | 2 +- .../DownsampleActionSingleNodeTests.java | 10 +- .../transforms/pivot/PivotTests.java | 3 +- .../execution/TriggeredWatchStore.java | 5 +- .../execution/TriggeredWatchStoreTests.java | 2 +- 9 files changed, 375 insertions(+), 295 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java index 43d0edffced2b..63ac832b0723b 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java @@ -55,74 +55,86 @@ public void testCollapseSingleHit() throws IOException { Map runtimeMappings = randomBoolean() ? emptyMap() : AbstractSearchTestCase.randomRuntimeMappings(); final MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); - String collapseValue = randomBoolean() ? null : "boom"; + try { + String collapseValue = randomBoolean() ? null : "boom"; - mockSearchPhaseContext.getRequest() - .source( - new SearchSourceBuilder().collapse( - new CollapseBuilder("someField").setInnerHits( - IntStream.range(0, numInnerHits).mapToObj(hitNum -> new InnerHitBuilder().setName("innerHit" + hitNum)).toList() + mockSearchPhaseContext.getRequest() + .source( + new SearchSourceBuilder().collapse( + new CollapseBuilder("someField").setInnerHits( + IntStream.range(0, numInnerHits) + .mapToObj(hitNum -> new InnerHitBuilder().setName("innerHit" + hitNum)) + .toList() + ) ) - ) - ); - mockSearchPhaseContext.getRequest().source().query(originalQuery).runtimeMappings(runtimeMappings); - mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { - @Override - void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { - assertTrue(executedMultiSearch.compareAndSet(false, true)); - assertEquals(numInnerHits, request.requests().size()); - SearchRequest searchRequest = request.requests().get(0); - assertTrue(searchRequest.source().query() instanceof BoolQueryBuilder); + ); + mockSearchPhaseContext.getRequest().source().query(originalQuery).runtimeMappings(runtimeMappings); + mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { + @Override + void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { + assertTrue(executedMultiSearch.compareAndSet(false, true)); + assertEquals(numInnerHits, request.requests().size()); + SearchRequest searchRequest = request.requests().get(0); + assertTrue(searchRequest.source().query() instanceof BoolQueryBuilder); - BoolQueryBuilder groupBuilder = (BoolQueryBuilder) searchRequest.source().query(); - if (collapseValue == null) { - assertThat(groupBuilder.mustNot(), Matchers.contains(QueryBuilders.existsQuery("someField"))); - } 
else { - assertThat(groupBuilder.filter(), Matchers.contains(QueryBuilders.matchQuery("someField", "boom"))); - } - if (originalQuery != null) { - assertThat(groupBuilder.must(), Matchers.contains(QueryBuilders.termQuery("foo", "bar"))); - } - assertArrayEquals(mockSearchPhaseContext.getRequest().indices(), searchRequest.indices()); - assertThat(searchRequest.source().runtimeMappings(), equalTo(runtimeMappings)); + BoolQueryBuilder groupBuilder = (BoolQueryBuilder) searchRequest.source().query(); + if (collapseValue == null) { + assertThat(groupBuilder.mustNot(), Matchers.contains(QueryBuilders.existsQuery("someField"))); + } else { + assertThat(groupBuilder.filter(), Matchers.contains(QueryBuilders.matchQuery("someField", "boom"))); + } + if (originalQuery != null) { + assertThat(groupBuilder.must(), Matchers.contains(QueryBuilders.termQuery("foo", "bar"))); + } + assertArrayEquals(mockSearchPhaseContext.getRequest().indices(), searchRequest.indices()); + assertThat(searchRequest.source().runtimeMappings(), equalTo(runtimeMappings)); - List mSearchResponses = new ArrayList<>(numInnerHits); - for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) { - mockSearchPhaseContext.sendSearchResponse( - new SearchResponseSections(collapsedHits.get(innerHitNum), null, null, false, null, null, 1), - null + List mSearchResponses = new ArrayList<>(numInnerHits); + for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) { + mockSearchPhaseContext.sendSearchResponse( + new SearchResponseSections(collapsedHits.get(innerHitNum), null, null, false, null, null, 1), + null + ); + mSearchResponses.add(new MultiSearchResponse.Item(mockSearchPhaseContext.searchResponse.get(), null)); + } + + ActionListener.respondAndRelease( + listener, + new MultiSearchResponse(mSearchResponses.toArray(new MultiSearchResponse.Item[0]), randomIntBetween(1, 10000)) ); - mSearchResponses.add(new MultiSearchResponse.Item(mockSearchPhaseContext.searchResponse.get(), null)); } + }; + + SearchHit hit = new SearchHit(1, "ID"); + hit.setDocumentField("someField", new DocumentField("someField", Collections.singletonList(collapseValue))); + SearchHits hits = new SearchHits(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); + ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { + @Override + public void run() { + mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null); + } + }); + + phase.run(); + mockSearchPhaseContext.assertNoFailure(); + SearchResponse theResponse = mockSearchPhaseContext.searchResponse.get(); + assertNotNull(theResponse); + assertEquals(numInnerHits, theResponse.getHits().getHits()[0].getInnerHits().size()); - ActionListener.respondAndRelease( - listener, - new MultiSearchResponse(mSearchResponses.toArray(new MultiSearchResponse.Item[0]), randomIntBetween(1, 10000)) + for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) { + assertSame( + theResponse.getHits().getHits()[0].getInnerHits().get("innerHit" + innerHitNum), + collapsedHits.get(innerHitNum) ); } - }; - SearchHit hit = new SearchHit(1, "ID"); - hit.setDocumentField("someField", new DocumentField("someField", Collections.singletonList(collapseValue))); - SearchHits hits = new SearchHits(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); - ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { - 
@Override - public void run() { - mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null); + assertTrue(executedMultiSearch.get()); + } finally { + var resp = mockSearchPhaseContext.searchResponse.get(); + if (resp != null) { + resp.decRef(); } - }); - - phase.run(); - mockSearchPhaseContext.assertNoFailure(); - SearchResponse theResponse = mockSearchPhaseContext.searchResponse.get(); - assertNotNull(theResponse); - assertEquals(numInnerHits, theResponse.getHits().getHits()[0].getInnerHits().size()); - - for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) { - assertSame(theResponse.getHits().getHits()[0].getInnerHits().get("innerHit" + innerHitNum), collapsedHits.get(innerHitNum)); } - - assertTrue(executedMultiSearch.get()); } } @@ -194,95 +206,116 @@ public void run() { public void testSkipPhase() throws IOException { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); - mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { - @Override - void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { - fail("no collapsing here"); - } - }; + try { + mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { + @Override + void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { + fail("no collapsing here"); + } + }; - SearchHit hit1 = new SearchHit(1, "ID"); - hit1.setDocumentField("someField", new DocumentField("someField", Collections.singletonList(null))); - SearchHit hit2 = new SearchHit(2, "ID2"); - hit2.setDocumentField("someField", new DocumentField("someField", Collections.singletonList(null))); - SearchHits hits = new SearchHits(new SearchHit[] { hit1, hit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); - ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { - @Override - public void run() { - mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null); + SearchHit hit1 = new SearchHit(1, "ID"); + hit1.setDocumentField("someField", new DocumentField("someField", Collections.singletonList(null))); + SearchHit hit2 = new SearchHit(2, "ID2"); + hit2.setDocumentField("someField", new DocumentField("someField", Collections.singletonList(null))); + SearchHits hits = new SearchHits(new SearchHit[] { hit1, hit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); + ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { + @Override + public void run() { + mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null); + } + }); + phase.run(); + mockSearchPhaseContext.assertNoFailure(); + assertNotNull(mockSearchPhaseContext.searchResponse.get()); + } finally { + var resp = mockSearchPhaseContext.searchResponse.get(); + if (resp != null) { + resp.decRef(); } - }); - phase.run(); - mockSearchPhaseContext.assertNoFailure(); - assertNotNull(mockSearchPhaseContext.searchResponse.get()); + } } public void testSkipExpandCollapseNoHits() throws IOException { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); - mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { - @Override - void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener 
listener) { - fail("expand should not try to send empty multi search request"); - } - }; - mockSearchPhaseContext.getRequest() - .source( - new SearchSourceBuilder().collapse( - new CollapseBuilder("someField").setInnerHits(new InnerHitBuilder().setName("foobarbaz")) - ) - ); + try { + mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { + @Override + void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { + fail("expand should not try to send empty multi search request"); + } + }; + mockSearchPhaseContext.getRequest() + .source( + new SearchSourceBuilder().collapse( + new CollapseBuilder("someField").setInnerHits(new InnerHitBuilder().setName("foobarbaz")) + ) + ); - SearchHits hits = SearchHits.empty(new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f); - ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { - @Override - public void run() { - mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null); + SearchHits hits = SearchHits.empty(new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f); + ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { + @Override + public void run() { + mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null); + } + }); + phase.run(); + mockSearchPhaseContext.assertNoFailure(); + assertNotNull(mockSearchPhaseContext.searchResponse.get()); + } finally { + var resp = mockSearchPhaseContext.searchResponse.get(); + if (resp != null) { + resp.decRef(); } - }); - phase.run(); - mockSearchPhaseContext.assertNoFailure(); - assertNotNull(mockSearchPhaseContext.searchResponse.get()); + } } public void testExpandRequestOptions() throws IOException { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); - boolean version = randomBoolean(); - final boolean seqNoAndTerm = randomBoolean(); + try { + boolean version = randomBoolean(); + final boolean seqNoAndTerm = randomBoolean(); - mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { - @Override - void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { - final QueryBuilder postFilter = QueryBuilders.existsQuery("foo"); - assertTrue(request.requests().stream().allMatch((r) -> "foo".equals(r.preference()))); - assertTrue(request.requests().stream().allMatch((r) -> "baz".equals(r.routing()))); - assertTrue(request.requests().stream().allMatch((r) -> version == r.source().version())); - assertTrue(request.requests().stream().allMatch((r) -> seqNoAndTerm == r.source().seqNoAndPrimaryTerm())); - assertTrue(request.requests().stream().allMatch((r) -> postFilter.equals(r.source().postFilter()))); - assertTrue(request.requests().stream().allMatch((r) -> r.source().fetchSource().fetchSource() == false)); - assertTrue(request.requests().stream().allMatch((r) -> r.source().fetchSource().includes().length == 0)); - assertTrue(request.requests().stream().allMatch((r) -> r.source().fetchSource().excludes().length == 0)); - } - }; - mockSearchPhaseContext.getRequest() - .source( - new SearchSourceBuilder().collapse( - new CollapseBuilder("someField").setInnerHits( - new InnerHitBuilder().setName("foobarbaz").setVersion(version).setSeqNoAndPrimaryTerm(seqNoAndTerm) - ) - ).fetchSource(false).postFilter(QueryBuilders.existsQuery("foo")) 
- ) - .preference("foobar") - .routing("baz"); + mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { + @Override + void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { + final QueryBuilder postFilter = QueryBuilders.existsQuery("foo"); + assertTrue(request.requests().stream().allMatch((r) -> "foo".equals(r.preference()))); + assertTrue(request.requests().stream().allMatch((r) -> "baz".equals(r.routing()))); + assertTrue(request.requests().stream().allMatch((r) -> version == r.source().version())); + assertTrue(request.requests().stream().allMatch((r) -> seqNoAndTerm == r.source().seqNoAndPrimaryTerm())); + assertTrue(request.requests().stream().allMatch((r) -> postFilter.equals(r.source().postFilter()))); + assertTrue(request.requests().stream().allMatch((r) -> r.source().fetchSource().fetchSource() == false)); + assertTrue(request.requests().stream().allMatch((r) -> r.source().fetchSource().includes().length == 0)); + assertTrue(request.requests().stream().allMatch((r) -> r.source().fetchSource().excludes().length == 0)); + } + }; + mockSearchPhaseContext.getRequest() + .source( + new SearchSourceBuilder().collapse( + new CollapseBuilder("someField").setInnerHits( + new InnerHitBuilder().setName("foobarbaz").setVersion(version).setSeqNoAndPrimaryTerm(seqNoAndTerm) + ) + ).fetchSource(false).postFilter(QueryBuilders.existsQuery("foo")) + ) + .preference("foobar") + .routing("baz"); - SearchHits hits = SearchHits.empty(new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f); - ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { - @Override - public void run() { - mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null); + SearchHits hits = SearchHits.empty(new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f); + ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { + @Override + public void run() { + mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null); + } + }); + phase.run(); + mockSearchPhaseContext.assertNoFailure(); + assertNotNull(mockSearchPhaseContext.searchResponse.get()); + } finally { + var resp = mockSearchPhaseContext.searchResponse.get(); + if (resp != null) { + resp.decRef(); } - }); - phase.run(); - mockSearchPhaseContext.assertNoFailure(); - assertNotNull(mockSearchPhaseContext.searchResponse.get()); + } } } diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java index 2cc13405c2b33..a5c0c59867627 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java @@ -33,179 +33,197 @@ public class FetchLookupFieldsPhaseTests extends ESTestCase { public void testNoLookupField() { MockSearchPhaseContext searchPhaseContext = new MockSearchPhaseContext(1); - searchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { - @Override - void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { - throw new AssertionError("No lookup field"); + try { + searchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { + @Override + void 
sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { + throw new AssertionError("No lookup field"); + } + }; + int numHits = randomIntBetween(0, 10); + SearchHit[] searchHits = new SearchHit[randomIntBetween(0, 10)]; + for (int i = 0; i < searchHits.length; i++) { + searchHits[i] = SearchHitTests.createTestItem(randomBoolean(), randomBoolean()); + } + SearchHits hits = new SearchHits(searchHits, new TotalHits(numHits, TotalHits.Relation.EQUAL_TO), 1.0f); + FetchLookupFieldsPhase phase = new FetchLookupFieldsPhase( + searchPhaseContext, + new SearchResponseSections(hits, null, null, false, null, null, 1), + null + ); + phase.run(); + searchPhaseContext.assertNoFailure(); + assertNotNull(searchPhaseContext.searchResponse.get()); + } finally { + var resp = searchPhaseContext.searchResponse.get(); + if (resp != null) { + resp.decRef(); } - }; - int numHits = randomIntBetween(0, 10); - SearchHit[] searchHits = new SearchHit[randomIntBetween(0, 10)]; - for (int i = 0; i < searchHits.length; i++) { - searchHits[i] = SearchHitTests.createTestItem(randomBoolean(), randomBoolean()); } - SearchHits hits = new SearchHits(searchHits, new TotalHits(numHits, TotalHits.Relation.EQUAL_TO), 1.0f); - FetchLookupFieldsPhase phase = new FetchLookupFieldsPhase( - searchPhaseContext, - new SearchResponseSections(hits, null, null, false, null, null, 1), - null - ); - phase.run(); - searchPhaseContext.assertNoFailure(); - assertNotNull(searchPhaseContext.searchResponse.get()); } public void testBasic() { MockSearchPhaseContext searchPhaseContext = new MockSearchPhaseContext(1); - final AtomicBoolean requestSent = new AtomicBoolean(); - searchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { - @Override - void sendExecuteMultiSearch( - MultiSearchRequest multiSearchRequest, - SearchTask task, - ActionListener listener - ) { - assertTrue(requestSent.compareAndSet(false, true)); - // send 4 requests for term_1, term_2, term_3, and unknown - assertThat(multiSearchRequest.requests(), hasSize(4)); - for (SearchRequest r : multiSearchRequest.requests()) { - assertNotNull(r.source()); - assertThat(r.source().query(), instanceOf(TermQueryBuilder.class)); - assertThat(r.source().size(), equalTo(1)); - } - final List queryTerms = multiSearchRequest.requests().stream().map(r -> { - final TermQueryBuilder query = (TermQueryBuilder) r.source().query(); - return query.value().toString(); - }).sorted().toList(); - assertThat(queryTerms, equalTo(List.of("term_1", "term_2", "term_3", "xyz"))); - final MultiSearchResponse.Item[] responses = new MultiSearchResponse.Item[multiSearchRequest.requests().size()]; - for (int i = 0; i < responses.length; i++) { - final SearchRequest r = multiSearchRequest.requests().get(i); - final TermQueryBuilder query = (TermQueryBuilder) r.source().query(); - final Map> fields = switch (query.value().toString()) { - case "term_1" -> Map.of("field_a", List.of("a1", "a2"), "field_b", List.of("b2")); - case "term_2" -> Map.of("field_a", List.of("a2", "a3"), "field_b", List.of("b1")); - case "term_3" -> Map.of("field_a", List.of("a2"), "field_b", List.of("b1", "b2")); - case "xyz" -> null; - default -> throw new AssertionError("unknown term value"); - }; - final SearchHits searchHits; - if (fields != null) { - final SearchHit hit = new SearchHit(randomInt(1000)); - fields.forEach((f, values) -> hit.setDocumentField(f, new DocumentField(f, values, List.of()))); - searchHits = new SearchHits(new SearchHit[] { hit }, new TotalHits(1, 
TotalHits.Relation.EQUAL_TO), 1.0f); - } else { - searchHits = SearchHits.empty(new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1.0f); + try { + final AtomicBoolean requestSent = new AtomicBoolean(); + searchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { + @Override + void sendExecuteMultiSearch( + MultiSearchRequest multiSearchRequest, + SearchTask task, + ActionListener listener + ) { + assertTrue(requestSent.compareAndSet(false, true)); + // send 4 requests for term_1, term_2, term_3, and unknown + assertThat(multiSearchRequest.requests(), hasSize(4)); + for (SearchRequest r : multiSearchRequest.requests()) { + assertNotNull(r.source()); + assertThat(r.source().query(), instanceOf(TermQueryBuilder.class)); + assertThat(r.source().size(), equalTo(1)); } - responses[i] = new MultiSearchResponse.Item( - new SearchResponse( - searchHits, - null, - null, - false, - null, - null, - 1, - null, - 1, - 1, - 0, - randomNonNegativeLong(), - ShardSearchFailure.EMPTY_ARRAY, - SearchResponseTests.randomClusters(), + final List queryTerms = multiSearchRequest.requests().stream().map(r -> { + final TermQueryBuilder query = (TermQueryBuilder) r.source().query(); + return query.value().toString(); + }).sorted().toList(); + assertThat(queryTerms, equalTo(List.of("term_1", "term_2", "term_3", "xyz"))); + final MultiSearchResponse.Item[] responses = new MultiSearchResponse.Item[multiSearchRequest.requests().size()]; + for (int i = 0; i < responses.length; i++) { + final SearchRequest r = multiSearchRequest.requests().get(i); + final TermQueryBuilder query = (TermQueryBuilder) r.source().query(); + final Map> fields = switch (query.value().toString()) { + case "term_1" -> Map.of("field_a", List.of("a1", "a2"), "field_b", List.of("b2")); + case "term_2" -> Map.of("field_a", List.of("a2", "a3"), "field_b", List.of("b1")); + case "term_3" -> Map.of("field_a", List.of("a2"), "field_b", List.of("b1", "b2")); + case "xyz" -> null; + default -> throw new AssertionError("unknown term value"); + }; + final SearchHits searchHits; + if (fields != null) { + final SearchHit hit = new SearchHit(randomInt(1000)); + fields.forEach((f, values) -> hit.setDocumentField(f, new DocumentField(f, values, List.of()))); + searchHits = new SearchHits(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f); + } else { + searchHits = SearchHits.empty(new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1.0f); + } + responses[i] = new MultiSearchResponse.Item( + new SearchResponse( + searchHits, + null, + null, + false, + null, + null, + 1, + null, + 1, + 1, + 0, + randomNonNegativeLong(), + ShardSearchFailure.EMPTY_ARRAY, + SearchResponseTests.randomClusters(), + null + ), null - ), - null - ); + ); + } + ActionListener.respondAndRelease(listener, new MultiSearchResponse(responses, randomNonNegativeLong())); } - ActionListener.respondAndRelease(listener, new MultiSearchResponse(responses, randomNonNegativeLong())); - } - }; + }; - SearchHit leftHit0 = new SearchHit(randomInt(100)); - final List fetchFields = List.of(new FieldAndFormat(randomAlphaOfLength(10), null)); - { - leftHit0.setDocumentField( - "lookup_field_1", - new DocumentField( + SearchHit leftHit0 = new SearchHit(randomInt(100)); + final List fetchFields = List.of(new FieldAndFormat(randomAlphaOfLength(10), null)); + { + leftHit0.setDocumentField( "lookup_field_1", - List.of(), - List.of(), - List.of( - new LookupField("test_index", new TermQueryBuilder("test_field", "term_1"), fetchFields, 1), - new LookupField("test_index", 
new TermQueryBuilder("test_field", "term_2"), fetchFields, 1) + new DocumentField( + "lookup_field_1", + List.of(), + List.of(), + List.of( + new LookupField("test_index", new TermQueryBuilder("test_field", "term_1"), fetchFields, 1), + new LookupField("test_index", new TermQueryBuilder("test_field", "term_2"), fetchFields, 1) + ) ) - ) - ); - leftHit0.setDocumentField( - "lookup_field_2", - new DocumentField( + ); + leftHit0.setDocumentField( "lookup_field_2", - List.of(), - List.of(), - List.of(new LookupField("test_index", new TermQueryBuilder("test_field", "term_2"), fetchFields, 1)) - ) - ); - } + new DocumentField( + "lookup_field_2", + List.of(), + List.of(), + List.of(new LookupField("test_index", new TermQueryBuilder("test_field", "term_2"), fetchFields, 1)) + ) + ); + } - SearchHit leftHit1 = new SearchHit(randomInt(100)); - { - leftHit1.setDocumentField( - "lookup_field_2", - new DocumentField( + SearchHit leftHit1 = new SearchHit(randomInt(100)); + { + leftHit1.setDocumentField( "lookup_field_2", - List.of(), - List.of(), - List.of( - new LookupField("test_index", new TermQueryBuilder("test_field", "term_2"), fetchFields, 1), - new LookupField("test_index", new TermQueryBuilder("test_field", "xyz"), fetchFields, 1) + new DocumentField( + "lookup_field_2", + List.of(), + List.of(), + List.of( + new LookupField("test_index", new TermQueryBuilder("test_field", "term_2"), fetchFields, 1), + new LookupField("test_index", new TermQueryBuilder("test_field", "xyz"), fetchFields, 1) + ) ) - ) - ); - leftHit1.setDocumentField( - "lookup_field_3", - new DocumentField( + ); + leftHit1.setDocumentField( "lookup_field_3", - List.of(), - List.of(), - List.of(new LookupField("test_index", new TermQueryBuilder("test_field", "term_3"), fetchFields, 1)) + new DocumentField( + "lookup_field_3", + List.of(), + List.of(), + List.of(new LookupField("test_index", new TermQueryBuilder("test_field", "term_3"), fetchFields, 1)) + ) + ); + } + SearchHits searchHits = new SearchHits( + new SearchHit[] { leftHit0, leftHit1 }, + new TotalHits(2, TotalHits.Relation.EQUAL_TO), + 1.0f + ); + FetchLookupFieldsPhase phase = new FetchLookupFieldsPhase( + searchPhaseContext, + new SearchResponseSections(searchHits, null, null, false, null, null, 1), + null + ); + phase.run(); + assertTrue(requestSent.get()); + searchPhaseContext.assertNoFailure(); + assertNotNull(searchPhaseContext.searchResponse.get()); + assertSame(searchPhaseContext.searchResponse.get().getHits().getHits()[0], leftHit0); + assertSame(searchPhaseContext.searchResponse.get().getHits().getHits()[1], leftHit1); + assertFalse(leftHit0.hasLookupFields()); + assertThat( + leftHit0.field("lookup_field_1").getValues(), + containsInAnyOrder( + Map.of("field_a", List.of("a1", "a2"), "field_b", List.of("b2")), + Map.of("field_a", List.of("a2", "a3"), "field_b", List.of("b1")) ) ); - } - SearchHits searchHits = new SearchHits(new SearchHit[] { leftHit0, leftHit1 }, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1.0f); - FetchLookupFieldsPhase phase = new FetchLookupFieldsPhase( - searchPhaseContext, - new SearchResponseSections(searchHits, null, null, false, null, null, 1), - null - ); - phase.run(); - assertTrue(requestSent.get()); - searchPhaseContext.assertNoFailure(); - assertNotNull(searchPhaseContext.searchResponse.get()); - assertSame(searchPhaseContext.searchResponse.get().getHits().getHits()[0], leftHit0); - assertSame(searchPhaseContext.searchResponse.get().getHits().getHits()[1], leftHit1); - assertFalse(leftHit0.hasLookupFields()); - assertThat( 
- leftHit0.field("lookup_field_1").getValues(), - containsInAnyOrder( - Map.of("field_a", List.of("a1", "a2"), "field_b", List.of("b2")), - Map.of("field_a", List.of("a2", "a3"), "field_b", List.of("b1")) - ) - ); - assertThat( - leftHit0.field("lookup_field_2").getValues(), - contains(Map.of("field_a", List.of("a2", "a3"), "field_b", List.of("b1"))) - ); + assertThat( + leftHit0.field("lookup_field_2").getValues(), + contains(Map.of("field_a", List.of("a2", "a3"), "field_b", List.of("b1"))) + ); - assertFalse(leftHit1.hasLookupFields()); - assertThat( - leftHit1.field("lookup_field_2").getValues(), - contains(Map.of("field_a", List.of("a2", "a3"), "field_b", List.of("b1"))) - ); - assertThat( - leftHit1.field("lookup_field_3").getValues(), - contains(Map.of("field_a", List.of("a2"), "field_b", List.of("b1", "b2"))) - ); + assertFalse(leftHit1.hasLookupFields()); + assertThat( + leftHit1.field("lookup_field_2").getValues(), + contains(Map.of("field_a", List.of("a2", "a3"), "field_b", List.of("b1"))) + ); + assertThat( + leftHit1.field("lookup_field_3").getValues(), + contains(Map.of("field_a", List.of("a2"), "field_b", List.of("b1", "b2"))) + ); + } finally { + var resp = searchPhaseContext.searchResponse.get(); + if (resp != null) { + resp.decRef(); + } + } } } diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index 24b2610c8d190..4594810da575a 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -122,6 +122,10 @@ public void run() { assertProfiles(profiled, 1, searchResponse); assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty()); } finally { + var resp = mockSearchPhaseContext.searchResponse.get(); + if (resp != null) { + resp.decRef(); + } results.decRef(); } } @@ -250,6 +254,10 @@ public void run() { assertProfiles(profiled, 2, searchResponse); assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty()); } finally { + var resp = mockSearchPhaseContext.searchResponse.get(); + if (resp != null) { + resp.decRef(); + } results.decRef(); } } @@ -374,6 +382,10 @@ public void run() { } assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(ctx)); } finally { + var resp = mockSearchPhaseContext.searchResponse.get(); + if (resp != null) { + resp.decRef(); + } results.decRef(); } } @@ -489,6 +501,10 @@ public void run() { mockSearchPhaseContext.releasedSearchContexts.size() ); } finally { + var resp = mockSearchPhaseContext.searchResponse.get(); + if (resp != null) { + resp.decRef(); + } results.decRef(); } } @@ -600,6 +616,10 @@ public void run() { assertThat(mockSearchPhaseContext.searchResponse.get().getShardFailures(), arrayWithSize(1)); assertThat(mockSearchPhaseContext.releasedSearchContexts, hasSize(1)); } finally { + var resp = mockSearchPhaseContext.searchResponse.get(); + if (resp != null) { + resp.decRef(); + } results.decRef(); } } @@ -716,6 +736,10 @@ public void run() { assertEquals(1, mockSearchPhaseContext.releasedSearchContexts.size()); assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(ctx1)); } finally { + var resp = mockSearchPhaseContext.searchResponse.get(); + if (resp != null) { + resp.decRef(); + } results.decRef(); } diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java 
b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java index df3d4d76a14ee..1a510058e3bbd 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java +++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java @@ -85,7 +85,7 @@ public OriginalIndices getOriginalIndices(int shardIndex) { public void sendSearchResponse(SearchResponseSections internalSearchResponse, AtomicArray queryResults) { String scrollId = getRequest().scroll() != null ? TransportSearchHelper.buildScrollId(queryResults) : null; String searchContextId = getRequest().pointInTimeBuilder() != null ? TransportSearchHelper.buildScrollId(queryResults) : null; - searchResponse.set( + var existing = searchResponse.getAndSet( new SearchResponse( internalSearchResponse, scrollId, @@ -98,6 +98,9 @@ public void sendSearchResponse(SearchResponseSections internalSearchResponse, At searchContextId ) ); + if (existing != null) { + existing.decRef(); + } } @Override diff --git a/test/external-modules/seek-tracking-directory/src/internalClusterTest/java/org/elasticsearch/test/seektracker/SeekTrackerPluginIT.java b/test/external-modules/seek-tracking-directory/src/internalClusterTest/java/org/elasticsearch/test/seektracker/SeekTrackerPluginIT.java index 54e6583d5f483..7d1e4c4c3d0de 100644 --- a/test/external-modules/seek-tracking-directory/src/internalClusterTest/java/org/elasticsearch/test/seektracker/SeekTrackerPluginIT.java +++ b/test/external-modules/seek-tracking-directory/src/internalClusterTest/java/org/elasticsearch/test/seektracker/SeekTrackerPluginIT.java @@ -45,7 +45,7 @@ public void testSeekTrackerPlugin() throws InterruptedException { } indexRandom(true, docs); - prepareSearch("index").setQuery(QueryBuilders.termQuery("field", "term2")).get(); + prepareSearch("index").setQuery(QueryBuilders.termQuery("field", "term2")).get().decRef(); SeekStatsResponse response = client().execute(SeekTrackerPlugin.SEEK_STATS_ACTION, new SeekStatsRequest("index")).actionGet(); List shardSeekStats = response.getSeekStats().get("index"); diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java index d0d2b99ff02a5..95de6e3ab2027 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java @@ -59,6 +59,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.Aggregations; @@ -951,12 +952,9 @@ public void testResumeDownsamplePartial() throws IOException { ); final DownsampleIndexerAction.ShardDownsampleResponse response2 = indexer.execute(); - int dim2DocCount = client().prepareSearch(sourceIndex) - .setQuery(new TermQueryBuilder(FIELD_DIMENSION_1, "dim2")) - .setSize(10_000) - .get() - .getHits() - .getHits().length; + long dim2DocCount = SearchResponseUtils.getTotalHitsValue( + client().prepareSearch(sourceIndex).setQuery(new TermQueryBuilder(FIELD_DIMENSION_1, "dim2")).setSize(10_000) + ); 
assertDownsampleIndexer(indexService, shardNum, task, response2, dim2DocCount); } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java index 005e39759b7e1..67f923769ffe3 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java @@ -353,7 +353,8 @@ protected void searchFailures.add(new ShardSearchFailure(new RuntimeException("shard failed"))); } } - listener.onResponse( + ActionListener.respondAndRelease( + listener, (Response) new SearchResponse( SearchHits.EMPTY_WITH_TOTAL_HITS, null, diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java index d2b38f4b11ef8..6775dca424bf1 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java @@ -167,12 +167,15 @@ public Collection findTriggeredWatches(Collection watches } SearchScrollRequest request = new SearchScrollRequest(response.getScrollId()); request.scroll(scrollTimeout); + response.decRef(); response = client.searchScroll(request).actionGet(defaultSearchTimeout); } } finally { if (response != null) { + final String scrollId = response.getScrollId(); + response.decRef(); ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); - clearScrollRequest.addScrollId(response.getScrollId()); + clearScrollRequest.addScrollId(scrollId); client.clearScroll(clearScrollRequest).actionGet(scrollTimeout); } } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java index 724c28c4bf50d..60fa2581b4218 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java @@ -258,7 +258,7 @@ public void testFindTriggeredWatchesGoodCase() { ) ); } else if (request.scrollId().equals("_scrollId1")) { - listener.onResponse(SearchResponseUtils.emptyWithTotalHits("_scrollId2", 1, 1, 0, 1, null, null)); + ActionListener.respondAndRelease(listener, SearchResponseUtils.emptyWithTotalHits("_scrollId2", 1, 1, 0, 1, null, null)); } else { listener.onFailure(new ElasticsearchException("test issue")); } From f2c939362c2bfa457bb989f81a2d322f0549b0bc Mon Sep 17 00:00:00 2001 From: David Roberts Date: Mon, 8 Jan 2024 18:28:04 +0000 Subject: [PATCH 06/47] [ML] Retry updates to model snapshot ID on job config (#104077) When an autodetect process produces a new model snapshot, we update the model snapshot ID on the corresponding job config. This means that the next time the job restarts it will load the most recent model snapshot. We have seen a few occasions where this update to the job config has failed. Often this happens because the .ml-config index is temporarily inaccessible. This change adds a few retries of the config update. 
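As background on the shape of the retry policy added here (first retry after 15 seconds, at most 2 minutes between retries, giving up after 5 minutes overall), below is a hand-written sketch of a bounded retry loop with capped exponential backoff. It is illustrative only — every name in it is invented, and the production code in this patch uses Elasticsearch's `RetryableAction` base class rather than a standalone helper like this:

```java
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;

// Sketch of a bounded retry with capped exponential backoff; not the real RetryableAction.
final class BoundedRetrySketch {
    static void retry(ScheduledExecutorService scheduler, Runnable attempt, Consumer<Exception> onGiveUp) {
        long firstDelayMs = TimeUnit.SECONDS.toMillis(15);   // first retry after 15s
        long maxDelayMs = TimeUnit.MINUTES.toMillis(2);      // never wait more than 2m between retries
        long deadlineMs = System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(5); // 5m total budget
        attemptOnce(scheduler, attempt, onGiveUp, firstDelayMs, maxDelayMs, deadlineMs);
    }

    private static void attemptOnce(ScheduledExecutorService scheduler, Runnable attempt,
                                    Consumer<Exception> onGiveUp, long delayMs, long maxDelayMs, long deadlineMs) {
        try {
            attempt.run();                                   // success: no further retries
        } catch (Exception e) {
            if (System.currentTimeMillis() + delayMs > deadlineMs) {
                onGiveUp.accept(e);                          // retry budget exhausted: surface the last failure
            } else {
                scheduler.schedule(
                    () -> attemptOnce(scheduler, attempt, onGiveUp,
                        Math.min(delayMs * 2, maxDelayMs), maxDelayMs, deadlineMs),
                    delayMs,
                    TimeUnit.MILLISECONDS
                );
            }
        }
    }
}
```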
It is not completely catastrophic if we fail to update the model snapshot ID on the job config - it just means that the job will pick up from an earlier snapshot and redo work if it has to restart for any reason. But it's still better to attempt a few retries before giving up. --- docs/changelog/104077.yaml | 5 + .../output/AutodetectResultProcessor.java | 32 ++-- .../RetryableUpdateModelSnapshotAction.java | 76 ++++++++ .../AutodetectResultProcessorTests.java | 2 + ...tryableUpdateModelSnapshotActionTests.java | 172 ++++++++++++++++++ 5 files changed, 274 insertions(+), 13 deletions(-) create mode 100644 docs/changelog/104077.yaml create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/RetryableUpdateModelSnapshotAction.java create mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/RetryableUpdateModelSnapshotActionTests.java diff --git a/docs/changelog/104077.yaml b/docs/changelog/104077.yaml new file mode 100644 index 0000000000000..7550e7388a29d --- /dev/null +++ b/docs/changelog/104077.yaml @@ -0,0 +1,5 @@ +pr: 104077 +summary: Retry updates to model snapshot ID on job config +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessor.java index 459e6f6dee4bd..ae6e21156fdcb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessor.java @@ -58,8 +58,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; -import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ml.job.messages.Messages.JOB_FORECAST_NATIVE_PROCESS_KILLED; /** @@ -506,19 +504,27 @@ protected void updateModelSnapshotOnJob(ModelSnapshot modelSnapshot) { return; } - executeAsyncWithOrigin(client, ML_ORIGIN, UpdateJobAction.INSTANCE, updateRequest, new ActionListener() { - @Override - public void onResponse(PutJobAction.Response response) { - updateModelSnapshotSemaphore.release(); - logger.debug("[{}] Updated job with model snapshot id [{}]", jobId, modelSnapshot.getSnapshotId()); - } + RetryableUpdateModelSnapshotAction updateModelSnapshotAction = new RetryableUpdateModelSnapshotAction( + client, + updateRequest, + new ActionListener<>() { + @Override + public void onResponse(PutJobAction.Response response) { + updateModelSnapshotSemaphore.release(); + logger.debug("[{}] Updated job with model snapshot id [{}]", jobId, modelSnapshot.getSnapshotId()); + } - @Override - public void onFailure(Exception e) { - updateModelSnapshotSemaphore.release(); - logger.error("[" + jobId + "] Failed to update job with new model snapshot id [" + modelSnapshot.getSnapshotId() + "]", e); + @Override + public void onFailure(Exception e) { + updateModelSnapshotSemaphore.release(); + logger.error( + "[" + jobId + "] Failed to update job with new model snapshot id [" + modelSnapshot.getSnapshotId() + "]", + e + ); + } } - }); + ); + updateModelSnapshotAction.run(); } public void awaitCompletion() throws TimeoutException { diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/RetryableUpdateModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/RetryableUpdateModelSnapshotAction.java new file mode 100644 index 0000000000000..8823eaaa2032f --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/RetryableUpdateModelSnapshotAction.java @@ -0,0 +1,76 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.job.process.autodetect.output; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.RetryableAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.xpack.core.ml.action.PutJobAction; +import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; +import org.elasticsearch.xpack.ml.MachineLearning; + +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; + +/** + * Class to retry updates to the model snapshot ID on the job config after a new model snapshot result + * is seen. Prior to the introduction of this functionality we saw cases where this particular job config + * update would fail, so that the job would have persisted a perfectly valid model snapshot and yet it + * would not be used if the job failed over to another node, leading to wasted work rerunning from an + * older snapshot. + */ +public class RetryableUpdateModelSnapshotAction extends RetryableAction { + + private static final Logger logger = LogManager.getLogger(RetryableUpdateModelSnapshotAction.class); + + private final Client client; + private final UpdateJobAction.Request updateRequest; + private volatile boolean hasFailedAtLeastOnce; + + public RetryableUpdateModelSnapshotAction( + Client client, + UpdateJobAction.Request updateRequest, + ActionListener listener + ) { + super( + logger, + client.threadPool(), + // First retry after 15 seconds + TimeValue.timeValueSeconds(15), + // Never wait more than 2 minutes between retries + TimeValue.timeValueMinutes(2), + // Retry for 5 minutes in total. If the node is shutting down then we cannot wait longer than 10 + // minutes, and there is other work to do as well. If the node is not shutting down then persisting + // the snapshot is less important, as we'll try again if the node does shut down. Therefore, 5 minutes + // is a reasonable compromise between preventing excess rework on failover and delaying processing + // unnecessarily. 
+ TimeValue.timeValueMinutes(5), + listener, + client.threadPool().executor(MachineLearning.UTILITY_THREAD_POOL_NAME) + ); + this.client = client; + this.updateRequest = updateRequest; + } + + @Override + public void tryAction(ActionListener listener) { + executeAsyncWithOrigin(client, ML_ORIGIN, UpdateJobAction.INSTANCE, updateRequest, listener); + } + + @Override + public boolean shouldRetry(Exception e) { + if (hasFailedAtLeastOnce == false) { + hasFailedAtLeastOnce = true; + logger.warn(() -> "[" + updateRequest.getJobId() + "] Failed to update job with new model snapshot id; attempting retry", e); + } + return true; + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessorTests.java index ed050a99cd16d..cc4491ff5fffc 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessorTests.java @@ -63,6 +63,7 @@ import java.util.List; import java.util.concurrent.ScheduledThreadPoolExecutor; +import static org.elasticsearch.common.util.concurrent.EsExecutors.DIRECT_EXECUTOR_SERVICE; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -102,6 +103,7 @@ public void setUpMocks() { executor = new Scheduler.SafeScheduledThreadPoolExecutor(1); client = mock(Client.class); ThreadPool threadPool = mock(ThreadPool.class); + when(threadPool.executor(any())).thenReturn(DIRECT_EXECUTOR_SERVICE); when(client.threadPool()).thenReturn(threadPool); when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); auditor = mock(AnomalyDetectionAuditor.class); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/RetryableUpdateModelSnapshotActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/RetryableUpdateModelSnapshotActionTests.java new file mode 100644 index 0000000000000..a073da32f3085 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/RetryableUpdateModelSnapshotActionTests.java @@ -0,0 +1,172 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.job.process.autodetect.output; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ml.action.PutJobAction; +import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; +import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; +import org.junit.Before; + +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; + +import static org.elasticsearch.common.util.concurrent.EsExecutors.DIRECT_EXECUTOR_SERVICE; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class RetryableUpdateModelSnapshotActionTests extends ESTestCase { + + private static final String JOB_ID = "valid_id"; + + private Client client; + + private ThreadPool threadPool; + + @Before + public void setUpMocks() { + client = mock(Client.class); + threadPool = mock(ThreadPool.class); + when(threadPool.executor(any())).thenReturn(DIRECT_EXECUTOR_SERVICE); + doAnswer(invocationOnMock -> { + Runnable runnable = (Runnable) invocationOnMock.getArguments()[0]; + runnable.run(); + return null; + }).when(threadPool).schedule(any(), any(), any()); + when(client.threadPool()).thenReturn(threadPool); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + } + + public void testFirstTimeSuccess() { + + PutJobAction.Response response = mock(PutJobAction.Response.class); + doAnswer(invocationOnMock -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + listener.onResponse(response); + return null; + }).when(client).execute(any(), any(), any()); + + AtomicReference storedResponse = new AtomicReference<>(); + + UpdateJobAction.Request updateRequest = new UpdateJobAction.Request(JOB_ID, new JobUpdate.Builder(JOB_ID).build()); + RetryableUpdateModelSnapshotAction updateModelSnapshotAction = new RetryableUpdateModelSnapshotAction( + client, + updateRequest, + new ActionListener<>() { + @Override + public void onResponse(PutJobAction.Response response) { + storedResponse.set(response); + } + + @Override + public void onFailure(Exception e) { + fail(e); + } + } + ); + updateModelSnapshotAction.run(); + + verify(threadPool, never()).schedule(any(), any(), any()); + assertSame(response, storedResponse.get()); + } + + public void testRetriesNeeded() { + + int numRetries = randomIntBetween(1, 5); + + PutJobAction.Response response = mock(PutJobAction.Response.class); + AtomicInteger callCount = new AtomicInteger(0); + doAnswer(invocationOnMock -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + if (callCount.incrementAndGet() > numRetries) { + listener.onResponse(response); + } else { + listener.onFailure(new Exception()); + } + return null; + }).when(client).execute(any(), any(), any()); + + AtomicReference storedResponse = new AtomicReference<>(); + + 
UpdateJobAction.Request updateRequest = new UpdateJobAction.Request(JOB_ID, new JobUpdate.Builder(JOB_ID).build()); + RetryableUpdateModelSnapshotAction updateModelSnapshotAction = new RetryableUpdateModelSnapshotAction( + client, + updateRequest, + new ActionListener<>() { + @Override + public void onResponse(PutJobAction.Response response) { + storedResponse.set(response); + } + + @Override + public void onFailure(Exception e) { + fail(e); + } + } + ); + updateModelSnapshotAction.run(); + + verify(threadPool, times(numRetries)).schedule(any(), any(), any()); + assertSame(response, storedResponse.get()); + } + + public void testCompleteFailure() { + + int numRetries = randomIntBetween(1, 5); + + AtomicInteger callCount = new AtomicInteger(0); + AtomicLong relativeTimeMs = new AtomicLong(0); + doAnswer(invocationOnMock -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + if (callCount.incrementAndGet() > numRetries) { + relativeTimeMs.set(TimeValue.timeValueMinutes(5).millis() + 1); + } + listener.onFailure(new Exception(Long.toString(relativeTimeMs.get()))); + return null; + }).when(client).execute(any(), any(), any()); + doAnswer(invocationOnMock -> relativeTimeMs.get()).when(threadPool).relativeTimeInMillis(); + + AtomicReference storedFailure = new AtomicReference<>(); + + UpdateJobAction.Request updateRequest = new UpdateJobAction.Request(JOB_ID, new JobUpdate.Builder(JOB_ID).build()); + RetryableUpdateModelSnapshotAction updateModelSnapshotAction = new RetryableUpdateModelSnapshotAction( + client, + updateRequest, + new ActionListener<>() { + @Override + public void onResponse(PutJobAction.Response response) { + fail("this should not be called"); + } + + @Override + public void onFailure(Exception e) { + storedFailure.set(e); + } + } + ); + updateModelSnapshotAction.run(); + + verify(threadPool, times(numRetries)).schedule(any(), any(), any()); + assertEquals(Long.toString(relativeTimeMs.get()), storedFailure.get().getMessage()); + } +} From 2279d75b9d5686f5cda05e656b2fbbec976f3598 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Mon, 8 Jan 2024 13:34:57 -0500 Subject: [PATCH 07/47] Bump geoip2 and maxmind-db (#104064) --- gradle/verification-metadata.xml | 12 ++++++------ modules/ingest-geoip/build.gradle | 4 ++-- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index e92bc287c9ca1..3c7c537afe5bf 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -811,14 +811,14 @@ - - - + + + - - - + + + diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle index c44832ef7e2ff..f755a27b478cc 100644 --- a/modules/ingest-geoip/build.gradle +++ b/modules/ingest-geoip/build.gradle @@ -27,12 +27,12 @@ tasks.named('internalClusterTestTestingConventions').configure { } dependencies { - implementation('com.maxmind.geoip2:geoip2:4.0.0') + implementation('com.maxmind.geoip2:geoip2:4.2.0') // geoip2 dependencies: runtimeOnly("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}") runtimeOnly("com.fasterxml.jackson.core:jackson-databind:${versions.jackson}") runtimeOnly("com.fasterxml.jackson.core:jackson-core:${versions.jackson}") - implementation('com.maxmind.db:maxmind-db:3.0.0') + implementation('com.maxmind.db:maxmind-db:3.1.0') testImplementation 'org.elasticsearch:geolite2-databases:20191119' internalClusterTestImplementation project(':modules:reindex') From 
a359b1f6483020af0fa61b8403b4c18936b7e865 Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Mon, 8 Jan 2024 13:31:54 -0600 Subject: [PATCH 08/47] Relax limit on max string size in CBOR, Smile, YAML (#103930) Remove the rough limit on string length from Jackson 2.15. The limit was already relaxed for JSON in #96031, this extends that change to other XContent types. Refs: #96031 Fixes: #104009 --- .../xcontent/provider/XContentImplUtils.java | 21 +++++++++++++++++++ .../provider/cbor/CborXContentImpl.java | 3 ++- .../provider/json/JsonXContentImpl.java | 9 ++------ .../provider/smile/SmileXContentImpl.java | 3 ++- .../provider/yaml/YamlXContentImpl.java | 7 ++++++- 5 files changed, 33 insertions(+), 10 deletions(-) create mode 100644 libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/XContentImplUtils.java diff --git a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/XContentImplUtils.java b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/XContentImplUtils.java new file mode 100644 index 0000000000000..3e3fc12f9c16a --- /dev/null +++ b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/XContentImplUtils.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.xcontent.provider; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.StreamReadConstraints; +import com.fasterxml.jackson.core.TSFBuilder; + +public class XContentImplUtils { + public static > F configure(TSFBuilder builder) { + // jackson 2.15 introduced a max string length. We have other limits in place to constrain max doc size, + // so here we set to max value (2GiB) so as not to constrain further than those existing limits. + return builder.streamReadConstraints(StreamReadConstraints.builder().maxStringLength(Integer.MAX_VALUE).build()).build(); + } +} diff --git a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/cbor/CborXContentImpl.java b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/cbor/CborXContentImpl.java index 1b2a6d02822ba..2a8e7a4dfa12c 100644 --- a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/cbor/CborXContentImpl.java +++ b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/cbor/CborXContentImpl.java @@ -21,6 +21,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.provider.XContentImplUtils; import java.io.IOException; import java.io.InputStream; @@ -45,7 +46,7 @@ public static XContent cborXContent() { } static { - cborFactory = new CBORFactory(); + cborFactory = XContentImplUtils.configure(CBORFactory.builder()); cborFactory.configure(CBORFactory.Feature.FAIL_ON_SYMBOL_HASH_OVERFLOW, false); // this trips on many mappings now... 
// Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.dataformat.cbor.CBORGenerator#close() method cborFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false); diff --git a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentImpl.java b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentImpl.java index cbd3e7378b6df..2e4925b4a853e 100644 --- a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentImpl.java +++ b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentImpl.java @@ -13,7 +13,6 @@ import com.fasterxml.jackson.core.JsonFactoryBuilder; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.StreamReadConstraints; import org.elasticsearch.xcontent.XContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -21,6 +20,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.provider.XContentImplUtils; import java.io.IOException; import java.io.InputStream; @@ -46,12 +46,7 @@ public static final XContent jsonXContent() { } static { - var builder = new JsonFactoryBuilder(); - // jackson 2.15 introduced a max string length. We have other limits in place to constrain max doc size, - // so here we set to max value (2GiB) so as not to constrain further than those existing limits. - builder.streamReadConstraints(StreamReadConstraints.builder().maxStringLength(Integer.MAX_VALUE).build()); - - jsonFactory = builder.build(); + jsonFactory = XContentImplUtils.configure(new JsonFactoryBuilder()); jsonFactory.configure(JsonGenerator.Feature.QUOTE_FIELD_NAMES, true); jsonFactory.configure(JsonParser.Feature.ALLOW_COMMENTS, true); jsonFactory.configure(JsonFactory.Feature.FAIL_ON_SYMBOL_HASH_OVERFLOW, false); // this trips on many mappings now... diff --git a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/smile/SmileXContentImpl.java b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/smile/SmileXContentImpl.java index 46f8aeec20f29..3c774c582c638 100644 --- a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/smile/SmileXContentImpl.java +++ b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/smile/SmileXContentImpl.java @@ -21,6 +21,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.provider.XContentImplUtils; import java.io.IOException; import java.io.InputStream; @@ -45,7 +46,7 @@ public static XContent smileXContent() { } static { - smileFactory = new SmileFactory(); + smileFactory = XContentImplUtils.configure(SmileFactory.builder()); // for now, this is an overhead, might make sense for web sockets smileFactory.configure(SmileGenerator.Feature.ENCODE_BINARY_AS_7BIT, false); smileFactory.configure(SmileFactory.Feature.FAIL_ON_SYMBOL_HASH_OVERFLOW, false); // this trips on many mappings now... 
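The CBOR and Smile hunks above, and the YAML hunk below, all route factory construction through the same Jackson builder call. As a standalone sketch of that API (the class and method names here are invented for illustration; the builder calls themselves are the ones the patch uses), raising the Jackson 2.15 constraint looks like this:

```java
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonFactoryBuilder;
import com.fasterxml.jackson.core.StreamReadConstraints;

class UnconstrainedFactorySketch {
    // Build a factory whose max string length is lifted to Integer.MAX_VALUE (2 GiB);
    // a factory built without this call rejects oversized string values with a
    // StreamConstraintsException under Jackson 2.15+.
    static JsonFactory newFactory() {
        return new JsonFactoryBuilder().streamReadConstraints(
            StreamReadConstraints.builder().maxStringLength(Integer.MAX_VALUE).build()
        ).build();
    }
}
```

The shared `XContentImplUtils.configure` helper applies exactly this to the format-specific `TSFBuilder` subtypes, which is why `new CBORFactory()`, `new SmileFactory()` and friends become builder-based in these hunks.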
diff --git a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/yaml/YamlXContentImpl.java b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/yaml/YamlXContentImpl.java index 2d59b9588ab38..6a22508ba51c6 100644 --- a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/yaml/YamlXContentImpl.java +++ b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/yaml/YamlXContentImpl.java @@ -11,6 +11,7 @@ import com.fasterxml.jackson.core.JsonEncoding; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; +import com.fasterxml.jackson.dataformat.yaml.YAMLParser; import org.elasticsearch.xcontent.XContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -18,6 +19,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.provider.XContentImplUtils; import java.io.IOException; import java.io.InputStream; @@ -42,7 +44,10 @@ public static XContent yamlXContent() { } static { - yamlFactory = new YAMLFactory(); + yamlFactory = XContentImplUtils.configure(YAMLFactory.builder()); + // YAMLFactory.builder() differs from new YAMLFactory() in that builder() does not set the default yaml parser feature flags. + // So set the only default feature flag, EMPTY_STRING_AS_NULL, here. + yamlFactory.configure(YAMLParser.Feature.EMPTY_STRING_AS_NULL, true); yamlFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true); yamlFactory.configure(JsonParser.Feature.USE_FAST_DOUBLE_PARSER, true); yamlXContent = new YamlXContentImpl(); From 2f5247117e80df1494067cef6b6eb444c231ba4b Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 8 Jan 2024 12:03:40 -0800 Subject: [PATCH 09/47] Upgrade ASM to 9.6 for Java 22 support (#104085) This commit upgrades the version of asm used by the build and plugins in order to support Java 22 version format. 
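To make the compatibility issue concrete, here is a small sketch (class name invented) of how ASM surfaces an unsupported class-file format: `ClassReader` throws `IllegalArgumentException` ("Unsupported class file major version") for versions it does not recognize, and Java 22 class files carry major version 66, which ASM accepts as of 9.6.

```java
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.Opcodes;

// Prints the class-file major version of a compiled class (Java 22 => 66).
// With an ASM release that predates the format, the ClassReader constructor
// itself fails before any visitor runs.
class ClassVersionSniffer extends ClassVisitor {
    ClassVersionSniffer() {
        super(Opcodes.ASM9);
    }

    @Override
    public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) {
        // the low 16 bits of `version` carry the class-file major version
        System.out.println(name + " -> class-file major version " + (version & 0xFFFF));
    }

    static void sniff(byte[] classBytes) {
        new ClassReader(classBytes).accept(new ClassVersionSniffer(), ClassReader.SKIP_CODE | ClassReader.SKIP_DEBUG);
    }
}
```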
closes #104065 relates #103963 --- build-tools-internal/build.gradle | 2 +- distribution/tools/plugin-cli/build.gradle | 4 ++-- gradle/verification-metadata.xml | 10 ++++++++++ libs/plugin-scanner/build.gradle | 4 ++-- 4 files changed, 15 insertions(+), 5 deletions(-) diff --git a/build-tools-internal/build.gradle b/build-tools-internal/build.gradle index a3b41283764a1..a4e0e2389dbec 100644 --- a/build-tools-internal/build.gradle +++ b/build-tools-internal/build.gradle @@ -255,7 +255,7 @@ dependencies { // ensuring brought asm version brought in by spock is up-to-date testImplementation buildLibs.asm integTestImplementation buildLibs.asm - integTestImplementation('org.ow2.asm:asm:9.5') + integTestImplementation('org.ow2.asm:asm:9.6') api("org.yaml:snakeyaml") { version { strictly(versions.snakeyaml) } } diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle index 3859dfa1ddbb9..e55e8ec39654e 100644 --- a/distribution/tools/plugin-cli/build.gradle +++ b/distribution/tools/plugin-cli/build.gradle @@ -23,8 +23,8 @@ dependencies { compileOnly project(":libs:elasticsearch-cli") implementation project(":libs:elasticsearch-plugin-api") implementation project(":libs:elasticsearch-plugin-scanner") - implementation 'org.ow2.asm:asm:9.5' - implementation 'org.ow2.asm:asm-tree:9.5' + implementation 'org.ow2.asm:asm:9.6' + implementation 'org.ow2.asm:asm-tree:9.6' api "org.bouncycastle:bcpg-fips:1.0.7.1" api "org.bouncycastle:bc-fips:1.0.2.4" diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 3c7c537afe5bf..24b81106dcea3 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -4132,6 +4132,11 @@ + + + + + @@ -4197,6 +4202,11 @@ + + + + + diff --git a/libs/plugin-scanner/build.gradle b/libs/plugin-scanner/build.gradle index d7138363e09fa..fbe9c02092577 100644 --- a/libs/plugin-scanner/build.gradle +++ b/libs/plugin-scanner/build.gradle @@ -19,8 +19,8 @@ dependencies { api project(':libs:elasticsearch-plugin-api') api project(":libs:elasticsearch-x-content") - api 'org.ow2.asm:asm:9.5' - api 'org.ow2.asm:asm-tree:9.5' + api 'org.ow2.asm:asm:9.6' + api 'org.ow2.asm:asm-tree:9.6' testImplementation "junit:junit:${versions.junit}" testImplementation(project(":test:framework")) { From 6a386ba20a4611d83d67f6288f02c42ddee9f533 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Mon, 8 Jan 2024 15:05:43 -0500 Subject: [PATCH 10/47] [dra] Trigger elasticsearch-hadoop dra build whenever we build a new staging artifact (#104084) --- .buildkite/pipelines/dra-workflow.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.buildkite/pipelines/dra-workflow.yml b/.buildkite/pipelines/dra-workflow.yml index 336bb74041be3..e7bf19816356f 100644 --- a/.buildkite/pipelines/dra-workflow.yml +++ b/.buildkite/pipelines/dra-workflow.yml @@ -7,3 +7,13 @@ steps: image: family/elasticsearch-ubuntu-2204 machineType: custom-32-98304 buildDirectory: /dev/shm/bk + - wait + # The hadoop build depends on the ES artifact + # So let's trigger the hadoop build any time we build a new staging artifact + - trigger: elasticsearch-hadoop-dra-workflow + async: true + build: + branch: "${BUILDKITE_BRANCH}" + env: + DRA_WORKFLOW: staging + if: build.env('DRA_WORKFLOW') == 'staging' From cdb3439c33bcbfdab52f506b7dfdf95c9d1698e0 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 8 Jan 2024 20:06:01 +0000 Subject: [PATCH 11/47] Introduce `SubscribableListener#andThenCompleteWith` (#103990) Sometimes we want a step in a 
`SubscribableListener` chain which is fully synchronous. It's a little awkward to write this using `andThen`, so this commit introduces a more natural utility for this situation. --- .../repositories/s3/S3BlobContainer.java | 4 +- .../action/support/SubscribableListener.java | 36 ++++++ .../recovery/RecoverySourceHandler.java | 40 +++---- .../support/SubscribableListenerTests.java | 106 +++++++++++++++++- .../slm/SLMGetExpiredSnapshotsAction.java | 5 +- .../SLMGetExpiredSnapshotsActionTests.java | 5 +- 6 files changed, 166 insertions(+), 30 deletions(-) diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java index 93b8ef7e57389..dadd15ed640c0 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java @@ -628,7 +628,7 @@ void run(BytesReference expected, BytesReference updated, ActionListenerandThen((l, currentValue) -> ActionListener.completeWith(l, () -> { + .andThenApply(currentValue -> { if (currentValue.isPresent() && currentValue.bytesReference().equals(expected)) { logger.trace("[{}] completing upload [{}]", blobKey, uploadId); completeMultipartUpload(uploadId, partETag); @@ -638,7 +638,7 @@ void run(BytesReference expected, BytesReference updated, ActionListener SubscribableListener andThen( return newForked(l -> addListener(l.delegateFailureAndWrap(nextStep), executor, threadContext)); } + /** + * Creates and returns a new {@link SubscribableListener} {@code L} such that if this listener is completed successfully with result + * {@code R} then {@code fn} is invoked with argument {@code R}, and {@code L} is completed with the result of that invocation. If this + * listener is completed exceptionally, or {@code fn} throws an exception, then {@code L} is completed with that exception. + *
<p>
+ * This is essentially a shorthand for a call to {@link #andThen} with a {@code nextStep} argument that is fully synchronous. + *
<p>
+ * The threading of the {@code fn} invocation is the same as for listeners added with {@link #addListener}: if this listener is + * already complete then {@code fn} is invoked on the thread calling {@link #andThenApply} and in its thread context, but if this + * listener is incomplete then {@code fn} is invoked on the thread, and in the thread context, on which this listener is completed. + */ + public SubscribableListener andThenApply(CheckedFunction fn) { + return newForked(l -> addListener(l.map(fn))); + } + + /** + * Creates and returns a new {@link SubscribableListener} {@code L} such that if this listener is completed successfully with result + * {@code R} then {@code consumer} is applied to argument {@code R}, and {@code L} is completed with {@code null} when {@code + * consumer} returns. If this listener is completed exceptionally, or {@code consumer} throws an exception, then {@code L} is + * completed with that exception. + *
<p>
+ * This is essentially a shorthand for a call to {@link #andThen} with a {@code nextStep} argument that is fully synchronous. + *
<p>
+ * The threading of the {@code consumer} invocation is the same as for listeners added with {@link #addListener}: if this listener is + * already complete then {@code consumer} is invoked on the thread calling {@link #andThenAccept} and in its thread context, but if + * this listener is incomplete then {@code consumer} is invoked on the thread, and in the thread context, on which this listener is + * completed. + */ + public SubscribableListener andThenAccept(CheckedConsumer consumer) { + return newForked(l -> addListener(l.map(r -> { + consumer.accept(r); + return null; + }))); + } + /** * Adds a timeout to this listener, such that if the timeout elapses before the listener is completed then it will be completed with an * {@link ElasticsearchTimeoutException}. diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 84385ee04c2dd..618bc847e3a7f 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -565,19 +565,17 @@ void phase1(IndexCommit snapshot, long startingSeqNo, IntSupplier translogOps, A // but we must still create a retention lease .newForked(leaseListener -> createRetentionLease(startingSeqNo, leaseListener)) // and then compute the result of sending no files - .andThen((l, ignored) -> { + .andThenApply(ignored -> { final TimeValue took = stopWatch.totalTime(); logger.trace("recovery [phase1]: took [{}]", took); - l.onResponse( - new SendFileResult( - Collections.emptyList(), - Collections.emptyList(), - 0L, - Collections.emptyList(), - Collections.emptyList(), - 0L, - took - ) + return new SendFileResult( + Collections.emptyList(), + Collections.emptyList(), + 0L, + Collections.emptyList(), + Collections.emptyList(), + 0L, + took ); }) // and finally respond @@ -751,19 +749,17 @@ void run(ActionListener listener) { cleanFiles(store, recoverySourceMetadata, () -> translogOps, lastKnownGlobalCheckpoint, finalRecoveryPlanListener); }) // compute the result - .andThen((resultListener, ignored) -> { + .andThenApply(ignored -> { final TimeValue took = stopWatch.totalTime(); logger.trace("recovery [phase1]: took [{}]", took); - resultListener.onResponse( - new SendFileResult( - shardRecoveryPlan.getFilesToRecoverNames(), - shardRecoveryPlan.getFilesToRecoverSizes(), - shardRecoveryPlan.getTotalSize(), - shardRecoveryPlan.getFilesPresentInTargetNames(), - shardRecoveryPlan.getFilesPresentInTargetSizes(), - shardRecoveryPlan.getExistingSize(), - took - ) + return new SendFileResult( + shardRecoveryPlan.getFilesToRecoverNames(), + shardRecoveryPlan.getFilesToRecoverSizes(), + shardRecoveryPlan.getTotalSize(), + shardRecoveryPlan.getFilesPresentInTargetNames(), + shardRecoveryPlan.getFilesPresentInTargetSizes(), + shardRecoveryPlan.getExistingSize(), + took ); }) // and finally respond diff --git a/server/src/test/java/org/elasticsearch/action/support/SubscribableListenerTests.java b/server/src/test/java/org/elasticsearch/action/support/SubscribableListenerTests.java index d2acf8f397f2f..d784e2ac040a1 100644 --- a/server/src/test/java/org/elasticsearch/action/support/SubscribableListenerTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/SubscribableListenerTests.java @@ -409,6 +409,26 @@ public void testAndThenSuccess() { assertFalse(chainedListener.isDone()); } + public void testAndThenThrowException() { + 
final var initialListener = new SubscribableListener<>(); + final var forked = new AtomicReference>(); + final var result = new AtomicReference<>(); + + final var chainedListener = initialListener.andThen((l, o) -> { + forked.set(l); + result.set(o); + throw new ElasticsearchException("simulated"); + }); + assertNull(forked.get()); + assertNull(result.get()); + + final var o1 = new Object(); + initialListener.onResponse(o1); + assertSame(o1, result.get()); + assertSame(chainedListener, forked.get()); + assertComplete(chainedListener, "simulated"); + } + public void testAndThenFailure() { final var initialListener = new SubscribableListener<>(); @@ -488,7 +508,7 @@ private static void runAndThenThreadingTest(boolean testSuccess) { assertTrue(isComplete.get()); } - private static void assertComplete(SubscribableListener listener, @Nullable String expectedFailureMessage) { + private static void assertComplete(SubscribableListener listener, @Nullable String expectedFailureMessage) { assertTrue(listener.isDone()); if (expectedFailureMessage == null) { try { @@ -500,4 +520,88 @@ private static void assertComplete(SubscribableListener listener, @Nulla assertEquals(expectedFailureMessage, expectThrows(ElasticsearchException.class, listener::rawResult).getMessage()); } } + + public void testAndThenApplySuccess() throws Exception { + final var initialListener = new SubscribableListener<>(); + final var result = new AtomicReference<>(); + + final var oResult = new Object(); + final var chainedListener = initialListener.andThenApply(o -> { + result.set(o); + return oResult; + }); + assertNull(result.get()); + + final var o1 = new Object(); + initialListener.onResponse(o1); + assertSame(o1, result.get()); + assertTrue(chainedListener.isDone()); + assertSame(oResult, chainedListener.rawResult()); + } + + public void testAndThenApplyThrowException() { + final var initialListener = new SubscribableListener<>(); + final var result = new AtomicReference<>(); + + final var chainedListener = initialListener.andThenApply(o -> { + result.set(o); + throw new ElasticsearchException("simulated exception in fn"); + }); + assertNull(result.get()); + + final var o1 = new Object(); + initialListener.onResponse(o1); + assertSame(o1, result.get()); + assertComplete(chainedListener, "simulated exception in fn"); + } + + public void testAndThenApplyFailure() { + final var initialListener = new SubscribableListener<>(); + + final var chainedListener = initialListener.andThenApply(o -> fail(null, "should not be called")); + assertFalse(chainedListener.isDone()); + + initialListener.onFailure(new ElasticsearchException("simulated")); + assertComplete(chainedListener, "simulated"); + } + + public void testAndThenAcceptSuccess() throws Exception { + final var initialListener = new SubscribableListener<>(); + final var result = new AtomicReference<>(); + + final var chainedListener = initialListener.andThenAccept(result::set); + assertNull(result.get()); + + final var o1 = new Object(); + initialListener.onResponse(o1); + assertSame(o1, result.get()); + assertTrue(chainedListener.isDone()); + assertNull(chainedListener.rawResult()); + } + + public void testAndThenAcceptThrowException() { + final var initialListener = new SubscribableListener<>(); + final var result = new AtomicReference<>(); + + final var chainedListener = initialListener.andThenAccept(o -> { + result.set(o); + throw new ElasticsearchException("simulated exception in fn"); + }); + assertNull(result.get()); + + final var o1 = new Object(); + 
initialListener.onResponse(o1); + assertSame(o1, result.get()); + assertComplete(chainedListener, "simulated exception in fn"); + } + + public void testAndThenAcceptFailure() { + final var initialListener = new SubscribableListener<>(); + + final var chainedListener = initialListener.andThenAccept(o -> fail(null, "should not be called")); + assertFalse(chainedListener.isDone()); + + initialListener.onFailure(new ElasticsearchException("simulated")); + assertComplete(chainedListener, "simulated"); + } } diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsAction.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsAction.java index 550410d1d59aa..40a93dfa11d80 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsAction.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsAction.java @@ -134,13 +134,12 @@ protected void doExecute(Task task, Request request, ActionListener li ) // Compute snapshots to delete for each (relevant) policy - .andThen((l, snapshotDetailsByPolicy) -> ActionListener.completeWith(l, () -> { + .andThenAccept(snapshotDetailsByPolicy -> { resultsBuilder.addResult( repositoryName, getSnapshotsToDelete(repositoryName, request.policies(), snapshotDetailsByPolicy) ); - return null; - })) + }) // And notify this repository's listener on completion .addListener(perRepositoryListener.delegateResponse((l, e) -> { diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsActionTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsActionTests.java index eda0e4f8ae39c..551cb33166d31 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsActionTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsActionTests.java @@ -175,7 +175,7 @@ record SeenSnapshotInfo(SnapshotId snapshotId, String policyId) {} .map(si -> new SeenSnapshotInfo(si.snapshotId(), RepositoryData.SnapshotDetails.fromSnapshotInfo(si).getSlmPolicy())) .collect(Collectors.toSet()); - SubscribableListener + final var testListener = SubscribableListener .newForked(l -> repository.getRepositoryData(EsExecutors.DIRECT_EXECUTOR_SERVICE, l)) @@ -183,7 +183,7 @@ record SeenSnapshotInfo(SnapshotId snapshotId, String policyId) {} (l, rd) -> SLMGetExpiredSnapshotsAction.getSnapshotDetailsByPolicy(EsExecutors.DIRECT_EXECUTOR_SERVICE, repository, rd, l) ) - .andThen((l, snapshotDetailsByPolicy) -> { + .andThenAccept(snapshotDetailsByPolicy -> { snapshotDetailsByPolicy.flatMap((policyId, snapshotsMap) -> snapshotsMap.entrySet().stream().map(entry -> { assertThat(policyId, oneOf(policyNames)); assertEquals(policyId, entry.getValue().getSlmPolicy()); @@ -192,6 +192,7 @@ record SeenSnapshotInfo(SnapshotId snapshotId, String policyId) {} }); deterministicTaskQueue.runAllTasks(); + assertTrue(testListener.isDone()); assertThat(seenSnapshotInfos, empty()); } From a5aa900358bce80d81bd02a7616ec42cd2584777 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Mon, 8 Jan 2024 21:13:20 +0100 Subject: [PATCH 12/47] ESQL: Update the use of some user-caused exceptions (#104046) This updates the use of the exceptions subclassed from `QlServerException` when the failure reason is user-caused. This ensures that a 400-class response is returned, instead of a 500-class one. 
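To illustrate the convention being applied, here is a hedged sketch — the class bodies below are simplified stand-ins for the real x-pack QL classes, which derive their REST status through `ElasticsearchException` rather than a plain int. The status code of an error response follows the exception's position in the hierarchy, so moving user-caused failures from the server branch to the client branch is what turns 500s into 400s:

```java
// Simplified stand-ins for the QL exception hierarchy; names and bodies are illustrative.
abstract class QlExceptionSketch extends RuntimeException {
    QlExceptionSketch(String message) { super(message); }
    abstract int restStatus();                 // consulted when rendering the error response
}

class ServerExceptionSketch extends QlExceptionSketch {
    ServerExceptionSketch(String message) { super(message); }
    int restStatus() { return 500; }           // unexpected internal failure
}

class ClientExceptionSketch extends QlExceptionSketch {
    ClientExceptionSketch(String message) { super(message); }
    int restStatus() { return 400; }           // the caller's input was at fault
}

// InvalidArgumentException, used throughout this patch in place of QlIllegalArgumentException,
// sits on the client side of the hierarchy, so e.g. an out-of-range ES|QL conversion now
// reports as a 400-class error instead of a 500-class one.
class OutOfRangeSketch extends ClientExceptionSketch {
    OutOfRangeSketch(Object value, String type) {
        super("[" + value + "] out of [" + type + "] range");
    }
}
```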
--- docs/changelog/104046.yaml | 5 ++++ .../esql/functions/to_integer.asciidoc | 2 +- .../convert/ToLongFromDoubleEvaluator.java | 7 +++-- .../ToLongFromUnsignedLongEvaluator.java | 7 +++-- .../ToUnsignedLongFromDoubleEvaluator.java | 7 +++-- .../function/scalar/convert/ToLong.java | 5 ++-- .../scalar/convert/ToUnsignedLong.java | 3 +-- .../function/scalar/date/DateExtract.java | 4 +-- .../function/scalar/date/DateParse.java | 4 +-- .../expression/function/scalar/math/Cast.java | 5 ++-- .../function/scalar/string/Split.java | 14 +++++----- .../xpack/esql/parser/ExpressionBuilder.java | 8 +++--- .../AbstractPhysicalOperationProviders.java | 5 ++-- .../esql/type/EsqlDataTypeConverter.java | 5 ++-- .../scalar/date/DateExtractTests.java | 19 +++++++++++++ .../function/scalar/date/DateParseTests.java | 20 ++++++++++++++ .../function/scalar/string/SplitTests.java | 27 ++++++++++++------- .../xpack/esql/parser/ExpressionTests.java | 5 ++++ 18 files changed, 104 insertions(+), 48 deletions(-) create mode 100644 docs/changelog/104046.yaml diff --git a/docs/changelog/104046.yaml b/docs/changelog/104046.yaml new file mode 100644 index 0000000000000..9b383611b560a --- /dev/null +++ b/docs/changelog/104046.yaml @@ -0,0 +1,5 @@ +pr: 104046 +summary: "ESQL: Update the use of some user-caused exceptions" +area: ES|QL +type: bug +issues: [] diff --git a/docs/reference/esql/functions/to_integer.asciidoc b/docs/reference/esql/functions/to_integer.asciidoc index e185b87d6d95d..e62256930c5aa 100644 --- a/docs/reference/esql/functions/to_integer.asciidoc +++ b/docs/reference/esql/functions/to_integer.asciidoc @@ -26,7 +26,7 @@ provide information on the source of the failure: A following header will contain the failure reason and the offending value: -`"org.elasticsearch.xpack.ql.QlIllegalArgumentException: [501379200000] out of [integer] range"` +`"org.elasticsearch.xpack.ql.InvalidArgumentException: [501379200000] out of [integer] range"` If the input parameter is of a date type, its value will be interpreted as diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java index b8b86f1d6cbf1..03daa257e5af2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java @@ -14,7 +14,6 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.InvalidArgumentException; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.tree.Source; /** @@ -39,7 +38,7 @@ public Block evalVector(Vector v) { if (vector.isConstant()) { try { return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); return driverContext.blockFactory().newConstantNullBlock(positionCount); } @@ -48,7 +47,7 @@ public Block evalVector(Vector v) { for (int p = 0; p < positionCount; p++) { try { builder.appendLong(evalValue(vector, p)); - } catch (InvalidArgumentException | 
QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); builder.appendNull(); } @@ -82,7 +81,7 @@ public Block evalBlock(Block b) { } builder.appendLong(value); valuesAppended = true; - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java index 41f8980581073..b5999d1a4e1ab 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java @@ -13,7 +13,6 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.InvalidArgumentException; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.tree.Source; /** @@ -38,7 +37,7 @@ public Block evalVector(Vector v) { if (vector.isConstant()) { try { return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); return driverContext.blockFactory().newConstantNullBlock(positionCount); } @@ -47,7 +46,7 @@ public Block evalVector(Vector v) { for (int p = 0; p < positionCount; p++) { try { builder.appendLong(evalValue(vector, p)); - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); builder.appendNull(); } @@ -81,7 +80,7 @@ public Block evalBlock(Block b) { } builder.appendLong(value); valuesAppended = true; - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java index 6d57bbd978370..6a45dcf907889 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java @@ -14,7 +14,6 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.InvalidArgumentException; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.tree.Source; /** @@ -39,7 +38,7 @@ public Block evalVector(Vector v) { if (vector.isConstant()) { try { return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); return 
driverContext.blockFactory().newConstantNullBlock(positionCount); } @@ -48,7 +47,7 @@ public Block evalVector(Vector v) { for (int p = 0; p < positionCount; p++) { try { builder.appendLong(evalValue(vector, p)); - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); builder.appendNull(); } @@ -82,7 +81,7 @@ public Block evalBlock(Block b) { } builder.appendLong(value); valuesAppended = true; - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java index 87b35218ab2ac..06f56e81fc50d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java @@ -12,7 +12,6 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.InvalidArgumentException; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -102,12 +101,12 @@ static long fromKeyword(BytesRef in) { } } - @ConvertEvaluator(extraName = "FromDouble", warnExceptions = { InvalidArgumentException.class, QlIllegalArgumentException.class }) + @ConvertEvaluator(extraName = "FromDouble", warnExceptions = { InvalidArgumentException.class }) static long fromDouble(double dbl) { return safeDoubleToLong(dbl); } - @ConvertEvaluator(extraName = "FromUnsignedLong", warnExceptions = { InvalidArgumentException.class, QlIllegalArgumentException.class }) + @ConvertEvaluator(extraName = "FromUnsignedLong", warnExceptions = { InvalidArgumentException.class }) static long fromUnsignedLong(long ul) { return safeToLong(unsignedLongAsNumber(ul)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java index cfa24cd6d8ff8..651259db06054 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java @@ -12,7 +12,6 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.InvalidArgumentException; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -86,7 +85,7 @@ static long fromKeyword(BytesRef in) { return asLongUnsigned(safeToUnsignedLong(asString)); } - @ConvertEvaluator(extraName = "FromDouble", warnExceptions = { InvalidArgumentException.class, QlIllegalArgumentException.class }) + @ConvertEvaluator(extraName = "FromDouble", warnExceptions = { InvalidArgumentException.class }) static 
long fromDouble(double dbl) { return asLongUnsigned(safeToUnsignedLong(dbl)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java index c5d5dc5054653..eadea746a1bd1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java @@ -12,8 +12,8 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; -import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.TypeResolutions; import org.elasticsearch.xpack.ql.expression.function.scalar.ConfigurationFunction; @@ -49,7 +49,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function new BytesRef()); } BytesRef delim = (BytesRef) right().fold(); + checkDelimiter(delim); + return new SplitSingleByteEvaluator.Factory(source(), str, delim.bytes[delim.offset], context -> new BytesRef()); + } + + private static void checkDelimiter(BytesRef delim) { if (delim.length != 1) { - throw new QlIllegalArgumentException("for now delimiter must be a single byte"); + throw new InvalidArgumentException("delimiter must be single byte for now"); } - return new SplitSingleByteEvaluator.Factory(source(), str, delim.bytes[delim.offset], context -> new BytesRef()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 3b1ef475350b1..9875979808f0b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -103,9 +103,9 @@ public Literal visitDecimalValue(EsqlBaseParser.DecimalValueContext ctx) { String text = ctx.getText(); try { - return new Literal(source, Double.valueOf(StringUtils.parseDouble(text)), DataTypes.DOUBLE); - } catch (QlIllegalArgumentException siae) { - throw new ParsingException(source, siae.getMessage()); + return new Literal(source, StringUtils.parseDouble(text), DataTypes.DOUBLE); + } catch (InvalidArgumentException iae) { + throw new ParsingException(source, iae.getMessage()); } } @@ -121,7 +121,7 @@ public Literal visitIntegerValue(EsqlBaseParser.IntegerValueContext ctx) { // if it's too large, then quietly try to parse as a float instead try { return new Literal(source, StringUtils.parseDouble(text), DataTypes.DOUBLE); - } catch (QlIllegalArgumentException ignored) {} + } catch (InvalidArgumentException ignored) {} throw new ParsingException(source, siae.getMessage()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java index a41b6fdd52f4c..69e80a433f2d0 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.LocalExecutionPlannerContext; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.PhysicalOperation; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.Expression; @@ -236,10 +237,10 @@ private void aggregatesToFactory( Expression field = aggregateFunction.field(); // Only count can now support literals - all the other aggs should be optimized away if (field.foldable()) { - if (aggregateFunction instanceof Count count) { + if (aggregateFunction instanceof Count) { sourceAttr = emptyList(); } else { - throw new EsqlIllegalArgumentException( + throw new InvalidArgumentException( "Does not support yet aggregations over constants - [{}]", aggregateFunction.sourceText() ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java index 192d5e43f9366..eba80ff238a45 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.parser.ParsingException; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.Converter; @@ -130,8 +131,8 @@ public static DataType commonType(DataType left, DataType right) { return DataTypeConverter.commonType(left, right); } - public static TemporalAmount parseTemporalAmout(Number value, String qualifier, Source source) throws QlIllegalArgumentException, - ArithmeticException { + public static TemporalAmount parseTemporalAmout(Number value, String qualifier, Source source) throws InvalidArgumentException, + ArithmeticException, ParsingException { return switch (qualifier) { case "millisecond", "milliseconds" -> Duration.ofMillis(safeToLong(value)); case "second", "seconds" -> Duration.ofSeconds(safeToLong(value)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java index 96c35905e3dc0..1446fc54c99fa 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java @@ -11,9 +11,11 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.EsqlTestUtils; import 
org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.tree.Source; @@ -68,6 +70,23 @@ public void testAllChronoFields() { } } + public void testInvalidChrono() { + String chrono = randomAlphaOfLength(10); + DriverContext driverContext = driverContext(); + InvalidArgumentException e = expectThrows( + InvalidArgumentException.class, + () -> evaluator( + new DateExtract( + Source.EMPTY, + new Literal(Source.EMPTY, new BytesRef(chrono), DataTypes.KEYWORD), + field("str", DataTypes.DATETIME), + null + ) + ).get(driverContext) + ); + assertThat(e.getMessage(), equalTo("invalid date field for []: " + chrono)); + } + @Override protected Expression build(Source source, List args) { return new DateExtract(source, args.get(0), args.get(1), EsqlTestUtils.TEST_CFG); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java index f6ead24ff34a6..5c5af560aec08 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java @@ -11,9 +11,12 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -22,6 +25,7 @@ import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.startsWith; public class DateParseTests extends AbstractScalarFunctionTestCase { public DateParseTests(@Name("TestCase") Supplier testCaseSupplier) { @@ -60,6 +64,22 @@ public static Iterable parameters() { ); } + public void testInvalidPattern() { + String pattern = randomAlphaOfLength(10); + DriverContext driverContext = driverContext(); + InvalidArgumentException e = expectThrows( + InvalidArgumentException.class, + () -> evaluator( + new DateParse( + Source.EMPTY, + new Literal(Source.EMPTY, new BytesRef(pattern), DataTypes.KEYWORD), + field("str", DataTypes.KEYWORD) + ) + ).get(driverContext) + ); + assertThat(e.getMessage(), startsWith("invalid date pattern for []: Invalid format: [" + pattern + "]")); + } + @Override protected Expression build(Source source, List args) { return new DateParse(source, args.get(0), args.size() > 1 ? 
args.get(1) : null); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java index 9854dfbe11460..e0611c7125e6e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java @@ -18,17 +18,15 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.hamcrest.Matcher; -import java.util.Arrays; import java.util.List; import java.util.function.Supplier; -import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -67,13 +65,6 @@ protected DataType expectedType(List argTypes) { return DataTypes.KEYWORD; } - private Matcher resultsMatcher(List typedData) { - String str = ((BytesRef) typedData.get(0).data()).utf8ToString(); - String delim = ((BytesRef) typedData.get(1).data()).utf8ToString(); - List split = Arrays.stream(str.split(Pattern.quote(delim))).map(BytesRef::new).toList(); - return equalTo(split.size() == 1 ? split.get(0) : split); - } - @Override protected List argSpec() { return List.of(required(strings()), required(strings())); @@ -107,4 +98,20 @@ public void testConstantDelimiter() { } } } + + public void testTooLongConstantDelimiter() { + String delimiter = randomAlphaOfLength(2); + DriverContext driverContext = driverContext(); + InvalidArgumentException e = expectThrows( + InvalidArgumentException.class, + () -> evaluator( + new Split( + Source.EMPTY, + field("str", DataTypes.KEYWORD), + new Literal(Source.EMPTY, new BytesRef(delimiter), DataTypes.KEYWORD) + ) + ).get(driverContext) + ); + assertThat(e.getMessage(), equalTo("delimiter must be single byte for now")); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java index c9b33a3f2f020..1657b371bfeda 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java @@ -37,6 +37,8 @@ import java.time.Period; import java.util.ArrayList; import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.IntStream; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.DATE_PERIOD; @@ -129,6 +131,9 @@ public void testStringLiteralsExceptions() { () -> whereExpression("\"\"\"\"\"\" foo \"\"\"\" == abc"), "line 1:23: mismatched input 'foo' expecting {," ); + + var number = "1" + IntStream.range(0, 309).mapToObj(ignored -> "0").collect(Collectors.joining()); + assertParsingException(() -> parse("row foo == " + number), "line 1:13: Number [" + number + "] is too large"); } public void 
testBooleanLiteralsCondition() { From c40dc38d0b62ac245ae00ebc3df15dcc431eccd4 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 8 Jan 2024 12:27:06 -0800 Subject: [PATCH 13/47] Suppress this-escape for java 21 (#104089) Suppress the this-escape warnings to allow running with JDK 21. --- .../admin/cluster/node/hotthreads/NodesHotThreadsResponse.java | 2 ++ .../single/instance/TransportInstanceSingleOperationAction.java | 1 + 2 files changed, 3 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java index c0ceef18ca462..892629dbe46f7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java @@ -29,10 +29,12 @@ public class NodesHotThreadsResponse extends BaseNodesResponse<NodeHotThreads> { + @SuppressWarnings("this-escape") private final RefCounted refs = LeakTracker.wrap( AbstractRefCounted.of(() -> Releasables.wrap(Iterators.map(getNodes().iterator(), n -> n::decRef)).close()) ); + @SuppressWarnings("this-escape") public NodesHotThreadsResponse(ClusterName clusterName, List<NodeHotThreads> nodes, List<FailedNodeException> failures) { super(clusterName, nodes, failures); for (NodeHotThreads nodeHotThreads : getNodes()) { diff --git a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java index 79255c891d297..80b7a95bbe0de 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java @@ -57,6 +57,7 @@ public abstract class TransportInstanceSingleOperationAction< final String shardActionName; + @SuppressWarnings("this-escape") protected TransportInstanceSingleOperationAction( String actionName, ThreadPool threadPool, From a1cf9ec6a7eae233cbd8331d3f0f2c9e76dc4716 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?= Date: Mon, 8 Jan 2024 21:27:49 +0100 Subject: [PATCH 14/47] Expose API key authentication metrics (#103178) This PR adds metrics for recording successful and failed authentications for API keys, as well as the authentication time itself. Exposed metrics are: - `es.security.authc.api_key.success.count` - `es.security.authc.api_key.failures.count` - `es.security.authc.api_key.time` Each metric is exposed at node level and includes additional API key information through these attributes: - `es.security.api_key_id` - unique API key identifier - `es.security.api_key_type` - API key type (`rest` or `cross_cluster`) - `es.security.api_key_authc_failure_reason` - failure message (e.g.
`api key is expired`) Relates: ES-7468 --- docs/changelog/103178.yaml | 5 + .../xpack/security/Security.java | 10 +- .../security/authc/ApiKeyAuthenticator.java | 85 ++++-- .../security/authc/AuthenticationService.java | 6 +- .../InstrumentedSecurityActionListener.java | 45 +++ .../SecurityMetricAttributesBuilder.java | 21 ++ .../security/metric/SecurityMetricGroup.java | 21 ++ .../security/metric/SecurityMetricInfo.java | 39 +++ .../security/metric/SecurityMetricType.java | 57 ++++ .../security/metric/SecurityMetrics.java | 90 ++++++ .../xpack/security/SecurityTests.java | 4 +- .../authc/ApiKeyAuthenticatorTests.java | 271 +++++++++++++++++- .../authc/AuthenticationServiceTests.java | 34 ++- .../support/SecondaryAuthenticatorTests.java | 4 +- 14 files changed, 644 insertions(+), 48 deletions(-) create mode 100644 docs/changelog/103178.yaml create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/InstrumentedSecurityActionListener.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricAttributesBuilder.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricGroup.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricInfo.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricType.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetrics.java diff --git a/docs/changelog/103178.yaml b/docs/changelog/103178.yaml new file mode 100644 index 0000000000000..5da0221a68984 --- /dev/null +++ b/docs/changelog/103178.yaml @@ -0,0 +1,5 @@ +pr: 103178 +summary: Expose API key authentication metrics +area: Authentication +type: enhancement +issues: [] diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 1d849055c70a5..a9af4b4ba104a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -89,6 +89,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.internal.ShardSearchRequest; +import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.FixedExecutorBuilder; @@ -648,7 +649,8 @@ public Collection createComponents(PluginServices services) { services.xContentRegistry(), services.environment(), services.nodeEnvironment().nodeMetadata(), - services.indexNameExpressionResolver() + services.indexNameExpressionResolver(), + services.telemetryProvider() ); } catch (final Exception e) { throw new IllegalStateException("security initialization failed", e); @@ -666,7 +668,8 @@ Collection createComponents( NamedXContentRegistry xContentRegistry, Environment environment, NodeMetadata nodeMetadata, - IndexNameExpressionResolver expressionResolver + IndexNameExpressionResolver expressionResolver, + TelemetryProvider telemetryProvider ) throws Exception { logger.info("Security is {}", enabled ? 
"enabled" : "disabled"); if (enabled == false) { @@ -944,7 +947,8 @@ Collection createComponents( tokenService, apiKeyService, serviceAccountService, - operatorPrivilegesService.get() + operatorPrivilegesService.get(), + telemetryProvider.getMeterRegistry() ) ); components.add(authcService.get()); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyAuthenticator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyAuthenticator.java index fadf03a19904c..10bd68e05007e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyAuthenticator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyAuthenticator.java @@ -10,23 +10,46 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.xpack.core.security.action.apikey.ApiKey; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; import org.elasticsearch.xpack.core.security.support.Exceptions; import org.elasticsearch.xpack.security.authc.ApiKeyService.ApiKeyCredentials; +import org.elasticsearch.xpack.security.metric.InstrumentedSecurityActionListener; +import org.elasticsearch.xpack.security.metric.SecurityMetricType; +import org.elasticsearch.xpack.security.metric.SecurityMetrics; + +import java.util.HashMap; +import java.util.Map; +import java.util.function.LongSupplier; import static org.elasticsearch.core.Strings.format; class ApiKeyAuthenticator implements Authenticator { + public static final String ATTRIBUTE_API_KEY_ID = "es.security.api_key_id"; + public static final String ATTRIBUTE_API_KEY_TYPE = "es.security.api_key_type"; + public static final String ATTRIBUTE_API_KEY_AUTHC_FAILURE_REASON = "es.security.api_key_authc_failure_reason"; + private static final Logger logger = LogManager.getLogger(ApiKeyAuthenticator.class); + private final SecurityMetrics authenticationMetrics; private final ApiKeyService apiKeyService; private final String nodeName; - ApiKeyAuthenticator(ApiKeyService apiKeyService, String nodeName) { + ApiKeyAuthenticator(ApiKeyService apiKeyService, String nodeName, MeterRegistry meterRegistry) { + this(apiKeyService, nodeName, meterRegistry, System::nanoTime); + } + + ApiKeyAuthenticator(ApiKeyService apiKeyService, String nodeName, MeterRegistry meterRegistry, LongSupplier nanoTimeSupplier) { + this.authenticationMetrics = new SecurityMetrics<>( + SecurityMetricType.AUTHC_API_KEY, + meterRegistry, + this::buildMetricAttributes, + nanoTimeSupplier + ); this.apiKeyService = apiKeyService; this.nodeName = nodeName; } @@ -51,30 +74,44 @@ public void authenticate(Context context, ActionListener { - if (authResult.isAuthenticated()) { - final Authentication authentication = Authentication.newApiKeyAuthentication(authResult, nodeName); - listener.onResponse(AuthenticationResult.success(authentication)); - } else if (authResult.getStatus() == AuthenticationResult.Status.TERMINATE) { - Exception e = (authResult.getException() != null) - ? 
authResult.getException() - : Exceptions.authenticationError(authResult.getMessage()); - logger.debug(() -> "API key service terminated authentication for request [" + context.getRequest() + "]", e); - context.getRequest().exceptionProcessingRequest(e, authenticationToken); - listener.onFailure(e); - } else { - if (authResult.getMessage() != null) { - if (authResult.getException() != null) { - logger.warn( - () -> format("Authentication using apikey failed - %s", authResult.getMessage()), - authResult.getException() - ); - } else { - logger.warn("Authentication using apikey failed - {}", authResult.getMessage()); + apiKeyService.tryAuthenticate( + context.getThreadContext(), + apiKeyCredentials, + InstrumentedSecurityActionListener.wrapForAuthc(authenticationMetrics, apiKeyCredentials, ActionListener.wrap(authResult -> { + if (authResult.isAuthenticated()) { + final Authentication authentication = Authentication.newApiKeyAuthentication(authResult, nodeName); + listener.onResponse(AuthenticationResult.success(authentication)); + } else if (authResult.getStatus() == AuthenticationResult.Status.TERMINATE) { + Exception e = (authResult.getException() != null) + ? authResult.getException() + : Exceptions.authenticationError(authResult.getMessage()); + logger.debug(() -> "API key service terminated authentication for request [" + context.getRequest() + "]", e); + context.getRequest().exceptionProcessingRequest(e, authenticationToken); + listener.onFailure(e); + } else { + if (authResult.getMessage() != null) { + if (authResult.getException() != null) { + logger.warn( + () -> format("Authentication using apikey failed - %s", authResult.getMessage()), + authResult.getException() + ); + } else { + logger.warn("Authentication using apikey failed - {}", authResult.getMessage()); + } } + listener.onResponse(AuthenticationResult.unsuccessful(authResult.getMessage(), authResult.getException())); } - listener.onResponse(AuthenticationResult.unsuccessful(authResult.getMessage(), authResult.getException())); - } - }, e -> listener.onFailure(context.getRequest().exceptionProcessingRequest(e, null)))); + }, e -> listener.onFailure(context.getRequest().exceptionProcessingRequest(e, null)))) + ); + } + + private Map buildMetricAttributes(ApiKeyCredentials credentials, String failureReason) { + final Map attributes = new HashMap<>(failureReason != null ? 
3 : 2); + attributes.put(ATTRIBUTE_API_KEY_ID, credentials.getId()); + attributes.put(ATTRIBUTE_API_KEY_TYPE, credentials.getExpectedType().value()); + if (failureReason != null) { + attributes.put(ATTRIBUTE_API_KEY_AUTHC_FAILURE_REASON, failureReason); + } + return attributes; } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java index 7d06798e7401b..e522f02891148 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java @@ -20,6 +20,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.http.HttpPreRequest; import org.elasticsearch.node.Node; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -88,7 +89,8 @@ public AuthenticationService( TokenService tokenService, ApiKeyService apiKeyService, ServiceAccountService serviceAccountService, - OperatorPrivilegesService operatorPrivilegesService + OperatorPrivilegesService operatorPrivilegesService, + MeterRegistry meterRegistry ) { this.realms = realms; this.auditTrailService = auditTrailService; @@ -111,7 +113,7 @@ public AuthenticationService( new AuthenticationContextSerializer(), new ServiceAccountAuthenticator(serviceAccountService, nodeName), new OAuth2TokenAuthenticator(tokenService), - new ApiKeyAuthenticator(apiKeyService, nodeName), + new ApiKeyAuthenticator(apiKeyService, nodeName, meterRegistry), new RealmsAuthenticator(numInvalidation, lastSuccessfulAuthCache) ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/InstrumentedSecurityActionListener.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/InstrumentedSecurityActionListener.java new file mode 100644 index 0000000000000..101f49258dd59 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/InstrumentedSecurityActionListener.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.metric; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; + +public class InstrumentedSecurityActionListener { + + /** + * Wraps the provided {@code listener} and returns a new wrapped listener which handles authentication metrics collection. + * + * @param metrics The metrics to collect. + * @param context The context object used to collect and attach additional metric attributes. + * @param listener The authentication result handling listener. + * @return a new "wrapped" listener which overrides onResponse and onFailure methods in order to collect authentication metrics. + * @param <T> The type of authentication result value. + * @param <C> The type of context object which is used to attach additional attributes to collected authentication metrics.
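+ * + * <p>For example, the call shape at the {@code ApiKeyAuthenticator} call site in this PR looks roughly like this + * (an illustrative sketch; the parameter names mirror that call site): + * <pre>{@code + * apiKeyService.tryAuthenticate(threadContext, credentials, + * InstrumentedSecurityActionListener.wrapForAuthc(authenticationMetrics, credentials, listener)); + * }</pre> + * which records success/failure counts and the elapsed time around the wrapped listener.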
+ */ + public static <T, C> ActionListener<AuthenticationResult<T>> wrapForAuthc( + final SecurityMetrics<C> metrics, + final C context, + final ActionListener<AuthenticationResult<T>> listener + ) { + assert metrics.type().group() == SecurityMetricGroup.AUTHC; + final long startTimeNano = metrics.relativeTimeInNanos(); + return ActionListener.runBefore(ActionListener.wrap(result -> { + if (result.isAuthenticated()) { + metrics.recordSuccess(context); + } else { + metrics.recordFailure(context, result.getMessage()); + } + listener.onResponse(result); + }, e -> { + metrics.recordFailure(context, e.getMessage()); + listener.onFailure(e); + }), () -> metrics.recordTime(context, startTimeNano)); + } + +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricAttributesBuilder.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricAttributesBuilder.java new file mode 100644 index 0000000000000..37dd1b75eec2d --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricAttributesBuilder.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.metric; + +import java.util.Map; + +@FunctionalInterface +public interface SecurityMetricAttributesBuilder<C> { + + Map<String, Object> build(C context, String failureReason); + + default Map<String, Object> build(C context) { + return build(context, null); + } + +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricGroup.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricGroup.java new file mode 100644 index 0000000000000..9d334a603a21b --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricGroup.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.metric; + +/** + * Enumerates all metric groups we want to collect. + */ +public enum SecurityMetricGroup { + + AUTHC, + + AUTHZ, + + ; + +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricInfo.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricInfo.java new file mode 100644 index 0000000000000..d80d0f581ea5b --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricInfo.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.metric; + +import org.elasticsearch.telemetry.metric.LongCounter; +import org.elasticsearch.telemetry.metric.LongHistogram; +import org.elasticsearch.telemetry.metric.MeterRegistry; + +import java.util.Objects; + +/** + * Holds all metric information needed to register a metric in {@link MeterRegistry}.
+ * + * @param name The unique metric name. + * @param description The brief metric description. + * @param unit The metric unit (e.g. count). + */ +public record SecurityMetricInfo(String name, String description, String unit) { + + public SecurityMetricInfo(String name, String description, String unit) { + this.name = Objects.requireNonNull(name); + this.description = Objects.requireNonNull(description); + this.unit = Objects.requireNonNull(unit); + } + + public LongCounter registerAsLongCounter(MeterRegistry meterRegistry) { + return meterRegistry.registerLongCounter(this.name(), this.description(), this.unit()); + } + + public LongHistogram registerAsLongHistogram(MeterRegistry meterRegistry) { + return meterRegistry.registerLongHistogram(this.name(), this.description(), this.unit()); + } + +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricType.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricType.java new file mode 100644 index 0000000000000..75a15525bdd55 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricType.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.metric; + +/** + * Defines all security metric types that can be collected. + */ +public enum SecurityMetricType { + + AUTHC_API_KEY( + SecurityMetricGroup.AUTHC, + new SecurityMetricInfo("es.security.authc.api_key.success.count", "Number of successful API key authentications.", "count"), + new SecurityMetricInfo("es.security.authc.api_key.failures.count", "Number of failed API key authentications.", "count"), + new SecurityMetricInfo("es.security.authc.api_key.time", "Time it took (in nanoseconds) to execute API key authentication.", "ns") + ), + + ; + + private final SecurityMetricGroup group; + private final SecurityMetricInfo successMetricInfo; + private final SecurityMetricInfo failuresMetricInfo; + private final SecurityMetricInfo timeMetricInfo; + + SecurityMetricType( + SecurityMetricGroup group, + SecurityMetricInfo successMetricInfo, + SecurityMetricInfo failuresMetricInfo, + SecurityMetricInfo timeMetricInfo + ) { + this.group = group; + this.successMetricInfo = successMetricInfo; + this.failuresMetricInfo = failuresMetricInfo; + this.timeMetricInfo = timeMetricInfo; + } + + public SecurityMetricGroup group() { + return this.group; + } + + public SecurityMetricInfo successMetricInfo() { + return successMetricInfo; + } + + public SecurityMetricInfo failuresMetricInfo() { + return failuresMetricInfo; + } + + public SecurityMetricInfo timeMetricInfo() { + return timeMetricInfo; + } + +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetrics.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetrics.java new file mode 100644 index 0000000000000..bf993516bf964 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetrics.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.metric; + +import org.elasticsearch.telemetry.metric.LongCounter; +import org.elasticsearch.telemetry.metric.LongHistogram; +import org.elasticsearch.telemetry.metric.MeterRegistry; + +import java.util.Objects; +import java.util.function.LongSupplier; + +/** + * This class provides a common way for registering and collecting different types of security metrics. + * It allows for recording the number of successful and failed executions as well as the execution time. + * + * @param <C> The type of context object which is used to attach additional attributes to collected metrics. + */ +public final class SecurityMetrics<C> { + + private final LongCounter successCounter; + private final LongCounter failuresCounter; + private final LongHistogram timeHistogram; + + private final SecurityMetricAttributesBuilder<C> attributesBuilder; + private final LongSupplier nanoTimeSupplier; + private final SecurityMetricType metricType; + + public SecurityMetrics( + final SecurityMetricType metricType, + final MeterRegistry meterRegistry, + final SecurityMetricAttributesBuilder<C> attributesBuilder, + final LongSupplier nanoTimeSupplier + ) { + this.metricType = Objects.requireNonNull(metricType); + this.successCounter = metricType.successMetricInfo().registerAsLongCounter(meterRegistry); + this.failuresCounter = metricType.failuresMetricInfo().registerAsLongCounter(meterRegistry); + this.timeHistogram = metricType.timeMetricInfo().registerAsLongHistogram(meterRegistry); + this.attributesBuilder = Objects.requireNonNull(attributesBuilder); + this.nanoTimeSupplier = Objects.requireNonNull(nanoTimeSupplier); + } + + public SecurityMetricType type() { + return this.metricType; + } + + /** + * Returns a value in nanoseconds that may be used for relative time calculations. + * This method should only be used for calculating time deltas. + */ + public long relativeTimeInNanos() { + return nanoTimeSupplier.getAsLong(); + } + + /** + * Records a single successful execution. + * + * @param context The context object which is used to attach additional attributes to the success metric. + */ + public void recordSuccess(final C context) { + this.successCounter.incrementBy(1L, attributesBuilder.build(context)); + } + + /** + * Records a single failed execution. + * + * @param context The context object which is used to attach additional attributes to the failure metric. + * @param failureReason The optional failure reason which is stored as an attribute with the recorded failure metric. + */ + public void recordFailure(final C context, final String failureReason) { + this.failuresCounter.incrementBy(1L, attributesBuilder.build(context, failureReason)); + } + + /** + * Records a time in nanoseconds. This method should be called after the execution with the provided start time. + * The {@link #relativeTimeInNanos()} should be used to record the start time. + * + * @param context The context object which is used to attach additional attributes to the collected metric. + * @param startTimeNano The start time (in nanoseconds) before the execution.
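+ * + * <p>A typical call sequence (an illustrative sketch, mirroring how {@code InstrumentedSecurityActionListener} uses this class): + * <pre>{@code + * final long startTimeNano = metrics.relativeTimeInNanos(); + * // ... run the instrumented operation ... + * metrics.recordTime(context, startTimeNano); + * }</pre>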
+ */ + public void recordTime(final C context, final long startTimeNano) { + final long timeInNanos = relativeTimeInNanos() - startTimeNano; + this.timeHistogram.record(timeInNanos, this.attributesBuilder.build(context)); + } + +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 022f7228a056f..1735b9443c78f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -58,6 +58,7 @@ import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -224,7 +225,8 @@ private Collection createComponentsUtil(Settings settings) throws Except xContentRegistry(), env, nodeMetadata, - TestIndexNameExpressionResolver.newInstance(threadContext) + TestIndexNameExpressionResolver.newInstance(threadContext), + TelemetryProvider.NOOP ); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyAuthenticatorTests.java index ab11b6bf4e572..e5a7456e79d6d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyAuthenticatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyAuthenticatorTests.java @@ -14,16 +14,26 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.telemetry.Measurement; +import org.elasticsearch.telemetry.TestTelemetryPlugin; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.security.action.apikey.ApiKey; import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.AuthenticationField; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.authc.ApiKeyService.ApiKeyCredentials; import org.elasticsearch.xpack.security.authc.AuthenticationService.AuditableRequest; +import org.elasticsearch.xpack.security.metric.SecurityMetricType; + +import java.util.List; +import java.util.Map; +import java.util.function.LongSupplier; import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.sameInstance; import static org.mockito.ArgumentMatchers.any; @@ -37,15 +47,15 @@ public class ApiKeyAuthenticatorTests extends ESTestCase { public void testAuditingOnAuthenticationTermination() { final ApiKeyService apiKeyService = mock(ApiKeyService.class); - final ApiKeyAuthenticator apiKeyAuthenticator = new ApiKeyAuthenticator(apiKeyService, randomAlphaOfLengthBetween(3, 8)); + final ApiKeyAuthenticator apiKeyAuthenticator = new ApiKeyAuthenticator( 
+ apiKeyService, + randomAlphaOfLengthBetween(3, 8), + MeterRegistry.NOOP + ); final Authenticator.Context context = mock(Authenticator.Context.class); - final ApiKeyCredentials apiKeyCredentials = new ApiKeyCredentials( - randomAlphaOfLength(20), - new SecureString(randomAlphaOfLength(20).toCharArray()), - randomFrom(ApiKey.Type.values()) - ); + final ApiKeyCredentials apiKeyCredentials = randomApiKeyCredentials(); when(context.getMostRecentAuthenticationToken()).thenReturn(apiKeyCredentials); final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); when(context.getThreadContext()).thenReturn(threadContext); @@ -72,4 +82,253 @@ public void testAuditingOnAuthenticationTermination() { } } + public void testRecordingSuccessfulAuthenticationMetrics() { + final TestTelemetryPlugin telemetryPlugin = new TestTelemetryPlugin(); + final long initialNanoTime = randomLongBetween(0, 100); + final TestNanoTimeSupplier nanoTimeSupplier = new TestNanoTimeSupplier(initialNanoTime); + final ApiKeyService apiKeyService = mock(ApiKeyService.class); + final ApiKeyAuthenticator apiKeyAuthenticator = createApiKeyAuthenticator(apiKeyService, telemetryPlugin, nanoTimeSupplier); + + final ApiKeyCredentials apiKeyCredentials = randomApiKeyCredentials(); + final Authenticator.Context context = mockApiKeyAuthenticationContext(apiKeyCredentials); + + final long executionTimeInNanos = randomLongBetween(0, 500); + doAnswer(invocation -> { + final ActionListener> listener = invocation.getArgument(2); + nanoTimeSupplier.advanceTime(executionTimeInNanos); + listener.onResponse( + AuthenticationResult.success( + new User(randomAlphaOfLengthBetween(3, 8)), + Map.ofEntries( + Map.entry(AuthenticationField.API_KEY_ID_KEY, apiKeyCredentials.getId()), + Map.entry(AuthenticationField.API_KEY_TYPE_KEY, apiKeyCredentials.getExpectedType().value()) + ) + ) + ); + return null; + }).when(apiKeyService).tryAuthenticate(any(), same(apiKeyCredentials), anyActionListener()); + + final PlainActionFuture> future = new PlainActionFuture<>(); + apiKeyAuthenticator.authenticate(context, future); + final AuthenticationResult authResult = future.actionGet(); + assertThat(authResult.isAuthenticated(), equalTo(true)); + + List successMetrics = telemetryPlugin.getLongCounterMeasurement( + SecurityMetricType.AUTHC_API_KEY.successMetricInfo().name() + ); + assertThat(successMetrics.size(), equalTo(1)); + + // verify that we always record a single authentication + assertThat(successMetrics.get(0).getLong(), equalTo(1L)); + // and that all attributes are present + assertThat( + successMetrics.get(0).attributes(), + equalTo( + Map.ofEntries( + Map.entry(ApiKeyAuthenticator.ATTRIBUTE_API_KEY_ID, apiKeyCredentials.getId()), + Map.entry(ApiKeyAuthenticator.ATTRIBUTE_API_KEY_TYPE, apiKeyCredentials.getExpectedType().value()) + ) + ) + ); + + // verify that there were no failures recorded + assertZeroFailedAuthMetrics(telemetryPlugin); + + // verify we recorded authentication time + assertAuthenticationTimeMetric(telemetryPlugin, apiKeyCredentials, executionTimeInNanos); + } + + public void testRecordingFailedAuthenticationMetrics() { + final TestTelemetryPlugin telemetryPlugin = new TestTelemetryPlugin(); + final long initialNanoTime = randomLongBetween(1, 100); + final TestNanoTimeSupplier nanoTimeSupplier = new TestNanoTimeSupplier(initialNanoTime); + final ApiKeyService apiKeyService = mock(ApiKeyService.class); + final ApiKeyAuthenticator apiKeyAuthenticator = createApiKeyAuthenticator(apiKeyService, telemetryPlugin, nanoTimeSupplier); + 
+ final ApiKeyCredentials apiKeyCredentials = randomApiKeyCredentials(); + final Authenticator.Context context = mockApiKeyAuthenticationContext(apiKeyCredentials); + + final Exception exception = randomFrom(new ElasticsearchException("API key auth exception"), null); + final boolean failWithTermination = randomBoolean(); + final AuthenticationResult failedAuth; + if (failWithTermination) { + failedAuth = AuthenticationResult.terminate("terminated API key auth", exception); + } else { + failedAuth = AuthenticationResult.unsuccessful("unsuccessful API key auth", exception); + } + + final long executionTimeInNanos = randomLongBetween(0, 500); + doAnswer(invocation -> { + nanoTimeSupplier.advanceTime(executionTimeInNanos); + final ActionListener> listener = invocation.getArgument(2); + listener.onResponse(failedAuth); + return Void.TYPE; + }).when(apiKeyService).tryAuthenticate(any(), same(apiKeyCredentials), anyActionListener()); + final PlainActionFuture> future = new PlainActionFuture<>(); + apiKeyAuthenticator.authenticate(context, future); + + if (failWithTermination) { + final Exception e = expectThrows(Exception.class, future::actionGet); + if (exception == null) { + assertThat(e, instanceOf(ElasticsearchSecurityException.class)); + assertThat(e.getMessage(), containsString("terminated API key auth")); + } else { + assertThat(e, sameInstance(exception)); + } + assertSingleFailedAuthMetric(telemetryPlugin, apiKeyCredentials, "terminated API key auth"); + } else { + var authResult = future.actionGet(); + assertThat(authResult.isAuthenticated(), equalTo(false)); + assertSingleFailedAuthMetric(telemetryPlugin, apiKeyCredentials, "unsuccessful API key auth"); + } + + // verify that there were no successes recorded + assertZeroSuccessAuthMetrics(telemetryPlugin); + + // verify we recorded authentication time + assertAuthenticationTimeMetric(telemetryPlugin, apiKeyCredentials, executionTimeInNanos); + } + + public void testRecordingFailedAuthenticationMetricsOnExceptions() { + final TestTelemetryPlugin telemetryPlugin = new TestTelemetryPlugin(); + final long initialNanoTime = randomLongBetween(0, 100); + final TestNanoTimeSupplier nanoTimeSupplier = new TestNanoTimeSupplier(initialNanoTime); + final ApiKeyService apiKeyService = mock(ApiKeyService.class); + final ApiKeyAuthenticator apiKeyAuthenticator = createApiKeyAuthenticator(apiKeyService, telemetryPlugin, nanoTimeSupplier); + + final ApiKeyCredentials apiKeyCredentials = randomApiKeyCredentials(); + final Authenticator.Context context = mockApiKeyAuthenticationContext(apiKeyCredentials); + + final ElasticsearchSecurityException exception = new ElasticsearchSecurityException("API key auth exception"); + when(context.getRequest().exceptionProcessingRequest(same(exception), any())).thenReturn(exception); + + final long executionTimeInNanos = randomLongBetween(0, 500); + doAnswer(invocation -> { + nanoTimeSupplier.advanceTime(executionTimeInNanos); + final ActionListener> listener = invocation.getArgument(2); + listener.onFailure(exception); + return Void.TYPE; + }).when(apiKeyService).tryAuthenticate(any(), same(apiKeyCredentials), anyActionListener()); + + final PlainActionFuture> future = new PlainActionFuture<>(); + apiKeyAuthenticator.authenticate(context, future); + + var e = expectThrows(ElasticsearchSecurityException.class, future::actionGet); + assertThat(e, sameInstance(exception)); + + // expecting single recorded auth failure with message same as the thrown exception + assertSingleFailedAuthMetric(telemetryPlugin, 
apiKeyCredentials, "API key auth exception"); + + // verify that there were no successes recorded + assertZeroSuccessAuthMetrics(telemetryPlugin); + + // verify we recorded authentication time + assertAuthenticationTimeMetric(telemetryPlugin, apiKeyCredentials, executionTimeInNanos); + } + + private void assertSingleFailedAuthMetric( + TestTelemetryPlugin telemetryPlugin, + ApiKeyCredentials apiKeyCredentials, + String failureMessage + ) { + List failuresMetrics = telemetryPlugin.getLongCounterMeasurement( + SecurityMetricType.AUTHC_API_KEY.failuresMetricInfo().name() + ); + assertThat(failuresMetrics.size(), equalTo(1)); + assertThat( + failuresMetrics.get(0).attributes(), + equalTo( + Map.ofEntries( + Map.entry(ApiKeyAuthenticator.ATTRIBUTE_API_KEY_ID, apiKeyCredentials.getId()), + Map.entry(ApiKeyAuthenticator.ATTRIBUTE_API_KEY_TYPE, apiKeyCredentials.getExpectedType().value()), + Map.entry(ApiKeyAuthenticator.ATTRIBUTE_API_KEY_AUTHC_FAILURE_REASON, failureMessage) + ) + ) + ); + } + + private void assertAuthenticationTimeMetric( + TestTelemetryPlugin telemetryPlugin, + ApiKeyCredentials credentials, + long expectedAuthenticationTime + ) { + List authTimeMetrics = telemetryPlugin.getLongHistogramMeasurement( + SecurityMetricType.AUTHC_API_KEY.timeMetricInfo().name() + ); + assertThat(authTimeMetrics.size(), equalTo(1)); + assertThat(authTimeMetrics.get(0).getLong(), equalTo(expectedAuthenticationTime)); + assertThat( + authTimeMetrics.get(0).attributes(), + equalTo( + Map.ofEntries( + Map.entry(ApiKeyAuthenticator.ATTRIBUTE_API_KEY_ID, credentials.getId()), + Map.entry(ApiKeyAuthenticator.ATTRIBUTE_API_KEY_TYPE, credentials.getExpectedType().value()) + ) + ) + ); + } + + private void assertZeroSuccessAuthMetrics(TestTelemetryPlugin telemetryPlugin) { + List successMetrics = telemetryPlugin.getLongCounterMeasurement( + SecurityMetricType.AUTHC_API_KEY.successMetricInfo().name() + ); + assertThat(successMetrics.size(), equalTo(0)); + } + + private void assertZeroFailedAuthMetrics(TestTelemetryPlugin telemetryPlugin) { + List failuresMetrics = telemetryPlugin.getLongCounterMeasurement( + SecurityMetricType.AUTHC_API_KEY.failuresMetricInfo().name() + ); + assertThat(failuresMetrics.size(), equalTo(0)); + } + + private static ApiKeyCredentials randomApiKeyCredentials() { + return new ApiKeyCredentials( + randomAlphaOfLength(12), + new SecureString(randomAlphaOfLength(20).toCharArray()), + randomFrom(ApiKey.Type.values()) + ); + } + + private static ApiKeyAuthenticator createApiKeyAuthenticator( + ApiKeyService apiKeyService, + TestTelemetryPlugin telemetryPlugin, + LongSupplier nanoTimeSupplier + ) { + return new ApiKeyAuthenticator( + apiKeyService, + randomAlphaOfLengthBetween(3, 8), + telemetryPlugin.getTelemetryProvider(Settings.EMPTY).getMeterRegistry(), + nanoTimeSupplier + ); + } + + private static Authenticator.Context mockApiKeyAuthenticationContext(ApiKeyCredentials apiKeyCredentials) { + final Authenticator.Context context = mock(Authenticator.Context.class); + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + when(context.getMostRecentAuthenticationToken()).thenReturn(apiKeyCredentials); + when(context.getThreadContext()).thenReturn(threadContext); + final AuditableRequest auditableRequest = mock(AuditableRequest.class); + when(context.getRequest()).thenReturn(auditableRequest); + return context; + } + + private static class TestNanoTimeSupplier implements LongSupplier { + + private long currentTime; + + TestNanoTimeSupplier(long initialTime) { + 
this.currentTime = initialTime; + } + + public void advanceTime(long timeToAdd) { + this.currentTime += timeToAdd; + } + + @Override + public long getAsLong() { + return currentTime; + } + } + } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index 6fb0d69175307..c524847e9dbbb 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -53,6 +53,7 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; @@ -364,7 +365,8 @@ public void init() throws Exception { tokenService, apiKeyService, serviceAccountService, - operatorPrivilegesService + operatorPrivilegesService, + MeterRegistry.NOOP ); } @@ -660,7 +662,8 @@ public void testAuthenticateSmartRealmOrderingDisabled() { tokenService, apiKeyService, serviceAccountService, - operatorPrivilegesService + operatorPrivilegesService, + MeterRegistry.NOOP ); User user = new User("_username", "r1"); when(firstRealm.supports(token)).thenReturn(true); @@ -1040,7 +1043,8 @@ public void testAuthenticateTransportContextAndHeader() throws Exception { tokenService, apiKeyService, serviceAccountService, - operatorPrivilegesService + operatorPrivilegesService, + MeterRegistry.NOOP ); boolean requestIdAlreadyPresent = randomBoolean(); SetOnce reqId = new SetOnce<>(); @@ -1090,7 +1094,8 @@ public void testAuthenticateTransportContextAndHeader() throws Exception { tokenService, apiKeyService, serviceAccountService, - operatorPrivilegesService + operatorPrivilegesService, + MeterRegistry.NOOP ); threadContext2.putHeader(AuthenticationField.AUTHENTICATION_KEY, authHeaderRef.get()); @@ -1113,7 +1118,8 @@ public void testAuthenticateTransportContextAndHeader() throws Exception { tokenService, apiKeyService, serviceAccountService, - operatorPrivilegesService + operatorPrivilegesService, + MeterRegistry.NOOP ); service.authenticate("_action", new InternalRequest(), InternalUsers.SYSTEM_USER, ActionListener.wrap(result -> { if (requestIdAlreadyPresent) { @@ -1175,7 +1181,8 @@ public void testWrongTokenDoesNotFallbackToAnonymous() { tokenService, apiKeyService, serviceAccountService, - operatorPrivilegesService + operatorPrivilegesService, + MeterRegistry.NOOP ); try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { @@ -1219,7 +1226,8 @@ public void testWrongApiKeyDoesNotFallbackToAnonymous() { tokenService, apiKeyService, serviceAccountService, - operatorPrivilegesService + operatorPrivilegesService, + MeterRegistry.NOOP ); doAnswer(invocationOnMock -> { final GetRequest request = (GetRequest) invocationOnMock.getArguments()[0]; @@ -1283,7 +1291,8 @@ public void testAnonymousUserRest() throws Exception { tokenService, apiKeyService, serviceAccountService, - operatorPrivilegesService + operatorPrivilegesService, + MeterRegistry.NOOP ); RestRequest request = new FakeRestRequest(); @@ -1319,7 +1328,8 @@ public void testAuthenticateRestRequestDisallowAnonymous() throws Exception { tokenService, apiKeyService, 
serviceAccountService, - operatorPrivilegesService + operatorPrivilegesService, + MeterRegistry.NOOP ); RestRequest request = new FakeRestRequest(); @@ -1350,7 +1360,8 @@ public void testAnonymousUserTransportNoDefaultUser() throws Exception { tokenService, apiKeyService, serviceAccountService, - operatorPrivilegesService + operatorPrivilegesService, + MeterRegistry.NOOP ); InternalRequest message = new InternalRequest(); boolean requestIdAlreadyPresent = randomBoolean(); @@ -1385,7 +1396,8 @@ public void testAnonymousUserTransportWithDefaultUser() throws Exception { tokenService, apiKeyService, serviceAccountService, - operatorPrivilegesService + operatorPrivilegesService, + MeterRegistry.NOOP ); InternalRequest message = new InternalRequest(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthenticatorTests.java index b3a6bed9a5a94..9a8bb5764ce2d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthenticatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthenticatorTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.license.TestUtils; import org.elasticsearch.license.internal.XPackLicenseStatus; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TestMatchers; import org.elasticsearch.test.rest.FakeRestRequest; @@ -156,7 +157,8 @@ public void setupMocks() throws Exception { tokenService, apiKeyService, serviceAccountService, - OperatorPrivileges.NOOP_OPERATOR_PRIVILEGES_SERVICE + OperatorPrivileges.NOOP_OPERATOR_PRIVILEGES_SERVICE, + MeterRegistry.NOOP ); authenticator = new SecondaryAuthenticator(securityContext, authenticationService, auditTrail); } From da79323c1093d7d582d4a616d359d548a99555a5 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 8 Jan 2024 15:45:30 -0500 Subject: [PATCH 15/47] ESQL: Fix memory tests (#104082) This improves the memory tests of ESQL by removing the hard memory limits, instead calculating "how much memory is too much" on the fly. We do a binary search on the operation itself, looking for the first value that'll throw. 
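For example, a test that used to hard-code a breaking limit can now compute it (an illustrative sketch; `runWithBreakerLimit` is a stand-in for however a given test drives its operator under a memory limit): `ByteSizeValue limit = BreakerTestUtil.findBreakerLimit(ByteSizeValue.ofGb(1), memoryLimit -> runWithBreakerLimit(memoryLimit));` Here `findBreakerLimit` binary-searches between zero and the supplied upper bound and returns the largest limit that still trips the circuit breaker.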
Closes #103789 --- .../elasticsearch/test/BreakerTestUtil.java | 85 ++++++++++++++++ .../AggregatorFunctionTestCase.java | 7 -- .../GroupingAggregatorFunctionTestCase.java | 6 -- .../ValuesSourceReaderOperatorTests.java | 2 +- .../operator/AggregationOperatorTests.java | 6 -- .../operator/ColumnExtractOperatorTests.java | 6 -- .../compute/operator/EvalOperatorTests.java | 6 -- .../compute/operator/FilterOperatorTests.java | 6 -- .../operator/ForkingOperatorTestCase.java | 2 - .../HashAggregationOperatorTests.java | 6 -- .../compute/operator/LimitOperatorTests.java | 2 +- .../operator/MvExpandOperatorTests.java | 2 +- .../compute/operator/OperatorTestCase.java | 97 +++++++++---------- .../operator/ProjectOperatorTests.java | 2 +- .../operator/StringExtractOperatorTests.java | 6 -- .../operator/topn/TopNOperatorTests.java | 9 -- 16 files changed, 135 insertions(+), 115 deletions(-) create mode 100644 test/framework/src/main/java/org/elasticsearch/test/BreakerTestUtil.java diff --git a/test/framework/src/main/java/org/elasticsearch/test/BreakerTestUtil.java b/test/framework/src/main/java/org/elasticsearch/test/BreakerTestUtil.java new file mode 100644 index 0000000000000..c34219e86ef66 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/BreakerTestUtil.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.test; + +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; + +public class BreakerTestUtil { + private static final Logger logger = LogManager.getLogger(BreakerTestUtil.class); + + /** + * Performs a binary search between 0 and {@code tooBigToBreak} bytes for the largest memory size + * that'll cause the closure parameter to throw a {@link CircuitBreakingException}. + */ + public static <E extends Exception> ByteSizeValue findBreakerLimit(ByteSizeValue tooBigToBreak, CheckedConsumer<ByteSizeValue, E> c) + throws E { + + // Validate arguments: we don't throw for tooBigToBreak and we *do* throw for 0.
+ try { + c.accept(tooBigToBreak); + } catch (CircuitBreakingException e) { + throw new IllegalArgumentException("expected runnable *not* to break under tooBigToBreak", e); + } + try { + c.accept(ByteSizeValue.ofBytes(0)); + throw new IllegalArgumentException("expected runnable to break under a limit of 0 bytes"); + } catch (CircuitBreakingException e) { + // desired + } + + // Perform the actual binary search + long l = findBreakerLimit(0, tooBigToBreak.getBytes(), c); + + // Validate results: we *do* throw for limit, we don't throw for limit + 1 + ByteSizeValue limit = ByteSizeValue.ofBytes(l); + ByteSizeValue onePastLimit = ByteSizeValue.ofBytes(l + 1); + try { + c.accept(limit); + throw new IllegalArgumentException("expected runnable to break under a limit of " + limit + " bytes"); + } catch (CircuitBreakingException e) { + // desired + } + try { + c.accept(onePastLimit); + } catch (CircuitBreakingException e) { + throw new IllegalArgumentException("expected runnable to break under a limit of " + onePastLimit + " bytes"); + } + return limit; + } + + /** + * A binary search of memory limits, looking for the lowest limit that'll break. + */ + private static long findBreakerLimit(long min, long max, CheckedConsumer c) throws E { + // max is an amount of memory that doesn't break + // min is an amount of memory that *does* break + while (max - min > 1) { + assert max > min; + long diff = max - min; + logger.info( + "Between {} and {}. {} bytes remaining.", + ByteSizeValue.ofBytes(min), + ByteSizeValue.ofBytes(max), + ByteSizeValue.ofBytes(diff) + ); + long mid = min + diff / 2; + try { + c.accept(ByteSizeValue.ofBytes(mid)); + max = mid; + } catch (CircuitBreakingException e) { + min = mid; + } + } + return min; + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java index 18dd086a1d013..c41b7a8475066 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java @@ -8,7 +8,6 @@ package org.elasticsearch.compute.aggregation; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockTestUtils; @@ -82,12 +81,6 @@ protected final void assertSimpleOutput(List input, List results) { assertSimpleOutput(input.stream().map(p -> p.getBlock(0)).toList(), result); } - @Override - protected ByteSizeValue memoryLimitForSimple() { - // This is a super conservative limit that should cause all aggs to break - return ByteSizeValue.ofBytes(20); - } - public final void testIgnoresNulls() { int end = between(1_000, 100_000); List results = new ArrayList<>(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java index 3b93107b9d0de..a6e88234dc25b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java @@ -8,7 +8,6 @@ package org.elasticsearch.compute.aggregation; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; @@ -144,11 +143,6 @@ protected final void assertSimpleOutput(List input, List results) { } } - @Override - protected ByteSizeValue memoryLimitForSimple() { - return ByteSizeValue.ofBytes(100); - } - public final void testNullGroupsAndValues() { DriverContext driverContext = driverContext(); BlockFactory blockFactory = driverContext.blockFactory(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java index 56ab1da63f58a..ada0582a2fad8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java @@ -395,7 +395,7 @@ protected void assertSimpleOutput(List input, List results) { } @Override - protected ByteSizeValue memoryLimitForSimple() { + protected ByteSizeValue enoughMemoryForSimple() { assumeFalse("strange exception in the test, fix soon", true); return ByteSizeValue.ofKb(1); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java index 6810751bd1a7c..884b702a3b703 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.operator; -import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.compute.aggregation.AggregatorMode; import org.elasticsearch.compute.aggregation.MaxLongAggregatorFunction; import org.elasticsearch.compute.aggregation.MaxLongAggregatorFunctionSupplier; @@ -80,9 +79,4 @@ protected void assertSimpleOutput(List input, List results) { sum.assertSimpleOutput(input.stream().map(p -> p.getBlock(0)).toList(), sums); max.assertSimpleOutput(input.stream().map(p -> p.getBlock(0)).toList(), maxs); } - - @Override - protected ByteSizeValue memoryLimitForSimple() { - return ByteSizeValue.ofBytes(50); - } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java index 5fe0136188a78..2a8c259f069b4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java @@ -9,7 +9,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.BytesRefs; -import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; @@ 
-95,11 +94,6 @@ protected void assertSimpleOutput(List input, List results) { } } - @Override - protected ByteSizeValue memoryLimitForSimple() { - return ByteSizeValue.ofKb(15); - } - public void testAllNullValues() { DriverContext driverContext = driverContext(); BytesRef scratch = new BytesRef(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java index 79b53914614d8..0894e665b8fed 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.operator; -import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; @@ -114,9 +113,4 @@ public void testReadFromBlock() { results.forEach(Page::releaseBlocks); assertThat(context.breaker().getUsed(), equalTo(0L)); } - - @Override - protected ByteSizeValue memoryLimitForSimple() { - return ByteSizeValue.ofKb(4); - } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java index 8af136d52bc62..d68e03203b9af 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.operator; -import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; @@ -113,9 +112,4 @@ public void testReadFromBlock() { results.forEach(Page::releaseBlocks); assertThat(context.breaker().getUsed(), equalTo(0L)); } - - @Override - protected ByteSizeValue memoryLimitForSimple() { - return ByteSizeValue.ofKb(1); - } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java index 08ce7b8422253..87675e3139a43 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.aggregation.AggregatorMode; import org.elasticsearch.compute.data.BlockTestUtils; @@ -177,7 +176,6 @@ protected void start(Driver driver, ActionListener listener) { // @com.carrotsearch.randomizedtesting.annotations.Repeat(iterations = 100) public final void testManyInitialManyPartialFinalRunnerThrowing() throws Exception { DriverContext driverContext = driverContext(); - BigArrays bigArrays = nonBreakingBigArrays(); List input = 
CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); List results = new ArrayList<>(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java index 87da8ac01877c..948d12b5744e5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.operator; -import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.compute.aggregation.AggregatorMode; import org.elasticsearch.compute.aggregation.MaxLongAggregatorFunction; import org.elasticsearch.compute.aggregation.MaxLongAggregatorFunctionSupplier; @@ -91,9 +90,4 @@ protected void assertSimpleOutput(List input, List results) { max.assertSimpleGroup(input, maxs, i, group); } } - - @Override - protected ByteSizeValue memoryLimitForSimple() { - return ByteSizeValue.ofKb(1); - } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java index c8266908cfba8..e366646ecd0f5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java @@ -50,7 +50,7 @@ protected void assertSimpleOutput(List input, List results) { } @Override - protected ByteSizeValue memoryLimitForSimple() { + protected ByteSizeValue enoughMemoryForSimple() { assumeFalse("doesn't allocate, just filters", true); return null; } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java index d25d108d194e6..165e5b80b9a58 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java @@ -200,7 +200,7 @@ protected void assertSimpleOutput(List input, List results) { } @Override - protected ByteSizeValue memoryLimitForSimple() { + protected ByteSizeValue enoughMemoryForSimple() { assumeFalse("doesn't throw in tests but probably should", true); return ByteSizeValue.ofBytes(1); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java index 4327696c81d77..0890ba669f0a2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java @@ -28,10 +28,10 @@ import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.CrankyCircuitBreakerService; +import org.elasticsearch.test.BreakerTestUtil; import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; 
-import org.junit.AssumptionViolatedException; import java.util.ArrayList; import java.util.Iterator; @@ -73,69 +73,64 @@ public final void testSimpleLargeInput() { } /** - * A {@link ByteSizeValue} that is small enough that running {@link #simple} - * on {@link #simpleInput} will exhaust the breaker and throw a - * {@link CircuitBreakingException}. We should make an effort to make this - * number as large as possible and still cause a break consistently so we get - * good test coverage. If the operator can't break then throw an - * {@link AssumptionViolatedException}. + * Enough memory for {@link #simple} not to throw a {@link CircuitBreakingException}. + * It's fine if this is much more memory than {@linkplain #simple} needs. + * When we want to make {@linkplain #simple} throw we'll find the precise amount of memory + * that'll make it throw with a binary search. */ - protected abstract ByteSizeValue memoryLimitForSimple(); - - /** - * Run {@link #simple} with a circuit breaker limited to somewhere - * between 0 bytes and {@link #memoryLimitForSimple} and assert that - * it breaks in a sane way. - */ - public final void testSimpleCircuitBreaking() { - testSimpleCircuitBreaking(ByteSizeValue.ofBytes(randomLongBetween(0, memoryLimitForSimple().getBytes()))); + protected ByteSizeValue enoughMemoryForSimple() { + return ByteSizeValue.ofGb(1); } /** - * Run {@link #simple} with a circuit breaker configured limited to - * {@link #memoryLimitForSimple} and assert that it breaks in a sane way. - *
- * This test helps to make sure that the limits set by - * {@link #memoryLimitForSimple} aren't too large. - * {@link #testSimpleCircuitBreaking}, with its random configured - * limit will use the actual maximum very rarely. - *
+ * Run {@link #simple} with a circuit breaker many times, making sure all blocks + * are properly released. In particular, we perform a binary search to find the + * largest amount of memory that'll throw a {@link CircuitBreakingException} with + * starting bounds of {@code 0b} and {@link #enoughMemoryForSimple}. Then we pick + * a random amount of memory between {@code 0b} and the maximum and run that, + * asserting both that this throws a {@link CircuitBreakingException} and releases + * all pages. */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103789") - public final void testSimpleCircuitBreakingAtLimit() { - testSimpleCircuitBreaking(memoryLimitForSimple()); + public final void testSimpleCircuitBreaking() { + ByteSizeValue memoryLimitForSimple = enoughMemoryForSimple(); + Operator.OperatorFactory simple = simple(); + DriverContext inputFactoryContext = driverContext(); + List input = CannedSourceOperator.collectPages(simpleInput(inputFactoryContext.blockFactory(), between(1_000, 10_000))); + try { + ByteSizeValue limit = BreakerTestUtil.findBreakerLimit( + memoryLimitForSimple, + l -> runWithLimit(simple, CannedSourceOperator.deepCopyOf(input), l) + ); + ByteSizeValue testWithSize = ByteSizeValue.ofBytes(randomLongBetween(0, limit.getBytes())); + logger.info("testing with {} against a limit of {}", testWithSize, limit); + Exception e = expectThrows( + CircuitBreakingException.class, + () -> runWithLimit(simple, CannedSourceOperator.deepCopyOf(input), testWithSize) + ); + assertThat(e.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); + } finally { + Releasables.closeExpectNoException(Releasables.wrap(() -> Iterators.map(input.iterator(), p -> p::releaseBlocks))); + } + assertThat(inputFactoryContext.breaker().getUsed(), equalTo(0L)); } - private void testSimpleCircuitBreaking(ByteSizeValue limit) { - /* - * We build two CircuitBreakers - one for the input blocks and one for the operation itself. - * The input blocks don't count against the memory usage for the limited operator that we - * build. 
- */ - DriverContext inputFactoryContext = driverContext(); + private void runWithLimit(Operator.OperatorFactory factory, List input, ByteSizeValue limit) { BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, limit).withCircuitBreaking(); - Operator.OperatorFactory simple = simple(); - logger.info("running {} with {}", simple, bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST)); - List input = CannedSourceOperator.collectPages(simpleInput(inputFactoryContext.blockFactory(), between(1_000, 10_000))); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); BlockFactory blockFactory = BlockFactory.getInstance(breaker, bigArrays); DriverContext driverContext = new DriverContext(bigArrays, blockFactory); - boolean[] driverStarted = new boolean[1]; - Exception e = expectThrows(CircuitBreakingException.class, () -> { - var operator = simple.get(driverContext); - driverStarted[0] = true; + boolean driverStarted = false; + try { + var operator = factory.get(driverContext); + driverStarted = true; drive(operator, input.iterator(), driverContext); - }); - if (driverStarted[0] == false) { - // if drive hasn't even started then we need to release the input pages - Releasables.closeExpectNoException(Releasables.wrap(() -> Iterators.map(input.iterator(), p -> p::releaseBlocks))); + } finally { + if (driverStarted == false) { + // if drive hasn't even started then we need to release the input pages manually + Releasables.closeExpectNoException(Releasables.wrap(() -> Iterators.map(input.iterator(), p -> p::releaseBlocks))); + } + assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); } - assertThat(e.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); - assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); - - // Note the lack of try/finally here - we're asserting that when the driver throws an exception we clear the breakers. 
- assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); - assertThat(inputFactoryContext.breaker().getUsed(), equalTo(0L)); } /** diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java index ab50cb65792a0..572657c7c8226 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java @@ -96,7 +96,7 @@ protected void assertSimpleOutput(List input, List results) { } @Override - protected ByteSizeValue memoryLimitForSimple() { + protected ByteSizeValue enoughMemoryForSimple() { assumeTrue("doesn't allocate", false); return null; } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java index ff9dc56f7556b..c8a329be7b72a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.compute.operator; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; @@ -83,11 +82,6 @@ protected void assertSimpleOutput(List input, List results) { } } - @Override - protected ByteSizeValue memoryLimitForSimple() { - return ByteSizeValue.ofKb(15); - } - public void testMultivalueDissectInput() { StringExtractOperator operator = new StringExtractOperator(new String[] { "test" }, new EvalOperator.ExpressionEvaluator() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index 117e66eced2f8..ba4f547d80ce1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -177,15 +177,6 @@ protected void assertSimpleOutput(List input, List results) { ); } - @Override - protected ByteSizeValue memoryLimitForSimple() { - /* - * 775 causes us to blow up while collecting values and 780 doesn't - * trip the breaker. - */ - return ByteSizeValue.ofBytes(775); - } - public void testRamBytesUsed() { RamUsageTester.Accumulator acc = new RamUsageTester.Accumulator() { @Override From 0b6b67d77e92c0a82f5f3785f36ac702e24eeb7c Mon Sep 17 00:00:00 2001 From: David Roberts Date: Mon, 8 Jan 2024 21:14:10 +0000 Subject: [PATCH 16/47] Enable failure_store feature for X-Pack full cluster restart tests (#104098) This test suite indirectly uses functionality that requires the failure_store functionality. 
Closes #104078 --- .../xpack/restart/AbstractXpackFullClusterRestartTestCase.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/AbstractXpackFullClusterRestartTestCase.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/AbstractXpackFullClusterRestartTestCase.java index 0bc9101301a54..96acaaa5b41b4 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/AbstractXpackFullClusterRestartTestCase.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/AbstractXpackFullClusterRestartTestCase.java @@ -36,6 +36,7 @@ public abstract class AbstractXpackFullClusterRestartTestCase extends Parameteri .keystore("xpack.watcher.encryption_key", Resource.fromClasspath("system_key")) .keystore("xpack.security.transport.ssl.secure_key_passphrase", "testnode") .feature(FeatureFlag.TIME_SERIES_MODE) + .feature(FeatureFlag.FAILURE_STORE_ENABLED) .build(); public AbstractXpackFullClusterRestartTestCase(FullClusterRestartUpgradeStatus upgradeStatus) { From 6111d967a44632088ddaeb6cd73fa5d947b48cad Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Mon, 8 Jan 2024 23:09:07 +0100 Subject: [PATCH 17/47] Fix missing links in geo_point docs for ESQL (#104073) --- docs/reference/esql/esql-functions.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/esql/esql-functions.asciidoc b/docs/reference/esql/esql-functions.asciidoc index c2e943f7555d6..c463c13a34e74 100644 --- a/docs/reference/esql/esql-functions.asciidoc +++ b/docs/reference/esql/esql-functions.asciidoc @@ -61,9 +61,11 @@ these functions: * <> * <> * <> +* <> * <> * <> * <> +* <> * <> * <> * <> From f693d38169612867e7e49fd15e4ba3c3ad8541d8 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 8 Jan 2024 17:46:26 -0500 Subject: [PATCH 18/47] ESQL: Add javadoc to some shared methods (#103337) This adds some javadoc to some of the methods that are shared by huge parts of ES|QL's "expression" tree. These methods have fairly complicated contracts that play off of each other so this javadoc is quite handy. --- .../evaluator/mapper/EvaluatorMapper.java | 10 +++++ .../xpack/ql/expression/Expression.java | 38 +++++++++++++++++++ 2 files changed, 48 insertions(+) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java index 3ab555799ee34..e536547e006fd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/mapper/EvaluatorMapper.java @@ -26,6 +26,16 @@ * Expressions that have a mapping to an {@link ExpressionEvaluator}. */ public interface EvaluatorMapper { + /** + * Build an {@link ExpressionEvaluator.Factory} for the tree of + * expressions rooted at this node. This is only guaranteed to return + * a sensible evaluator if this node has a valid type. If this node + * is a subclass of {@link Expression} then "valid type" means that + * {@link Expression#typeResolved} returns a non-error resolution. + * If {@linkplain Expression#typeResolved} returns an error then + * this method may throw. Or return an evaluator that produces + * garbage. Or return an evaluator that throws when run. 
+ */ ExpressionEvaluator.Factory toEvaluator(Function toEvaluator); /** diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/Expression.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/Expression.java index 9e95dab82df19..0bbe663dab90e 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/Expression.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/Expression.java @@ -99,6 +99,26 @@ public boolean childrenResolved() { return lazyChildrenResolved; } + /** + * Does the tree rooted at this expression have valid types at all nodes? + *
+ * For example, {@code SIN(1.2)} has a valid type and should return + * {@link TypeResolution#TYPE_RESOLVED} to signal "this type is fine". + * Another example, {@code SIN("cat")} has an invalid type in the + * tree. The value passed to the {@code SIN} function is a string which + * doesn't make any sense. So this method should return a "failure" + * resolution which it can build by calling {@link TypeResolution#TypeResolution(String)}. + *
+ *
+ * Take {@code SIN(1.2) + COS(ATAN("cat"))}, this tree should also + * fail, specifically because {@code ATAN("cat")} is invalid. This should + * fail even though {@code +} is perfectly valid when run on the results + * of {@code SIN} and {@code COS}. And {@code COS} can operate on the results + * of any valid call to {@code ATAN}. For this method to return a "valid" + * result the whole tree rooted at this expression must + * be valid. + *
+ */ public final TypeResolution typeResolved() { if (lazyTypeResolution == null) { lazyTypeResolution = resolveType(); @@ -106,6 +126,17 @@ public final TypeResolution typeResolved() { return lazyTypeResolution; } + /** + * The implementation of {@link #typeResolved}, which is just a caching wrapper + * around this method. See its javadoc for what this method should return. + *
+ * Implementations will rarely interact with the {@link TypeResolution} + * class directly, instead usually calling the utility methods on {@link TypeResolutions}. + *
+ *
+ * Implementations should fail if {@link #childrenResolved()} returns {@code false}. + *
+ */ protected TypeResolution resolveType() { return TypeResolution.TYPE_RESOLVED; } @@ -142,6 +173,13 @@ public boolean resolved() { return childrenResolved() && typeResolved().resolved(); } + /** + * The {@link DataType} returned by executing the tree rooted at this + * expression. If {@link #typeResolved()} returns an error then the behavior + * of this method is undefined. It may return a valid + * type. Or it may throw an exception. Or it may return a totally nonsensical + * type. + */ public abstract DataType dataType(); @Override From 1c0ff412aacb3d62f372855ccfb306333bf9b700 Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Tue, 9 Jan 2024 17:47:26 +1100 Subject: [PATCH 19/47] [Certutil] Verify certificates after generation (#103948) This commit modifies the "elasticsearch-certutil cert" command to add a verify step after the generation of the new certificate. The primary purpose of this verify is to ensure that the key used to sign the certificate matches the issuing certificate that was provided. That is, if the CA is provided using PEM files, that the `--ca-cert` and `--ca-key` are a matching pair. There are more efficient ways to tell is a certificate and key are a matching pair if you know what type of key is being used (RSA, EC, etc) but post-signing verification is the most straight-forward way to perform a key-type agnostic check, and is a useful general purpose check to include in certificate generation. Closes: #98207 --- docs/changelog/103948.yaml | 6 + .../xpack/security/cli/CertificateTool.java | 29 ++++- .../security/cli/CertificateToolTests.java | 115 +++++++++++++----- 3 files changed, 115 insertions(+), 35 deletions(-) create mode 100644 docs/changelog/103948.yaml diff --git a/docs/changelog/103948.yaml b/docs/changelog/103948.yaml new file mode 100644 index 0000000000000..3247183fc97bb --- /dev/null +++ b/docs/changelog/103948.yaml @@ -0,0 +1,6 @@ +pr: 103948 +summary: '''elasticsearch-certutil cert'' now verifies the issuing chain of the generated + certificate' +area: TLS +type: enhancement +issues: [] diff --git a/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertificateTool.java b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertificateTool.java index 82a6a5fc55c13..24ece3ff99bc4 100644 --- a/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertificateTool.java +++ b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertificateTool.java @@ -57,6 +57,7 @@ import java.nio.file.StandardOpenOption; import java.nio.file.attribute.PosixFileAttributeView; import java.nio.file.attribute.PosixFilePermission; +import java.security.GeneralSecurityException; import java.security.Key; import java.security.KeyPair; import java.security.KeyStore; @@ -570,6 +571,25 @@ static void writePkcs12( } }); } + + /** + * Verify that the provided certificate is validly signed by the provided CA + */ + static void verifyIssuer(Certificate certificate, CAInfo caInfo, Terminal terminal) throws UserException { + try { + certificate.verify(caInfo.certAndKey.cert.getPublicKey()); + } catch (GeneralSecurityException e) { + terminal.errorPrintln(""); + terminal.errorPrintln("* ERROR *"); + terminal.errorPrintln("Verification of generated certificate failed."); + terminal.errorPrintln("This usually occurs if the provided CA certificate does not match with the CA key."); + terminal.errorPrintln("Cause: " + e); + for (var c = e.getCause(); c != null; c = c.getCause()) { + 
terminal.errorPrintln(" - " + c); + } + throw new UserException(ExitCodes.CONFIG, "Certificate verification failed"); + } + } } static class SigningRequestCommand extends CertificateCommand { @@ -788,7 +808,7 @@ void generateAndWriteSignedCertificates( final boolean usePassword = super.useOutputPassword(options); fullyWriteZipFile(output, (outputStream, pemWriter) -> { for (CertificateInformation certificateInformation : certs) { - CertificateAndKey pair = generateCertificateAndKey(certificateInformation, caInfo, keySize, days); + CertificateAndKey pair = generateCertificateAndKey(certificateInformation, caInfo, keySize, days, terminal); final String dirName = certificateInformation.name.filename + "/"; ZipEntry zipEntry = new ZipEntry(dirName); @@ -836,7 +856,7 @@ void generateAndWriteSignedCertificates( } else { assert certs.size() == 1; CertificateInformation certificateInformation = certs.iterator().next(); - CertificateAndKey pair = generateCertificateAndKey(certificateInformation, caInfo, keySize, days); + CertificateAndKey pair = generateCertificateAndKey(certificateInformation, caInfo, keySize, days, terminal); fullyWriteFile( output, stream -> writePkcs12( @@ -856,7 +876,8 @@ private static CertificateAndKey generateCertificateAndKey( CertificateInformation certificateInformation, CAInfo caInfo, int keySize, - int days + int days, + Terminal terminal ) throws Exception { KeyPair keyPair = CertGenUtils.generateKeyPair(keySize); Certificate certificate; @@ -873,6 +894,7 @@ private static CertificateAndKey generateCertificateAndKey( caInfo.certAndKey.key, days ); + verifyIssuer(certificate, caInfo, terminal); } else { certificate = CertGenUtils.generateSignedCertificate( certificateInformation.name.x500Principal, @@ -940,6 +962,7 @@ private void writeCertificateAuthority(Path output, CAInfo caInfo, boolean write ); } } + } @SuppressForbidden(reason = "resolve paths against CWD for a CLI tool") diff --git a/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertificateToolTests.java b/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertificateToolTests.java index a0484de419fe7..702bfac2a3ea5 100644 --- a/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertificateToolTests.java +++ b/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertificateToolTests.java @@ -27,6 +27,7 @@ import org.bouncycastle.cert.X509CertificateHolder; import org.bouncycastle.openssl.PEMParser; import org.bouncycastle.pkcs.PKCS10CertificationRequest; +import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.cli.ProcessInfo; import org.elasticsearch.cli.Terminal; @@ -36,6 +37,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.ssl.KeyStoreUtil; import org.elasticsearch.common.ssl.PemUtils; +import org.elasticsearch.common.util.ArrayUtils; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.IOUtils; @@ -60,6 +62,7 @@ import java.io.Reader; import java.net.InetAddress; import java.net.URI; +import java.net.URISyntaxException; import java.nio.file.FileSystem; import java.nio.file.FileSystems; import java.nio.file.Files; @@ -281,8 +284,7 @@ public void testGeneratingCsr() throws Exception { assertTrue(perms.toString(), perms.contains(PosixFilePermission.OWNER_WRITE)); assertEquals(perms.toString(), 2, perms.size()); - FileSystem fileSystem = 
FileSystems.newFileSystem(new URI("jar:" + outputFile.toUri()), Collections.emptyMap()); - Path zipRoot = fileSystem.getPath("/"); + final Path zipRoot = getRootPathOfZip(outputFile); assertFalse(Files.exists(zipRoot.resolve("ca"))); for (CertificateInformation certInfo : certInfos) { @@ -341,8 +343,7 @@ public void testGeneratingSignedPemCertificates() throws Exception { assertTrue(perms.toString(), perms.contains(PosixFilePermission.OWNER_WRITE)); assertEquals(perms.toString(), 2, perms.size()); - FileSystem fileSystem = FileSystems.newFileSystem(new URI("jar:" + outputFile.toUri()), Collections.emptyMap()); - Path zipRoot = fileSystem.getPath("/"); + final Path zipRoot = getRootPathOfZip(outputFile); assertFalse(Files.exists(zipRoot.resolve("ca"))); @@ -460,8 +461,7 @@ public void testHandleLongPasswords() throws Exception { Certificate caCert = caKeyStore.getCertificate("ca"); assertThat(caCert, notNullValue()); - FileSystem zip = FileSystems.newFileSystem(new URI("jar:" + pemZipFile.toUri()), Collections.emptyMap()); - Path zipRoot = zip.getPath("/"); + final Path zipRoot = getRootPathOfZip(pemZipFile); final Path keyPath = zipRoot.resolve("cert/cert.key"); final PrivateKey key = PemUtils.readPrivateKey(keyPath, () -> longPassword.toCharArray()); @@ -645,7 +645,7 @@ public void testCreateCaAndMultipleInstances() throws Exception { final String node2Ip = "200.182." + randomIntBetween(1, 250) + "." + randomIntBetween(1, 250); final String node3Ip = "200.183." + randomIntBetween(1, 250) + "." + randomIntBetween(1, 250); - final String caPassword = generateCA(caFile, terminal, env); + final String caPassword = generateCA(caFile, terminal, env, false); final GenerateCertificateCommand gen1Command = new PathAwareGenerateCertificateCommand(caFile, node1File); final OptionSet gen1Options = gen1Command.getParser() @@ -716,7 +716,7 @@ public void testCreateCaAndMultipleInstances() throws Exception { node3Ip ); gen3Args.add("-self-signed"); - final GenerateCertificateCommand gen3Command = new PathAwareGenerateCertificateCommand(null, node3File); + final GenerateCertificateCommand gen3Command = new PathAwareGenerateCertificateCommand(Map.of(), node3File); final OptionSet gen3Options = gen3Command.getParser().parse(Strings.toStringArray(gen3Args)); gen3Command.execute(terminal, gen3Options, env, processInfo); @@ -773,7 +773,7 @@ public void testTrustBetweenPEMandPKCS12() throws Exception { Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", tempDir).build()); final Path caFile = tempDir.resolve("ca.p12"); - final String caPassword = generateCA(caFile, terminal, env); + final String caPassword = generateCA(caFile, terminal, env, false); final Path node1Pkcs12 = tempDir.resolve("node1.p12"); final Path pemZip = tempDir.resolve("pem.zip"); @@ -831,8 +831,7 @@ public void testTrustBetweenPEMandPKCS12() throws Exception { assertThat(pemZip, pathExists()); - FileSystem zip2FS = FileSystems.newFileSystem(new URI("jar:" + pemZip.toUri()), Collections.emptyMap()); - Path zip2Root = zip2FS.getPath("/"); + final Path zip2Root = getRootPathOfZip(pemZip); final Path ca2 = zip2Root.resolve("ca/ca.p12"); assertThat(ca2, not(pathExists())); @@ -861,7 +860,7 @@ public void testZipOutputFromCommandLineOptions() throws Exception { final Path zip = tempDir.resolve("pem.zip"); final AtomicBoolean isZip = new AtomicBoolean(false); - final GenerateCertificateCommand genCommand = new PathAwareGenerateCertificateCommand(null, zip) { + final GenerateCertificateCommand genCommand = new 
PathAwareGenerateCertificateCommand(Map.of(), zip) { @Override void generateAndWriteSignedCertificates( Path output, @@ -892,6 +891,45 @@ Collection getCertificateInformationList(Terminal termin assertThat("For command line option " + optionThatTriggersZip, isZip.get(), equalTo(true)); } + public void testErrorIfSigningCertificateAndKeyDontMatch() throws Exception { + final Path tempDir = initTempDir(); + + final var terminal = MockTerminal.create(); + final var env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", tempDir).build()); + final var processInfo = new ProcessInfo(Map.of(), Map.of(), createTempDir()); + + final Path ca1zip = tempDir.resolve("ca1.zip"); + final String ca1Password = generateCA(ca1zip, terminal, env, true); + terminal.reset(); + final Path ca2zip = tempDir.resolve("ca2.zip"); + final String ca2Password = generateCA(ca2zip, terminal, env, true); + + var ca1Root = getRootPathOfZip(ca1zip); + var ca1Cert = ca1Root.resolve("ca/ca.crt"); + var ca1Key = ca1Root.resolve("ca/ca.key"); + + var ca2Root = getRootPathOfZip(ca2zip); + var ca2Key = ca2Root.resolve("ca/ca.key"); + + var p12Out = tempDir.resolve("certs.p12"); + var p12Password = randomAlphaOfLength(8); + + final var gen1Command = new PathAwareGenerateCertificateCommand(Map.of("ca-cert", ca1Cert, "ca-key", ca2Key), p12Out); + final var gen1Options = gen1Command.getParser() + .parse("--ca-cert", "", "--ca-key", "", "--ca-pass", ca2Password, "--out", "", "--pass", p12Password); + + final UserException e = expectThrows(UserException.class, () -> gen1Command.execute(terminal, gen1Options, env, processInfo)); + assertThat(e.exitCode, is(ExitCodes.CONFIG)); + assertThat(e.getMessage(), containsString("Certificate verification failed")); + assertThat(p12Out, not(pathExists())); + + final var gen2Command = new PathAwareGenerateCertificateCommand(Map.of("ca-cert", ca1Cert, "ca-key", ca1Key), p12Out); + final var gen2Options = gen2Command.getParser() + .parse("--ca-cert", "", "--ca-key", "", "--ca-pass", ca1Password, "--out", "", "--pass", p12Password); + gen2Command.execute(terminal, gen2Options, env, processInfo); + assertThat(p12Out, pathExists()); + } + private int getKeySize(Key node1Key) { assertThat(node1Key, instanceOf(RSAKey.class)); return ((RSAKey) node1Key).getModulus().bitLength(); @@ -1034,25 +1072,32 @@ private static Path resolvePath(String path) { return PathUtils.get(path).toAbsolutePath(); } - private String generateCA(Path caFile, MockTerminal terminal, Environment env) throws Exception { + private static Path getRootPathOfZip(Path pemZip) throws IOException, URISyntaxException { + FileSystem zipFS = FileSystems.newFileSystem(new URI("jar:" + pemZip.toUri()), Collections.emptyMap()); + return zipFS.getPath("/"); + } + + private String generateCA(Path caFile, MockTerminal terminal, Environment env, boolean pem) throws Exception { final int caKeySize = randomIntBetween(4, 8) * 512; final int days = randomIntBetween(7, 1500); final String caPassword = randomFrom("", randomAlphaOfLengthBetween(4, 80)); final CertificateAuthorityCommand caCommand = new PathAwareCertificateAuthorityCommand(caFile); - final OptionSet caOptions = caCommand.getParser() - .parse( - "-ca-dn", - "CN=My ElasticSearch Cluster", - "-pass", - caPassword, - "-out", - caFile.toString(), - "-keysize", - String.valueOf(caKeySize), - "-days", - String.valueOf(days) - ); + String[] args = { + "-ca-dn", + "CN=My ElasticSearch Cluster", + "-pass", + caPassword, + "-out", + caFile.toString(), + "-keysize", + 
String.valueOf(caKeySize), + "-days", + String.valueOf(days) }; + if (pem) { + args = ArrayUtils.append(args, "--pem"); + } + final OptionSet caOptions = caCommand.getParser().parse(args); final ProcessInfo processInfo = new ProcessInfo(Map.of(), Map.of(), createTempDir()); caCommand.execute(terminal, caOptions, env, processInfo); @@ -1091,20 +1136,26 @@ Path resolveOutputPath(Terminal terminal, OptionSet options, String defaultFilen * This class works around that by sticking with the original path objects */ private static class PathAwareGenerateCertificateCommand extends GenerateCertificateCommand { - private final Path caFile; + private final Map inputPaths; private final Path outFile; PathAwareGenerateCertificateCommand(Path caFile, Path outFile) { - this.caFile = caFile; + this(Map.of("ca", caFile), outFile); + } + + PathAwareGenerateCertificateCommand(Map inputPaths, Path outFile) { + this.inputPaths = Map.copyOf(inputPaths); this.outFile = outFile; } @Override protected Path resolvePath(OptionSet options, OptionSpec spec) { - if (spec.options().contains("ca")) { - return caFile; - } - return super.resolvePath(options, spec); + return this.inputPaths.entrySet() + .stream() + .filter(entry -> spec.options().contains(entry.getKey())) + .findFirst() + .map(Entry::getValue) + .orElseGet(() -> super.resolvePath(options, spec)); } @Override From 0e62691aa6e27aea3892172582e8837a9ae3f6ae Mon Sep 17 00:00:00 2001 From: Lloyd Date: Tue, 9 Jan 2024 16:54:34 +0900 Subject: [PATCH 20/47] [Docs] Fixup doc for internal IdP plugin (#102904) https://github.com/elastic/elasticsearch/pull/101855 updated the plugin so that the IdP-initiated flow for the SAML init endpoint would work the same as the SP-initiated flow. This updates the docs to reflect that. Co-authored-by: Elastic Machine --- .../docs/en/rest-api/idp-saml-init.asciidoc | 41 +++++++++++++++---- 1 file changed, 32 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/identity-provider/docs/en/rest-api/idp-saml-init.asciidoc b/x-pack/plugin/identity-provider/docs/en/rest-api/idp-saml-init.asciidoc index 561b5abd8a660..22c53b4612c39 100644 --- a/x-pack/plugin/identity-provider/docs/en/rest-api/idp-saml-init.asciidoc +++ b/x-pack/plugin/identity-provider/docs/en/rest-api/idp-saml-init.asciidoc @@ -98,8 +98,9 @@ Provider that should receive this SAML Response. -------------------------------------------------------------------- // TESTRESPONSE[skip:Do not enable identity provider for the docs cluster, at least not yet] -A failed call, in the case of an SP initiated SSO returns a SAML Response as an XML String with its status set to the appropriate error -code indicating that the authentication request failed and the reason for that failure. A `saml_status` of +A failed call, in the case of an SP initiated SSO returns a standard Elasticsearch error response with the appropriate HTTP Status code, +with the error containing a `saml_initiate_single_sign_on_response` field holding a SAML Response as an XML String with its status +set to the appropriate error code indicating that the authentication request failed and the reason for that failure. A `saml_status` of `urn:oasis:names:tc:SAML:2.0:status:Requester` indicates that the error is on the side of the SP or the user, while a `saml_status` of `urn:oasis:names:tc:SAML:2.0:status:Responder` indicates that something went wrong in the IDP side. 
The `error` field contains a short human friendly interpretation of the error that is outside the SAML standard and is meant to be communicated to the user, especially @@ -108,13 +109,35 @@ if the user is not redirected back the SP with the `saml_response` [source, console-result] -------------------------------------------------------------------- { - "post_url" : "https://sp1.kibana.org/saml/acs", - "saml_response" : "?xml version="1.0" encoding="UTF-8"?>https://idp.cloud.elastic.co...removed for brevity...", - "saml_status" : "urn:oasis:names:tc:SAML:2.0:status:Requester", - "error" : "User [user1] is not permitted to access service [https://sp1.kibana.org]", - "service_provider" : { - "entity_id" : "https://sp1.kibana.org" - } + "error":{ + "root_cause":[ + { + "type":"saml_initiate_single_sign_on_exception", + "reason":"User [es_user] is not permitted to access service [ec:abcdef:123456]", + "saml_initiate_single_sign_on_response":{ + "post_url":"https://AVoMOJLJfbru.elastic-cloud.com/saml/acs", + "saml_response":"urn:elastic:cloud:idp", + "saml_status":"urn:oasis:names:tc:SAML:2.0:status:Requester", + "error":"User [es_user] is not permitted to access service [ec:abcdef:123456]", + "service_provider":{ + "entity_id":"ec:abcdef:123456" + } + } + } + ], + "type":"saml_initiate_single_sign_on_exception", + "reason":"User [es_user] is not permitted to access service [ec:abcdef:123456]", + "saml_initiate_single_sign_on_response":{ + "post_url":"https://AVoMOJLJfbru.elastic-cloud.com/saml/acs", + "saml_response":"urn:elastic:cloud:idp", + "saml_status":"urn:oasis:names:tc:SAML:2.0:status:Requester", + "error":"User [es_user] is not permitted to access service [ec:abcdef:123456]", + "service_provider":{ + "entity_id":"ec:abcdef:123456" + } + } + }, + "status":403 } -------------------------------------------------------------------- // TESTRESPONSE[skip:Do not enable identity provider for the docs cluster, at least not yet] From 51521e2beab8b3c5333b34683e18195122d0843d Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 9 Jan 2024 08:19:19 +0000 Subject: [PATCH 21/47] Retain ref to requests when running ActionFilterChain (#104000) `ActionFilter` implementations may be async, so we have to keep the request alive while the chain is running. 
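The fix below relies on a release-exactly-once idiom: take an extra ref on the request before starting the chain, then release it on whichever completion path runs first, while tolerating the other path attempting the same release. A minimal standalone sketch of that idiom (plain atomics stand in for the real `RefCounted` and `Releasables#releaseOnce` machinery):

    import java.util.concurrent.atomic.AtomicBoolean;
    import java.util.concurrent.atomic.AtomicInteger;

    class ReleaseOnceSketch {
        // Wrap a release action so that racing completion paths can both call it safely.
        static Runnable releaseOnce(Runnable release) {
            AtomicBoolean released = new AtomicBoolean();
            return () -> {
                if (released.compareAndSet(false, true)) {
                    release.run();
                }
            };
        }

        public static void main(String[] args) {
            AtomicInteger refs = new AtomicInteger(1); // the caller's own ref
            refs.incrementAndGet();                    // extra ref held for the async filter chain
            Runnable releaseRef = releaseOnce(refs::decrementAndGet);
            releaseRef.run(); // the listener completed, perhaps short-circuited by a filter
            releaseRef.run(); // the end of the chain also ran: a no-op, not a double release
            System.out.println(refs.get()); // prints 1, i.e. exactly one release happened
        }
    }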
Closes #103952 --- .../action/support/TransportAction.java | 19 +- ...portActionFilterChainRefCountingTests.java | 207 ++++++++++++++++++ .../TransportActionFilterChainTests.java | 20 +- 3 files changed, 231 insertions(+), 15 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainRefCountingTests.java diff --git a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java index cb5a9ce3db353..35f1b645293bd 100644 --- a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java @@ -15,6 +15,8 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; @@ -58,8 +60,13 @@ public final void execute(Task task, Request request, ActionListener l listener = new TaskResultStoringActionListener<>(taskManager, task, listener); } - RequestFilterChain requestFilterChain = new RequestFilterChain<>(this, logger); - requestFilterChain.proceed(task, actionName, request, listener); + // Note on request refcounting: we can be sure that either we get to the end of the chain (and execute the actual action) or + // we complete the response listener and short-circuit the outer chain, so we release our request ref on both paths, using + // Releasables#releaseOnce to avoid a double-release. + request.mustIncRef(); + final var releaseRef = Releasables.releaseOnce(request::decRef); + RequestFilterChain requestFilterChain = new RequestFilterChain<>(this, logger, releaseRef); + requestFilterChain.proceed(task, actionName, request, ActionListener.runBefore(listener, releaseRef::close)); } protected abstract void doExecute(Task task, Request request, ActionListener listener); @@ -71,10 +78,12 @@ private static class RequestFilterChain action; private final AtomicInteger index = new AtomicInteger(); private final Logger logger; + private final Releasable releaseRef; - private RequestFilterChain(TransportAction action, Logger logger) { + private RequestFilterChain(TransportAction action, Logger logger, Releasable releaseRef) { this.action = action; this.logger = logger; + this.releaseRef = releaseRef; } @Override @@ -84,7 +93,9 @@ public void proceed(Task task, String actionName, Request request, ActionListene if (i < this.action.filters.length) { this.action.filters[i].apply(task, actionName, request, listener, this); } else if (i == this.action.filters.length) { - this.action.doExecute(task, request, listener); + try (releaseRef) { + this.action.doExecute(task, request, listener); + } } else { listener.onFailure(new IllegalStateException("proceed was called too many times")); } diff --git a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainRefCountingTests.java b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainRefCountingTests.java new file mode 100644 index 0000000000000..8062bfea5a637 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainRefCountingTests.java @@ -0,0 +1,207 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.support; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionRunnable; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.AbstractRefCounted; +import org.elasticsearch.core.RefCounted; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.LeakTracker; +import org.elasticsearch.transport.TransportService; + +import java.util.Collection; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CountDownLatch; + +public class TransportActionFilterChainRefCountingTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return List.of(TestPlugin.class); + } + + static final ActionType TYPE = ActionType.localOnly("test:action"); + + public void testAsyncActionFilterRefCounting() { + final var countDownLatch = new CountDownLatch(2); + final var request = new Request(); + try { + client().execute(TYPE, request, ActionListener.running(countDownLatch::countDown).delegateResponse((delegate, e) -> { + // _If_ we got an exception then it must be an ElasticsearchException with message "short-circuit failure", i.e. we're + // checking that nothing else can go wrong here. But it's also ok for everything to succeed too, in which case we countDown + // the latch without running this block. 
+ assertEquals("short-circuit failure", asInstanceOf(ElasticsearchException.class, e).getMessage()); + delegate.onResponse(null); + })); + } finally { + request.decRef(); + } + request.addCloseListener(ActionListener.running(countDownLatch::countDown)); + safeAwait(countDownLatch); + } + + public static class TestPlugin extends Plugin implements ActionPlugin { + + private ThreadPool threadPool; + + @Override + public Collection createComponents(PluginServices services) { + threadPool = services.threadPool(); + return List.of(); + } + + @Override + public List> getActions() { + return List.of(new ActionHandler<>(TYPE, TestAction.class)); + } + + @Override + public List getActionFilters() { + return randomSubsetOf( + List.of( + new TestAsyncActionFilter(threadPool), + new TestAsyncActionFilter(threadPool), + new TestAsyncMappedActionFilter(threadPool), + new TestAsyncMappedActionFilter(threadPool) + ) + ); + } + } + + private static class TestAsyncActionFilter implements ActionFilter { + + private final ThreadPool threadPool; + private final int order = randomInt(); + + private TestAsyncActionFilter(ThreadPool threadPool) { + this.threadPool = Objects.requireNonNull(threadPool); + } + + @Override + public int order() { + return order; + } + + @Override + public void apply( + Task task, + String action, + Req request, + ActionListener listener, + ActionFilterChain chain + ) { + if (action.equals(TYPE.name())) { + randomFrom(EsExecutors.DIRECT_EXECUTOR_SERVICE, threadPool.generic()).execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + fail(e); + } + + @Override + protected void doRun() { + assertTrue(request.hasReferences()); + if (randomBoolean()) { + chain.proceed(task, action, request, listener); + } else { + listener.onFailure(new ElasticsearchException("short-circuit failure")); + } + } + }); + } else { + chain.proceed(task, action, request, listener); + } + } + } + + private static class TestAsyncMappedActionFilter extends TestAsyncActionFilter implements MappedActionFilter { + + private TestAsyncMappedActionFilter(ThreadPool threadPool) { + super(threadPool); + } + + @Override + public String actionName() { + return TYPE.name(); + } + } + + public static class TestAction extends TransportAction { + + private final ThreadPool threadPool; + + @Inject + public TestAction(TransportService transportService, ActionFilters actionFilters) { + super(TYPE.name(), actionFilters, transportService.getTaskManager()); + threadPool = transportService.getThreadPool(); + } + + @Override + protected void doExecute(Task task, Request request, ActionListener listener) { + request.mustIncRef(); + threadPool.generic().execute(ActionRunnable.supply(ActionListener.runBefore(listener, request::decRef), () -> { + assert request.hasReferences(); + return new Response(); + })); + } + } + + private static class Request extends ActionRequest { + private final SubscribableListener closeListeners = new SubscribableListener<>(); + private final RefCounted refs = LeakTracker.wrap(AbstractRefCounted.of(() -> closeListeners.onResponse(null))); + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void incRef() { + refs.incRef(); + } + + @Override + public boolean tryIncRef() { + return refs.tryIncRef(); + } + + @Override + public boolean decRef() { + return refs.decRef(); + } + + @Override + public boolean hasReferences() { + return refs.hasReferences(); + } + + void addCloseListener(ActionListener listener) { + 
closeListeners.addListener(listener); + } + } + + private static class Response extends ActionResponse { + @Override + public void writeTo(StreamOutput out) {} + } +} diff --git a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java index 59567c1ee9783..64ab7a9819190 100644 --- a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java @@ -135,8 +135,6 @@ protected void doExecute(Task task, TestRequest request, ActionListener void execute( @@ -146,15 +144,18 @@ public void exe ActionListener listener, ActionFilterChain actionFilterChain ) { - for (int i = 0; i <= additionalContinueCount; i++) { - actionFilterChain.proceed(task, action, request, listener); - } + // expected proceed() call: + actionFilterChain.proceed(task, action, request, listener); + + // extra, invalid, proceed() call: + actionFilterChain.proceed(task, action, request, listener); } }); Set filters = new HashSet<>(); filters.add(testFilter); + final CountDownLatch latch = new CountDownLatch(2); String actionName = randomAlphaOfLength(randomInt(30)); ActionFilters actionFilters = new ActionFilters(filters); TransportAction transportAction = new TransportAction( @@ -164,18 +165,16 @@ public void exe ) { @Override protected void doExecute(Task task, TestRequest request, ActionListener listener) { - listener.onResponse(new TestResponse()); + latch.countDown(); } }; - final CountDownLatch latch = new CountDownLatch(additionalContinueCount + 1); - final AtomicInteger responses = new AtomicInteger(); final List failures = new CopyOnWriteArrayList<>(); ActionTestUtils.execute(transportAction, null, new TestRequest(), new LatchedActionListener<>(new ActionListener<>() { @Override public void onResponse(TestResponse testResponse) { - responses.incrementAndGet(); + fail("should not complete listener"); } @Override @@ -191,8 +190,7 @@ public void onFailure(Exception e) { assertThat(testFilter.runs.get(), equalTo(1)); assertThat(testFilter.lastActionName, equalTo(actionName)); - assertThat(responses.get(), equalTo(1)); - assertThat(failures.size(), equalTo(additionalContinueCount)); + assertThat(failures.size(), equalTo(1)); for (Throwable failure : failures) { assertThat(failure, instanceOf(IllegalStateException.class)); } From cd2bb08957a9cad43e1d45608782f8a8b42d1db1 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 9 Jan 2024 08:22:41 +0000 Subject: [PATCH 22/47] Set thread name used by REST client (#103160) By default the REST client uses a thread factory which names its threads with the generic pattern `I/O dispatcher %d`. This commit adds the prefix `elasticsearch-rest-client-`, and a client-instance-specific ID, to the name of these threads to make them easier to identify. 
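For reference, the naming scheme introduced here boils down to a small thread factory along the following lines. This is a minimal standalone sketch of the `RestClientThreadFactory` added in the diff below; the class and field names in the sketch are illustrative, only the resulting name pattern follows the patch:

    import java.util.Locale;
    import java.util.concurrent.ThreadFactory;
    import java.util.concurrent.atomic.AtomicLong;

    // Illustrative sketch, not the actual RestClientBuilder code.
    class NamedIoThreadFactory implements ThreadFactory {
        // 0-based ID per factory instance (i.e. per client), shared via a static counter.
        private static final AtomicLong FACTORY_ID = new AtomicLong();

        private final long factoryId = FACTORY_ID.getAndIncrement();
        // 1-based ID for each thread this factory creates.
        private final AtomicLong threadId = new AtomicLong();

        @Override
        public Thread newThread(Runnable runnable) {
            // Yields names such as "elasticsearch-rest-client-0-thread-1".
            return new Thread(
                runnable,
                String.format(Locale.ROOT, "elasticsearch-rest-client-%d-thread-%d", factoryId, threadId.incrementAndGet())
            );
        }
    }

Registering such a factory via `HttpAsyncClientBuilder#setThreadFactory`, as `createHttpClient()` does in the diff below, is all that is needed for the I/O reactor threads to pick up these names.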
--- .../client/RestClientBuilder.java | 26 +++++++++++- .../client/RestClientBuilderIntegTests.java | 40 +++++++++++++++++++ docs/changelog/103160.yaml | 5 +++ 3 files changed, 70 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/103160.yaml diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java b/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java index 600ad6d671711..250f3f0b34cec 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java @@ -37,6 +37,8 @@ import java.util.Locale; import java.util.Objects; import java.util.Properties; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.atomic.AtomicLong; import javax.net.ssl.SSLContext; @@ -51,6 +53,9 @@ public final class RestClientBuilder { public static final int DEFAULT_MAX_CONN_PER_ROUTE = 10; public static final int DEFAULT_MAX_CONN_TOTAL = 30; + static final String THREAD_NAME_PREFIX = "elasticsearch-rest-client-"; + private static final String THREAD_NAME_FORMAT = THREAD_NAME_PREFIX + "%d-thread-%d"; + public static final String VERSION; static final String META_HEADER_NAME = "X-Elastic-Client-Meta"; static final String META_HEADER_VALUE; @@ -298,6 +303,24 @@ public RestClient build() { return restClient; } + /** + * Similar to {@code org.apache.http.impl.nio.reactor.AbstractMultiworkerIOReactor.DefaultThreadFactory} but with better thread names. + */ + private static class RestClientThreadFactory implements ThreadFactory { + private static final AtomicLong CLIENT_THREAD_POOL_ID_GENERATOR = new AtomicLong(); + + private final long clientThreadPoolId = CLIENT_THREAD_POOL_ID_GENERATOR.getAndIncrement(); // 0-based + private final AtomicLong clientThreadId = new AtomicLong(); + + @Override + public Thread newThread(Runnable runnable) { + return new Thread( + runnable, + String.format(Locale.ROOT, THREAD_NAME_FORMAT, clientThreadPoolId, clientThreadId.incrementAndGet()) // 1-based + ); + } + } + private CloseableHttpAsyncClient createHttpClient() { // default timeouts are all infinite RequestConfig.Builder requestConfigBuilder = RequestConfig.custom() @@ -315,7 +338,8 @@ private CloseableHttpAsyncClient createHttpClient() { .setMaxConnTotal(DEFAULT_MAX_CONN_TOTAL) .setSSLContext(SSLContext.getDefault()) .setUserAgent(USER_AGENT_HEADER_VALUE) - .setTargetAuthenticationStrategy(new PersistentCredentialsAuthenticationStrategy()); + .setTargetAuthenticationStrategy(new PersistentCredentialsAuthenticationStrategy()) + .setThreadFactory(new RestClientThreadFactory()); if (httpClientConfigCallback != null) { httpClientBuilder = httpClientConfigCallback.customizeHttpClient(httpClientBuilder); } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java index b9e0e996c3f76..265bd52eabe83 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java @@ -42,15 +42,21 @@ import java.security.cert.Certificate; import java.security.cert.CertificateFactory; import java.security.spec.PKCS8EncodedKeySpec; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLHandshakeException; import 
javax.net.ssl.TrustManagerFactory; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.startsWith; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; +import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** @@ -105,6 +111,40 @@ public void testBuilderUsesDefaultSSLContext() throws Exception { } } + public void testBuilderSetsThreadName() throws Exception { + assumeFalse("https://github.com/elastic/elasticsearch/issues/49094", inFipsJvm()); + final SSLContext defaultSSLContext = SSLContext.getDefault(); + try { + SSLContext.setDefault(getSslContext()); + try (RestClient client = buildRestClient()) { + final CountDownLatch latch = new CountDownLatch(1); + client.performRequestAsync(new Request("GET", "/"), new ResponseListener() { + @Override + public void onSuccess(Response response) { + assertThat( + Thread.currentThread().getName(), + allOf( + startsWith(RestClientBuilder.THREAD_NAME_PREFIX), + containsString("elasticsearch"), + containsString("rest-client") + ) + ); + assertEquals(200, response.getStatusLine().getStatusCode()); + latch.countDown(); + } + + @Override + public void onFailure(Exception exception) { + throw new AssertionError("unexpected", exception); + } + }); + assertTrue(latch.await(10, TimeUnit.SECONDS)); + } + } finally { + SSLContext.setDefault(defaultSSLContext); + } + } + private RestClient buildRestClient() { InetSocketAddress address = httpsServer.getAddress(); return RestClient.builder(new HttpHost(address.getHostString(), address.getPort(), "https")).build(); diff --git a/docs/changelog/103160.yaml b/docs/changelog/103160.yaml new file mode 100644 index 0000000000000..7701aa2b4a8d4 --- /dev/null +++ b/docs/changelog/103160.yaml @@ -0,0 +1,5 @@ +pr: 103160 +summary: Set thread name used by REST client +area: Java Low Level REST Client +type: enhancement +issues: [] From b16620557d86a97cc738f9b4a20b487cff9d4ab0 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 9 Jan 2024 09:10:06 +0000 Subject: [PATCH 23/47] Reduce exception-mangling in more tests (#104034) --- .../elasticsearch/snapshots/SnapshotStressTestsIT.java | 5 ++--- .../cluster/NodeConnectionsServiceTests.java | 6 +++--- .../org/elasticsearch/http/DefaultRestChannelTests.java | 4 ++-- .../java/org/elasticsearch/test/rest/FakeRestRequest.java | 4 ++-- .../shared/PartiallyCachedShardAllocationIntegTests.java | 8 ++++---- 5 files changed, 13 insertions(+), 14 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java index 2694bd143eb20..f70b86fd4fba2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.support.ListenableActionFuture; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; @@ -1589,7 +1588,7 @@ private static class TrackedSnapshot { private final 
TrackedCluster.TrackedRepository trackedRepository; private final String snapshotName; private final Semaphore permits = new Semaphore(Integer.MAX_VALUE); - private final AtomicReference> snapshotInfoFutureRef = new AtomicReference<>(); + private final AtomicReference> snapshotInfoFutureRef = new AtomicReference<>(); TrackedSnapshot(TrackedCluster.TrackedRepository trackedRepository, String snapshotName) { this.trackedRepository = trackedRepository; @@ -1628,7 +1627,7 @@ Releasable tryAcquireAllPermits() { } void getSnapshotInfo(Client client, ActionListener listener) { - final ListenableActionFuture newFuture = new ListenableActionFuture<>(); + final SubscribableListener newFuture = new SubscribableListener<>(); final boolean firstRunner = snapshotInfoFutureRef.compareAndSet(null, newFuture); diff --git a/server/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java index d93a844476463..85e16821ecb96 100644 --- a/server/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java @@ -13,8 +13,8 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.ListenableActionFuture; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; @@ -620,8 +620,8 @@ public void openConnection(DiscoveryNode node, ConnectionProfile profile, Action threadPool.generic().execute(() -> { runConnectionBlock(connectionBlock); listener.onResponse(new Connection() { - private final ListenableActionFuture closeListener = new ListenableActionFuture<>(); - private final ListenableActionFuture removedListener = new ListenableActionFuture<>(); + private final SubscribableListener closeListener = new SubscribableListener<>(); + private final SubscribableListener removedListener = new SubscribableListener<>(); private final RefCounted refCounted = AbstractRefCounted.of(() -> closeListener.onResponse(null)); diff --git a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java index 2a2986d974b0d..a22f17702b157 100644 --- a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java +++ b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java @@ -12,7 +12,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.ListenableActionFuture; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -570,7 +570,7 @@ public void close() { @TestLogging(reason = "Get HttpTracer to output trace logs", value = "org.elasticsearch.http.HttpTracer:TRACE") public void testHttpTracerSendResponseSuccess() { - final ListenableActionFuture sendResponseFuture = new ListenableActionFuture<>(); + final SubscribableListener sendResponseFuture = new SubscribableListener<>(); final HttpChannel httpChannel = new 
FakeRestRequest.FakeHttpChannel(InetSocketAddress.createUnresolved("127.0.0.1", 9200)) { @Override public void sendResponse(HttpResponse response, ActionListener listener) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java index bcd5867239f90..726d2ec0d963d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java @@ -9,7 +9,7 @@ package org.elasticsearch.test.rest; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.ListenableActionFuture; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; @@ -150,7 +150,7 @@ public Exception getInboundException() { public static class FakeHttpChannel implements HttpChannel { private final InetSocketAddress remoteAddress; - private final ListenableActionFuture closeFuture = new ListenableActionFuture<>(); + private final SubscribableListener closeFuture = new SubscribableListener<>(); public FakeHttpChannel(InetSocketAddress remoteAddress) { this.remoteAddress = remoteAddress; diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/PartiallyCachedShardAllocationIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/PartiallyCachedShardAllocationIntegTests.java index 2824aa22496a1..a55521394f548 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/PartiallyCachedShardAllocationIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/PartiallyCachedShardAllocationIntegTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplanation; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.support.ActionTestUtils; -import org.elasticsearch.action.support.ListenableActionFuture; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.routing.RoutingNode; @@ -198,10 +198,10 @@ public void testPartialSearchableSnapshotDelaysAllocationUntilNodeCacheStatesKno final MountSearchableSnapshotRequest req = prepareMountRequest(); - final Map> cacheInfoBlocks = ConcurrentCollections.newConcurrentMap(); - final Function> cacheInfoBlockGetter = nodeName -> cacheInfoBlocks.computeIfAbsent( + final Map> cacheInfoBlocks = ConcurrentCollections.newConcurrentMap(); + final Function> cacheInfoBlockGetter = nodeName -> cacheInfoBlocks.computeIfAbsent( nodeName, - ignored -> new ListenableActionFuture<>() + ignored -> new SubscribableListener<>() ); // Unblock all the existing nodes for (final String nodeName : internalCluster().getNodeNames()) { From f14d87be58d851dc8f5a3ef75d2eaa9c16638b14 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Tue, 9 Jan 2024 09:17:02 +0000 Subject: [PATCH 24/47] Add ES|QL 
async query api docs (#104054) This commit adds detailed API docs for the ES|QL async APIs, as well as moving the narrative to the ES|QL-specific REST docs. Additionally, a few top-level lists are reflowed to reflect that there are now multiple APIs. --- docs/reference/esql/esql-apis.asciidoc | 20 +++ .../esql/esql-async-query-api.asciidoc | 164 +++++++++++++----- .../esql/esql-async-query-delete-api.asciidoc | 42 +++++ .../esql/esql-async-query-get-api.asciidoc | 58 +++++++ docs/reference/esql/esql-query-api.asciidoc | 16 +- docs/reference/esql/esql-rest.asciidoc | 86 +++++++++ docs/reference/esql/esql-using.asciidoc | 4 +- docs/reference/rest-api/index.asciidoc | 4 +- 8 files changed, 336 insertions(+), 58 deletions(-) create mode 100644 docs/reference/esql/esql-apis.asciidoc create mode 100644 docs/reference/esql/esql-async-query-delete-api.asciidoc create mode 100644 docs/reference/esql/esql-async-query-get-api.asciidoc diff --git a/docs/reference/esql/esql-apis.asciidoc b/docs/reference/esql/esql-apis.asciidoc new file mode 100644 index 0000000000000..686a71506bc14 --- /dev/null +++ b/docs/reference/esql/esql-apis.asciidoc @@ -0,0 +1,20 @@ +[[esql-apis]] +== {esql} APIs + +The {es} Query Language ({esql}) provides a powerful way to filter, transform, +and analyze data stored in {es}, and in the future in other runtimes. For an +overview of {esql} and related tutorials, see <>. + +* <> +* <> +* <> +* <> + + +include::esql-query-api.asciidoc[] + +include::esql-async-query-api.asciidoc[] + +include::esql-async-query-get-api.asciidoc[] + +include::esql-async-query-delete-api.asciidoc[] diff --git a/docs/reference/esql/esql-async-query-api.asciidoc b/docs/reference/esql/esql-async-query-api.asciidoc index e65562bc81a53..bd753f7132b37 100644 --- a/docs/reference/esql/esql-async-query-api.asciidoc +++ b/docs/reference/esql/esql-async-query-api.asciidoc @@ -1,27 +1,17 @@ [[esql-async-query-api]] -== {esql} async query API +=== {esql} async query API ++++ {esql} async query API ++++ -Runs an async {esql} search. +Runs an async <>. -The async query API lets you asynchronously execute a search request, -monitor its progress, and retrieve results as they become available. +The async query API lets you asynchronously execute a query request, +monitor its progress, and retrieve results when they become available. -Executing an <> is commonly quite fast, -however searches across large data sets or frozen data can take some time. -To avoid long waits, run an async {esql} search. - -Searches initiated by this API may return search results or not. The -`wait_for_completion_timeout` property determines how long to wait for -the search results. The default value is 1 second. If the results are -not available by this time, a search id is return which can be later -used to retrieve the results. - -Initiates an async search for an <> -query. The API accepts the same parameters and request body as the -<>. +The API accepts the same parameters and request body as the synchronous +<>, along with additional async-related +properties as outlined below.
[source,console] ---- POST /_query/async { "query": """ FROM library | EVAL year = DATE_TRUNC(1 YEARS, release_date) | STATS MAX(page_count) BY year | SORT year | LIMIT 5 """, "wait_for_completion_timeout": "2s" } ---- // TEST[setup:library] -If the results are not available within the timeout period, 2 seconds in -this case, the search returns no results but rather a response that +If the results are not available within the given timeout period, 2 seconds +in this case, no results are returned but rather a response that includes: - * A search ID - * An `is_running` value of true, indicating the search is ongoing + * A query ID + * An `is_running` value of _true_, indicating the query is ongoing The query continues to run in the background without blocking other requests. @@ -56,37 +46,119 @@ requests. "is_running": true } ---- -// TEST[skip: no access to search ID - may return response values] - -To check the progress of an async search, use the <> with the search ID. Specify how long you'd like for -complete results in the `wait_for_completion_timeout` parameter. - -[source,console] ----- -GET /_query/async/FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=?wait_for_completion_timeout=30s ----- -// TEST[skip: no access to search ID - may return response values] +// TEST[skip: no access to query ID - may return response values] -If the response's `is_running` value is `false`, the async search has -finished, and the results are returned. +Otherwise, if the response's `is_running` value is `false`, the async +query has finished, and the results are returned. [source,console-result] ---- { - "id": "FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=", "is_running": false, "columns": ... } ---- -// TEST[skip: no access to search ID - may return response values] - -Use the <> to -delete an async search before the `keep_alive` period ends. If the query -is still running, {es} cancels it. - -[source,console] ----- -DELETE /_query/async/FmdMX2pIang3UWhLRU5QS0lqdlppYncaMUpYQ05oSkpTc3kwZ21EdC1tbFJXQToxOTI= ----- -// TEST[skip: no access to search ID] +// TEST[skip: no access to query ID - may return response values] + +[[esql-async-query-api-request]] +==== {api-request-title} + +`POST /_query/async` + +[[esql-async-query-api-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have the `read` +<> for the data stream, index, +or alias you query. + +[[esql-async-query-api-path-params]] +==== {api-path-parms-title} + +The API accepts the same parameters as the synchronous +<>. + +[[esql-async-query-api-request-body]] +==== {api-request-body-title} + +The API accepts the same request body as the synchronous +<>, along with the following +parameters: + +[[esql-async-query-api-wait-for-completion-timeout]] +`wait_for_completion_timeout`:: ++ +-- +(Optional, <>) +Timeout duration to wait for the request to finish. Defaults to 1 second, +meaning the request waits for 1 second for the query results. + +If this parameter is specified and the request completes during this period, +complete results are returned. + +If the request does not complete during this period, a query +<> is returned. +-- + +[[esql-async-query-api-keep-on-completion]] +`keep_on_completion`:: ++ +-- +(Optional, Boolean) +If `true`, the query and its results are stored in the cluster. + +If `false`, the query and its results are stored in the cluster only if the +request does not complete during the period set by the +<> +parameter. Defaults to `false`. +-- + +`keep_alive`:: ++ +-- +(Optional, <>) +Period for which the query and its results are stored in the cluster. Defaults +to `5d` (five days).
+ +When this period expires, the query and its results are deleted, even if the +query is still ongoing. + +If the <> parameter +is `false`, {es} only stores async queries that do not complete within the period +set by the <> +parameter, regardless of this value. +-- + +[[esql-async-query-api-response-body]] +==== {api-response-body-title} + +The API returns the same response body as the synchronous +<>, along with the following +properties: + +[[esql-async-query-api-response-body-query-id]] +`id`:: ++ +-- +(string) +Identifier for the query. + +This query ID is only provided if one of the following conditions is met: + +* A query request does not return complete results during the +<> +parameter's timeout period. + +* The query request's <> +parameter is `true`. + +You can use this ID with the <> to get the current status and available results for the query. +-- + +`is_running`:: ++ +-- +(Boolean) +If `true`, the query request is still executing. +-- diff --git a/docs/reference/esql/esql-async-query-delete-api.asciidoc b/docs/reference/esql/esql-async-query-delete-api.asciidoc new file mode 100644 index 0000000000000..90f8c06b9124a --- /dev/null +++ b/docs/reference/esql/esql-async-query-delete-api.asciidoc @@ -0,0 +1,42 @@ +[[esql-async-query-delete-api]] +=== {esql} async query delete API +++++ +{esql} async query delete API +++++ + +The {esql} async query delete API is used to manually delete an async query +by ID. If the query is still running, the query will be cancelled. Otherwise, +the stored results are deleted. + +[source,console] +---- +DELETE /_query/async/FkpMRkJGS1gzVDRlM3g4ZzMyRGlLbkEaTXlJZHdNT09TU2VTZVBoNDM3cFZMUToxMDM= +---- +// TEST[skip: no access to query ID] + +[[esql-async-query-delete-api-request]] +==== {api-request-title} + +`DELETE /_query/async/` + +[[esql-async-query-delete-api-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, only the following users can +use this API to delete a query: + +** The authenticated user that submitted the original query request +** Users with the `cancel_task` <> + + +[[esql-async-query-delete-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +Identifier for the query to delete. ++ +A query ID is provided in the <>'s +response for a query that does not complete in the awaited time. A query ID is +also provided if the request's <> +parameter is `true`. diff --git a/docs/reference/esql/esql-async-query-get-api.asciidoc b/docs/reference/esql/esql-async-query-get-api.asciidoc new file mode 100644 index 0000000000000..ec68313b2c490 --- /dev/null +++ b/docs/reference/esql/esql-async-query-get-api.asciidoc @@ -0,0 +1,58 @@ +[[esql-async-query-get-api]] +=== {esql} async query get API +++++ +{esql} async query get API +++++ + +Returns the current status and available results for an <> or its stored results. + +[source,console] +---- +GET /_query/async/FkpMRkJGS1gzVDRlM3g4ZzMyRGlLbkEaTXlJZHdNT09TU2VTZVBoNDM3cFZMUToxMDM= +---- +// TEST[skip: no access to query ID] + +[[esql-async-query-get-api-request]] +==== {api-request-title} + +`GET /_query/async/` + +[[esql-async-query-get-api-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, only the user who first submitted +the {esql} query can retrieve the results using this API. + +[[esql-async-query-get-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +Identifier for the query. ++ +A query ID is provided in the <>'s +response for a query that does not complete in the awaited time.
A query ID is +also provided if the request's <> +parameter is `true`. + +[[esql-async-query-get-api-query-params]] +==== {api-query-parms-title} + +`wait_for_completion_timeout`:: +(Optional, <>) +Timeout duration to wait for the request to finish. Defaults to no timeout, +meaning the request waits for complete query results. ++ +If this parameter is specified and the request completes during this period, +complete query results are returned. ++ +If the request does not complete during this period, the response returns an +`is_running` value of `true` and no results. + +[[esql-async-query-get-api-response-body]] +==== {api-response-body-title} + +The {esql} async query get API returns the same response body as the {esql} +query API. See the {esql} query API's <>. diff --git a/docs/reference/esql/esql-query-api.asciidoc b/docs/reference/esql/esql-query-api.asciidoc index afa9ab7254cfa..bbfa41538528a 100644 --- a/docs/reference/esql/esql-query-api.asciidoc +++ b/docs/reference/esql/esql-query-api.asciidoc @@ -1,5 +1,5 @@ [[esql-query-api]] -== {esql} query API +=== {esql} query API ++++ {esql} query API ++++ @@ -23,13 +23,13 @@ POST /_query [discrete] [[esql-query-api-request]] -=== {api-request-title} +==== {api-request-title} `POST _query` [discrete] [[esql-query-api-prereqs]] -=== {api-prereq-title} +==== {api-prereq-title} * If the {es} {security-features} are enabled, you must have the `read` <> for the data stream, index, @@ -37,7 +37,7 @@ or alias you search. [discrete] [[esql-query-api-query-params]] -=== {api-query-parms-title} +==== {api-query-parms-title} `delimiter`:: (Optional, string) Separator for CSV results. Defaults to `,`. The API only @@ -54,7 +54,7 @@ precedence. [discrete] [role="child_attributes"] [[esql-query-api-request-body]] -=== {api-request-body-title} +==== {api-request-body-title} `columnar`:: (Optional, Boolean) If `true`, returns results in a columnar format. Defaults to @@ -71,7 +71,7 @@ responses. See <>. [discrete] [role="child_attributes"] [[esql-query-api-response-body]] -=== {api-response-body-title} +==== {api-response-body-title} `columns`:: (array of objects) Column headings for the search results. Each object is a column. + .Properties of `columns` objects [%collapsible%open] -==== +===== `name`:: (string) Name of the column. `type`:: (string) Data type for the column. -==== +===== `rows`:: (array of arrays) diff --git a/docs/reference/esql/esql-rest.asciidoc b/docs/reference/esql/esql-rest.asciidoc index 2d47f6e46ff65..11b3e12787e29 100644 --- a/docs/reference/esql/esql-rest.asciidoc +++ b/docs/reference/esql/esql-rest.asciidoc @@ -247,3 +247,89 @@ POST /_query } ---- // TEST[setup:library] + +[discrete] +[[esql-rest-async-query]] +==== Running an async {esql} query + +The <> lets you asynchronously +execute a query request, monitor its progress, and retrieve results when +they become available. + +Executing an {esql} query is commonly quite fast, however queries across +large data sets or frozen data can take some time. To avoid long waits, +run an async {esql} query. + +Queries initiated by the async query API may or may not return results +immediately. The `wait_for_completion_timeout` property determines how long to wait for +the results. If the results are not available by this time, a +<> is returned, which +can later be used to retrieve the results.
For example: + +[source,console] +---- +POST /_query/async +{ + "query": """ + FROM library + | EVAL year = DATE_TRUNC(1 YEARS, release_date) + | STATS MAX(page_count) BY year + | SORT year + | LIMIT 5 + """, + "wait_for_completion_timeout": "2s" +} +---- +// TEST[setup:library] + +If the results are not available within the given timeout period, 2 +seconds in this case, no results are returned but rather a response that +includes: + +* A query ID +* An `is_running` value of _true_, indicating the query is ongoing + +The query continues to run in the background without blocking other +requests. + +[source,console-result] +---- +{ + "id": "FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=", + "is_running": true +} +---- +// TEST[skip: no access to query ID - may return response values] + +To check the progress of an async query, use the <> with the query ID. Specify how long you'd like to wait +for complete results in the `wait_for_completion_timeout` parameter. + +[source,console] +---- +GET /_query/async/FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=?wait_for_completion_timeout=30s +---- +// TEST[skip: no access to query ID - may return response values] + +If the response's `is_running` value is `false`, the query has finished, +and the results are returned. + +[source,console-result] +---- +{ + "is_running": false, + "columns": ... +} +---- +// TEST[skip: no access to query ID - may return response values] + +Use the <> to +delete an async query before the `keep_alive` period ends. If the query +is still running, {es} cancels it. + +[source,console] +---- +DELETE /_query/async/FmdMX2pIang3UWhLRU5QS0lqdlppYncaMUpYQ05oSkpTc3kwZ21EdC1tbFJXQToxOTI= +---- +// TEST[skip: no access to query ID] + diff --git a/docs/reference/esql/esql-using.asciidoc b/docs/reference/esql/esql-using.asciidoc index 235c7defe559b..f11fdd2d058a5 100644 --- a/docs/reference/esql/esql-using.asciidoc +++ b/docs/reference/esql/esql-using.asciidoc @@ -2,7 +2,7 @@ == Using {esql} <>:: -Information about using the <>. +Information about using the <>. <>:: Using {esql} in {kib} to query and aggregate your data, create visualizations, and set up alerts. <>:: Using {esql} in {elastic-sec} to investigate events in Timeline, create -detection rules, and build {esql} queries using Elastic AI Assistant. +detection rules, and build {esql} queries using Elastic AI Assistant. <>:: Using the <> to list and cancel {esql} queries. diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index 59d96d1a26904..7757e7c2f7926 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -24,7 +24,7 @@ not be included yet.
* <> * <> * <> -* <> +* <> * <> * <> * <> @@ -72,7 +72,7 @@ include::{es-repo-dir}/data-streams/data-stream-apis.asciidoc[] include::{es-repo-dir}/docs.asciidoc[] include::{es-repo-dir}/ingest/apis/enrich/index.asciidoc[] include::{es-repo-dir}/eql/eql-apis.asciidoc[] -include::{es-repo-dir}/esql/esql-query-api.asciidoc[] +include::{es-repo-dir}/esql/esql-apis.asciidoc[] include::{es-repo-dir}/features/apis/features-apis.asciidoc[] include::{es-repo-dir}/fleet/index.asciidoc[] include::{es-repo-dir}/text-structure/apis/find-structure.asciidoc[leveloffset=+1] From 834d1a8c3d522e642b10e95d292e0512588403a5 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 9 Jan 2024 10:18:20 +0100 Subject: [PATCH 25/47] Make SearchResponseSections RefCounted (#104060) We want to make `SearchHits` ref-counted, so this class, which refers to the hits, needs to be ref-counted as well. --- .../search/AbstractSearchAsyncAction.java | 5 +- .../action/search/FetchSearchPhase.java | 28 ++++------ .../action/search/SearchPhaseController.java | 4 +- .../action/search/SearchResponseSections.java | 51 ++++++++++++++++++- .../search/SearchScrollAsyncAction.java | 33 +++++++----- .../TransportOpenPointInTimeAction.java | 6 +-- .../AbstractSearchAsyncActionTests.java | 6 +-- .../action/search/ExpandSearchPhaseTests.java | 31 ++++++++--- .../search/FetchLookupFieldsPhaseTests.java | 26 +++++----- .../action/search/SearchAsyncActionTests.java | 10 +--- .../search/SearchPhaseControllerTests.java | 6 ++- 11 files changed, 129 insertions(+), 77 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index c77a03824a75c..58ab8169ffb30 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -29,7 +29,6 @@ import org.elasticsearch.core.Releasables; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchContextMissingException; -import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.builder.PointInTimeBuilder; @@ -212,9 +211,7 @@ public final void start() { // total hits is null in the response if the tracking of total hits is disabled boolean withTotalHits = trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED; sendSearchResponse( - withTotalHits - ? new SearchResponseSections(SearchHits.EMPTY_WITH_TOTAL_HITS, null, null, false, null, null, 1) - : new SearchResponseSections(SearchHits.EMPTY_WITHOUT_TOTAL_HITS, null, null, false, null, null, 1), + withTotalHits ?
SearchResponseSections.EMPTY_WITH_TOTAL_HITS : SearchResponseSections.EMPTY_WITHOUT_TOTAL_HITS, new AtomicArray<>(0) ); return; diff --git a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java index 27ff6a2ab8309..11528f8e1521f 100644 --- a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java @@ -38,16 +38,11 @@ final class FetchSearchPhase extends SearchPhase { private final AggregatedDfs aggregatedDfs; FetchSearchPhase(SearchPhaseResults resultConsumer, AggregatedDfs aggregatedDfs, SearchPhaseContext context) { - this( - resultConsumer, - aggregatedDfs, - context, - (response, queryPhaseResults) -> new ExpandSearchPhase( - context, - response.hits, - () -> new FetchLookupFieldsPhase(context, response, queryPhaseResults) - ) - ); + this(resultConsumer, aggregatedDfs, context, (response, queryPhaseResults) -> { + response.mustIncRef(); + context.addReleasable(response::decRef); + return new ExpandSearchPhase(context, response.hits, () -> new FetchLookupFieldsPhase(context, response, queryPhaseResults)); + }); } FetchSearchPhase( @@ -229,12 +224,11 @@ private void moveToNextPhase( SearchPhaseController.ReducedQueryPhase reducedQueryPhase, AtomicArray fetchResultsArr ) { - context.executeNextPhase( - this, - nextPhaseFactory.apply( - SearchPhaseController.merge(context.getRequest().scroll() != null, reducedQueryPhase, fetchResultsArr), - queryResults - ) - ); + var resp = SearchPhaseController.merge(context.getRequest().scroll() != null, reducedQueryPhase, fetchResultsArr); + try { + context.executeNextPhase(this, nextPhaseFactory.apply(resp, queryResults)); + } finally { + resp.decRef(); + } } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index e425d9d66dd69..5ffb9024d3ee1 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -360,7 +360,7 @@ public static SearchResponseSections merge( AtomicArray fetchResultsArray ) { if (reducedQueryPhase.isEmptyResult) { - return new SearchResponseSections(SearchHits.EMPTY_WITH_TOTAL_HITS, null, null, false, null, null, 1); + return SearchResponseSections.EMPTY_WITH_TOTAL_HITS; } ScoreDoc[] sortedDocs = reducedQueryPhase.sortedTopDocs.scoreDocs; var fetchResults = fetchResultsArray.asList(); @@ -465,7 +465,7 @@ private static SearchHits getHits( } } return new SearchHits( - hits.toArray(new SearchHit[0]), + hits.toArray(SearchHits.EMPTY), reducedQueryPhase.totalHits, reducedQueryPhase.maxScore, sortedTopDocs.sortFields, diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java index 6f382b9e5f8d6..805ef033db27a 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java @@ -8,11 +8,14 @@ package org.elasticsearch.action.search; +import org.elasticsearch.core.AbstractRefCounted; +import org.elasticsearch.core.RefCounted; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregations; import 
org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.transport.LeakTracker; import java.util.Collections; import java.util.Map; @@ -21,8 +24,26 @@ * Holds some sections that a search response is composed of (hits, aggs, suggestions etc.) during some steps of the search response * building. */ -public class SearchResponseSections { +public class SearchResponseSections implements RefCounted { + public static final SearchResponseSections EMPTY_WITH_TOTAL_HITS = new SearchResponseSections( + SearchHits.EMPTY_WITH_TOTAL_HITS, + null, + null, + false, + null, + null, + 1 + ); + public static final SearchResponseSections EMPTY_WITHOUT_TOTAL_HITS = new SearchResponseSections( + SearchHits.EMPTY_WITHOUT_TOTAL_HITS, + null, + null, + false, + null, + null, + 1 + ); protected final SearchHits hits; protected final Aggregations aggregations; protected final Suggest suggest; @@ -31,6 +52,8 @@ public class SearchResponseSections { protected final Boolean terminatedEarly; protected final int numReducePhases; + private final RefCounted refCounted; + public SearchResponseSections( SearchHits hits, Aggregations aggregations, @@ -47,6 +70,12 @@ public SearchResponseSections( this.timedOut = timedOut; this.terminatedEarly = terminatedEarly; this.numReducePhases = numReducePhases; + refCounted = hits.getHits().length > 0 ? LeakTracker.wrap(new AbstractRefCounted() { + @Override + protected void closeInternal() { + // TODO: noop until hits are ref counted + } + }) : ALWAYS_REFERENCED; } public final boolean timedOut() { @@ -88,4 +117,24 @@ public final Map profile() { } return profileResults.getShardResults(); } + + @Override + public void incRef() { + refCounted.incRef(); + } + + @Override + public boolean tryIncRef() { + return refCounted.tryIncRef(); + } + + @Override + public boolean decRef() { + return refCounted.decRef(); + } + + @Override + public boolean hasReferences() { + return refCounted.hasReferences(); + } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java index 885fd98fbdc15..0616a99fc5dd0 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java @@ -245,20 +245,25 @@ protected final void sendResponse( if (request.scroll() != null) { scrollId = request.scrollId(); } - ActionListener.respondAndRelease( - listener, - new SearchResponse( - SearchPhaseController.merge(true, queryPhase, fetchResults), - scrollId, - this.scrollId.getContext().length, - successfulOps.get(), - 0, - buildTookInMillis(), - buildShardFailures(), - SearchResponse.Clusters.EMPTY, - null - ) - ); + var sections = SearchPhaseController.merge(true, queryPhase, fetchResults); + try { + ActionListener.respondAndRelease( + listener, + new SearchResponse( + sections, + scrollId, + this.scrollId.getContext().length, + successfulOps.get(), + 0, + buildTookInMillis(), + buildShardFailures(), + SearchResponse.Clusters.EMPTY, + null + ) + ); + } finally { + sections.decRef(); + } } catch (Exception e) { listener.onFailure(new ReduceSearchPhaseException("fetch", "inner finish failed", e, buildShardFailures())); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java 
b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java index eb01cb2f3137b..3b1093c207854 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.SearchShardTarget; @@ -252,10 +251,7 @@ public void onFailure(Exception e) { @Override protected void doRun() { - sendSearchResponse( - new SearchResponseSections(SearchHits.EMPTY_WITH_TOTAL_HITS, null, null, false, null, null, 1), - results.getAtomicArray() - ); + sendSearchResponse(SearchResponseSections.EMPTY_WITH_TOTAL_HITS, results.getAtomicArray()); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java index 8cbcf4962e156..bd6171e353add 100644 --- a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.AliasFilter; @@ -194,10 +193,7 @@ public void testSendSearchResponseDisallowPartialFailures() { new IllegalArgumentException() ); } - action.sendSearchResponse( - new SearchResponseSections(SearchHits.EMPTY_WITH_TOTAL_HITS, null, null, false, null, null, 1), - phaseResults.results - ); + action.sendSearchResponse(SearchResponseSections.EMPTY_WITH_TOTAL_HITS, phaseResults.results); assertThat(exception.get(), instanceOf(SearchPhaseExecutionException.class)); SearchPhaseExecutionException searchPhaseExecutionException = (SearchPhaseExecutionException) exception.get(); assertEquals(0, searchPhaseExecutionException.getSuppressed().length); diff --git a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java index 63ac832b0723b..648cb8aa60158 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java @@ -91,10 +91,12 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL List mSearchResponses = new ArrayList<>(numInnerHits); for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) { - mockSearchPhaseContext.sendSearchResponse( - new SearchResponseSections(collapsedHits.get(innerHitNum), null, null, false, null, null, 1), - null - ); + var sections = new SearchResponseSections(collapsedHits.get(innerHitNum), null, null, false, null, null, 1); + try { + mockSearchPhaseContext.sendSearchResponse(sections, null); + } finally { + sections.decRef(); + } mSearchResponses.add(new MultiSearchResponse.Item(mockSearchPhaseContext.searchResponse.get(), null)); } @@ 
-111,7 +113,12 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { @Override public void run() { - mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null); + var sections = new SearchResponseSections(hits, null, null, false, null, null, 1); + try { + mockSearchPhaseContext.sendSearchResponse(sections, null); + } finally { + sections.decRef(); + } } }); @@ -194,7 +201,12 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { @Override public void run() { - mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null); + var sections = new SearchResponseSections(hits, null, null, false, null, null, 1); + try { + mockSearchPhaseContext.sendSearchResponse(sections, null); + } finally { + sections.decRef(); + } } }); phase.run(); @@ -222,7 +234,12 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { @Override public void run() { - mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null); + var sections = new SearchResponseSections(hits, null, null, false, null, null, 1); + try { + mockSearchPhaseContext.sendSearchResponse(sections, null); + } finally { + sections.decRef(); + } } }); phase.run(); diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java index a5c0c59867627..035d01108d655 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java @@ -46,12 +46,13 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL searchHits[i] = SearchHitTests.createTestItem(randomBoolean(), randomBoolean()); } SearchHits hits = new SearchHits(searchHits, new TotalHits(numHits, TotalHits.Relation.EQUAL_TO), 1.0f); - FetchLookupFieldsPhase phase = new FetchLookupFieldsPhase( - searchPhaseContext, - new SearchResponseSections(hits, null, null, false, null, null, 1), - null - ); - phase.run(); + var sections = new SearchResponseSections(hits, null, null, false, null, null, 1); + try { + FetchLookupFieldsPhase phase = new FetchLookupFieldsPhase(searchPhaseContext, sections, null); + phase.run(); + } finally { + sections.decRef(); + } searchPhaseContext.assertNoFailure(); assertNotNull(searchPhaseContext.searchResponse.get()); } finally { @@ -185,12 +186,13 @@ void sendExecuteMultiSearch( new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1.0f ); - FetchLookupFieldsPhase phase = new FetchLookupFieldsPhase( - searchPhaseContext, - new SearchResponseSections(searchHits, null, null, false, null, null, 1), - null - ); - phase.run(); + var sections = new SearchResponseSections(searchHits, null, null, false, null, null, 1); + try { + FetchLookupFieldsPhase phase = new FetchLookupFieldsPhase(searchPhaseContext, sections, null); + phase.run(); + } finally { + sections.decRef(); + } assertTrue(requestSent.get()); searchPhaseContext.assertNoFailure(); 
assertNotNull(searchPhaseContext.searchResponse.get()); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index 25f05d7e3a670..d6b1bd8057708 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -152,10 +152,7 @@ public void run() { assertTrue(searchPhaseDidRun.get()); assertEquals(shardsIter.size() - numSkipped, numRequests.get()); - asyncAction.sendSearchResponse( - new SearchResponseSections(SearchHits.EMPTY_WITH_TOTAL_HITS, null, null, false, null, null, 1), - null - ); + asyncAction.sendSearchResponse(SearchResponseSections.EMPTY_WITH_TOTAL_HITS, null); assertNotNull(searchResponse.get()); assertEquals(0, searchResponse.get().getFailedShards()); assertEquals(numSkipped, searchResponse.get().getSkippedShards()); @@ -698,10 +695,7 @@ public void run() { assertThat(latch.await(4, TimeUnit.SECONDS), equalTo(true)); assertThat(searchPhaseDidRun.get(), equalTo(true)); - asyncAction.sendSearchResponse( - new SearchResponseSections(SearchHits.EMPTY_WITH_TOTAL_HITS, null, null, false, null, null, 1), - null - ); + asyncAction.sendSearchResponse(SearchResponseSections.EMPTY_WITH_TOTAL_HITS, null); assertNotNull(searchResponse.get()); assertThat(searchResponse.get().getSkippedShards(), equalTo(numUnavailableSkippedShards)); assertThat(searchResponse.get().getFailedShards(), equalTo(0)); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index 19644d274aabf..ac88f999adef6 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -292,8 +292,8 @@ public void testMerge() { reducedQueryPhase.suggest(), profile ); + final SearchResponseSections mergedResponse = SearchPhaseController.merge(false, reducedQueryPhase, fetchResults); try { - SearchResponseSections mergedResponse = SearchPhaseController.merge(false, reducedQueryPhase, fetchResults); if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) { assertNull(mergedResponse.hits.getTotalHits()); } else { @@ -346,6 +346,7 @@ public void testMerge() { assertThat(mergedResponse.profile(), is(anEmptyMap())); } } finally { + mergedResponse.decRef(); fetchResults.asList().forEach(TransportMessage::decRef); } } finally { @@ -410,8 +411,8 @@ protected boolean lessThan(RankDoc a, RankDoc b) { reducedQueryPhase.suggest(), false ); + SearchResponseSections mergedResponse = SearchPhaseController.merge(false, reducedQueryPhase, fetchResults); try { - SearchResponseSections mergedResponse = SearchPhaseController.merge(false, reducedQueryPhase, fetchResults); if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) { assertNull(mergedResponse.hits.getTotalHits()); } else { @@ -427,6 +428,7 @@ protected boolean lessThan(RankDoc a, RankDoc b) { assertThat(mergedResponse.hits().getHits().length, equalTo(reducedQueryPhase.sortedTopDocs().scoreDocs().length)); assertThat(mergedResponse.profile(), is(anEmptyMap())); } finally { + mergedResponse.decRef(); fetchResults.asList().forEach(TransportMessage::decRef); } } finally { From df8202206a8e9c33c737ce3a0bfa3d8cb5f40298 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 9 Jan 2024 10:18:54 
+0100 Subject: [PATCH 26/47] Avoid redundant BufferedInputStream when reading compressed x-content (#103998) A large number of the use cases for the decompressor involve reading x-content from the decompressed stream. Jackson itself does all the buffering we need here. We can avoid allocations and indirection by not always returning a buffered input stream and instead wrapping a `StreamInput` on demand, including buffering (this is the only use case that I could find that requires buffering). --- .../PublicationTransportHandler.java | 3 +-- .../coordination/ValidateJoinRequest.java | 3 +-- .../common/compress/Compressor.java | 11 +++++++++++ .../common/compress/DeflateCompressor.java | 17 +++++++++++------ .../common/xcontent/XContentHelper.java | 8 ++------ .../transport/TransportLogger.java | 3 +-- .../PublicationTransportHandlerTests.java | 3 +-- .../xpack/core/async/AsyncTaskIndexService.java | 7 +++++-- .../eql/action/AsyncEqlSearchActionIT.java | 10 ++++++---- .../sql/action/AsyncSqlSearchActionIT.java | 9 ++++++--- .../xpack/sql/common/io/SqlStreamInput.java | 4 +--- 11 files changed, 46 insertions(+), 32 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/PublicationTransportHandler.java b/server/src/main/java/org/elasticsearch/cluster/coordination/PublicationTransportHandler.java index 781a05d535b16..5c5c5eee17da3 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/PublicationTransportHandler.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/PublicationTransportHandler.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.compress.Compressor; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.Streams; -import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.PositionTrackingOutputStreamStreamOutput; @@ -128,7 +127,7 @@ private PublishWithJoinResponse handleIncomingPublishRequest(BytesTransportReque StreamInput in = request.bytes().streamInput(); try { if (compressor != null) { - in = new InputStreamStreamInput(compressor.threadLocalInputStream(in)); + in = compressor.threadLocalStreamInput(in); } in = new NamedWriteableAwareStreamInput(in, namedWriteableRegistry); in.setTransportVersion(request.version()); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java index c6463949f774f..20c1139884b24 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressorFactory; -import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; @@ -52,7 +51,7 @@ private static ClusterState readCompressed( try ( var bytesStreamInput = bytes.streamInput(); var in = new NamedWriteableAwareStreamInput( - new
InputStreamStreamInput(CompressorFactory.COMPRESSOR.threadLocalInputStream(bytesStreamInput)), + CompressorFactory.COMPRESSOR.threadLocalStreamInput(bytesStreamInput), namedWriteableRegistry ) ) { diff --git a/server/src/main/java/org/elasticsearch/common/compress/Compressor.java b/server/src/main/java/org/elasticsearch/common/compress/Compressor.java index a6c4a9521d9b2..239f168306a94 100644 --- a/server/src/main/java/org/elasticsearch/common/compress/Compressor.java +++ b/server/src/main/java/org/elasticsearch/common/compress/Compressor.java @@ -9,7 +9,10 @@ package org.elasticsearch.common.compress; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.InputStreamStreamInput; +import org.elasticsearch.common.io.stream.StreamInput; +import java.io.BufferedInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -18,6 +21,14 @@ public interface Compressor { boolean isCompressed(BytesReference bytes); + /** + * Same as {@link #threadLocalInputStream(InputStream)} but wraps the returned stream as a {@link StreamInput}. + */ + default StreamInput threadLocalStreamInput(InputStream in) throws IOException { + // wrap stream in buffer since InputStreamStreamInput doesn't do any buffering itself but does a lot of small reads + return new InputStreamStreamInput(new BufferedInputStream(threadLocalInputStream(in), DeflateCompressor.BUFFER_SIZE)); + } + /** * Creates a new input stream that decompresses the contents read from the provided input stream. * Closing the returned {@link InputStream} will close the provided stream input. diff --git a/server/src/main/java/org/elasticsearch/common/compress/DeflateCompressor.java b/server/src/main/java/org/elasticsearch/common/compress/DeflateCompressor.java index 00465855cb652..f14c906b9d64d 100644 --- a/server/src/main/java/org/elasticsearch/common/compress/DeflateCompressor.java +++ b/server/src/main/java/org/elasticsearch/common/compress/DeflateCompressor.java @@ -14,7 +14,6 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Streams; -import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.IOException; import java.io.InputStream; @@ -150,18 +149,24 @@ public static InputStream inputStream(InputStream in, boolean threadLocal) throw inflater = new Inflater(true); releasable = inflater::end; } - return new BufferedInputStream(new InflaterInputStream(in, inflater, BUFFER_SIZE) { + return new InflaterInputStream(in, inflater, BUFFER_SIZE) { + + private Releasable release = releasable; + @Override public void close() throws IOException { + if (release == null) { + return; + } try { super.close(); } finally { - // We are ensured to only call this once since we wrap this stream in a BufferedInputStream that will only close - // its delegate once - releasable.close(); + // We need to ensure that we only call this once + release.close(); + release = null; } } - }, BUFFER_SIZE); + }; } @Override diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java index 6723a24cfdf33..5c1870463149e 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java @@ -120,13 +120,9 @@ public static XContentParser createParser(XContentParserConfiguration config, By Objects.requireNonNull(xContentType); Compressor compressor = 
CompressorFactory.compressor(bytes); if (compressor != null) { - InputStream compressedInput = compressor.threadLocalInputStream(bytes.streamInput()); - if (compressedInput.markSupported() == false) { - compressedInput = new BufferedInputStream(compressedInput); - } - return XContentFactory.xContent(xContentType).createParser(config, compressedInput); + return XContentFactory.xContent(xContentType).createParser(config, compressor.threadLocalInputStream(bytes.streamInput())); } else { - // TODO now that we have config we make a method on bytes to do this building wihout needing this check everywhere + // TODO now that we have config we make a method on bytes to do this building without needing this check everywhere return createParserNotCompressed(config, bytes, xContentType); } } diff --git a/server/src/main/java/org/elasticsearch/transport/TransportLogger.java b/server/src/main/java/org/elasticsearch/transport/TransportLogger.java index be9e0070d05ba..485fd85e7ab7c 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportLogger.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportLogger.java @@ -13,7 +13,6 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressorFactory; -import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.IOUtils; @@ -159,7 +158,7 @@ private static String format(TcpChannel channel, InboundMessage message, String private static StreamInput decompressingStream(byte status, StreamInput streamInput) throws IOException { if (TransportStatus.isCompress(status) && streamInput.available() > 0) { try { - return new InputStreamStreamInput(CompressorFactory.COMPRESSOR.threadLocalInputStream(streamInput)); + return CompressorFactory.COMPRESSOR.threadLocalStreamInput(streamInput); } catch (IllegalArgumentException e) { throw new IllegalStateException("stream marked as compressed, but is missing deflate header"); } diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTransportHandlerTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTransportHandlerTests.java index 0cdc5de86a8d3..6fce5927a62dd 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTransportHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTransportHandlerTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.compress.Compressor; import org.elasticsearch.common.compress.CompressorFactory; -import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.RecyclerBytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -147,7 +146,7 @@ private static boolean isDiff(BytesTransportRequest request, TransportVersion ve in = request.bytes().streamInput(); final Compressor compressor = CompressorFactory.compressor(request.bytes()); if (compressor != null) { - in = new InputStreamStreamInput(compressor.threadLocalInputStream(in)); + in = compressor.threadLocalStreamInput(in); } in.setTransportVersion(version); return in.readBoolean() == false; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java index 4ab6337b90dd2..c20300db84a3d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java @@ -582,10 +582,13 @@ public int read() { }); TransportVersion version = TransportVersion.readVersion(new InputStreamStreamInput(encodedIn)); assert version.onOrBefore(TransportVersion.current()) : version + " >= " + TransportVersion.current(); + final StreamInput input; if (version.onOrAfter(TransportVersions.V_7_15_0)) { - encodedIn = CompressorFactory.COMPRESSOR.threadLocalInputStream(encodedIn); + input = CompressorFactory.COMPRESSOR.threadLocalStreamInput(encodedIn); + } else { + input = new InputStreamStreamInput(encodedIn); } - try (StreamInput in = new NamedWriteableAwareStreamInput(new InputStreamStreamInput(encodedIn), registry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(input, registry)) { in.setTransportVersion(version); return reader.read(in); } diff --git a/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AsyncEqlSearchActionIT.java b/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AsyncEqlSearchActionIT.java index 0ae114894948d..5387e5b08c2fd 100644 --- a/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AsyncEqlSearchActionIT.java +++ b/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AsyncEqlSearchActionIT.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.stream.ByteBufferStreamInput; -import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; @@ -41,7 +40,6 @@ import org.hamcrest.Description; import org.junit.After; -import java.io.InputStream; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Base64; @@ -312,8 +310,12 @@ public StoredAsyncResponse getStoredRecord(String id) throws String value = doc.getSource().get("result").toString(); try (ByteBufferStreamInput buf = new ByteBufferStreamInput(ByteBuffer.wrap(Base64.getDecoder().decode(value)))) { TransportVersion version = TransportVersion.readVersion(buf); - final InputStream compressedIn = CompressorFactory.COMPRESSOR.threadLocalInputStream(buf); - try (StreamInput in = new NamedWriteableAwareStreamInput(new InputStreamStreamInput(compressedIn), registry)) { + try ( + StreamInput in = new NamedWriteableAwareStreamInput( + CompressorFactory.COMPRESSOR.threadLocalStreamInput(buf), + registry + ) + ) { in.setTransportVersion(version); return new StoredAsyncResponse<>(EqlSearchResponse::new, in); } diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AsyncSqlSearchActionIT.java b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AsyncSqlSearchActionIT.java index ab8f0922097b5..55b88a633a58a 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AsyncSqlSearchActionIT.java +++ b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AsyncSqlSearchActionIT.java @@ -38,7 
+38,6 @@ import org.elasticsearch.xpack.sql.plugin.SqlAsyncGetResultsAction; import org.junit.After; -import java.io.InputStream; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Base64; @@ -301,8 +300,12 @@ public StoredAsyncResponse getStoredRecord(String id) throws E String value = doc.getSource().get("result").toString(); try (ByteBufferStreamInput buf = new ByteBufferStreamInput(ByteBuffer.wrap(Base64.getDecoder().decode(value)))) { TransportVersion version = TransportVersion.readVersion(buf); - final InputStream compressedIn = CompressorFactory.COMPRESSOR.threadLocalInputStream(buf); - try (StreamInput in = new NamedWriteableAwareStreamInput(new InputStreamStreamInput(compressedIn), registry)) { + try ( + StreamInput in = new NamedWriteableAwareStreamInput( + new InputStreamStreamInput(CompressorFactory.COMPRESSOR.threadLocalStreamInput(buf)), + registry + ) + ) { in.setTransportVersion(version); return new StoredAsyncResponse<>(SqlQueryResponse::new, in); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamInput.java index 456067fba6b04..0c6074357975c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamInput.java @@ -10,7 +10,6 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.compress.CompressorFactory; -import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; @@ -32,8 +31,7 @@ public static SqlStreamInput fromString(String base64encoded, NamedWriteableRegi StreamInput in = StreamInput.wrap(bytes); TransportVersion inVersion = TransportVersion.readVersion(in); validateStreamVersion(version, inVersion); - InputStreamStreamInput uncompressingIn = new InputStreamStreamInput(CompressorFactory.COMPRESSOR.threadLocalInputStream(in)); - return new SqlStreamInput(uncompressingIn, namedWriteableRegistry, inVersion); + return new SqlStreamInput(CompressorFactory.COMPRESSOR.threadLocalStreamInput(in), namedWriteableRegistry, inVersion); } /** From 3e8383e63c29ff32730ad7c660ddd7f7a0d75b41 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Tue, 9 Jan 2024 10:30:03 +0100 Subject: [PATCH 27/47] Expose async APIs as Serverless-public (#104100) This adds the needed decorators to expose the async APIs as public in Serverless. 
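The change itself is mechanical: each REST handler opts in with a single class-level annotation. As a rough sketch of the pattern (the handler class, action name, and route below are hypothetical; only the `ServerlessScope`/`Scope` API comes from the diffs that follow):

import java.io.IOException;
import java.util.List;

import org.elasticsearch.client.internal.node.NodeClient;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.Scope;
import org.elasticsearch.rest.ServerlessScope;

import static org.elasticsearch.rest.RestRequest.Method.GET;

// Scope.PUBLIC exposes the endpoint to Serverless users; Scope.INTERNAL keeps it
// restricted to internal clients.
@ServerlessScope(Scope.PUBLIC)
public class RestExampleAsyncAction extends BaseRestHandler {

    @Override
    public String getName() {
        return "example_async_action"; // hypothetical action name
    }

    @Override
    public List<Route> routes() {
        return List.of(new Route(GET, "/_example/async")); // hypothetical route
    }

    @Override
    protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
        return channel -> { /* dispatch to the corresponding transport action */ };
    }
}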
--- .../xpack/esql/action/RestEsqlAsyncQueryAction.java | 3 +++ .../xpack/esql/action/RestEsqlDeleteAsyncResultAction.java | 3 +++ .../xpack/esql/action/RestEsqlGetAsyncResultAction.java | 3 +++ 3 files changed, 9 insertions(+) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java index 04b37616b3ebf..3dea461ccf8b7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java @@ -12,6 +12,8 @@ import org.elasticsearch.logging.Logger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.xcontent.XContentParser; @@ -23,6 +25,7 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.xpack.esql.formatter.TextFormat.URL_PARAM_DELIMITER; +@ServerlessScope(Scope.PUBLIC) public class RestEsqlAsyncQueryAction extends BaseRestHandler { private static final Logger LOGGER = LogManager.getLogger(RestEsqlAsyncQueryAction.class); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlDeleteAsyncResultAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlDeleteAsyncResultAction.java index d9b04dc040b8f..7a325bd16b29f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlDeleteAsyncResultAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlDeleteAsyncResultAction.java @@ -11,6 +11,8 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.async.DeleteAsyncResultRequest; import org.elasticsearch.xpack.core.async.TransportDeleteAsyncResultAction; @@ -19,6 +21,7 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; +@ServerlessScope(Scope.PUBLIC) public class RestEsqlDeleteAsyncResultAction extends BaseRestHandler { @Override public List routes() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlGetAsyncResultAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlGetAsyncResultAction.java index 9d83ed117be76..35a679e23d1f7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlGetAsyncResultAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlGetAsyncResultAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestRefCountedChunkedToXContentListener; import org.elasticsearch.xpack.core.async.GetAsyncResultRequest; @@ -17,6 +19,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; +@ServerlessScope(Scope.PUBLIC) public class 
RestEsqlGetAsyncResultAction extends BaseRestHandler { @Override public List routes() { From ec2e18536da1f1246b1fe20ee5d23b66b94dea19 Mon Sep 17 00:00:00 2001 From: Nicolas Gras Date: Tue, 9 Jan 2024 04:37:42 -0500 Subject: [PATCH 28/47] Esql/create DATE_DIFF function (#103208) --- docs/changelog/103208.yaml | 6 + .../esql/functions/date_diff.asciidoc | 37 +++ .../esql/functions/signature/date_diff.svg | 1 + .../esql/functions/types/date_diff.asciidoc | 6 + .../src/main/resources/date.csv-spec | 61 +++++ .../src/main/resources/show.csv-spec | 4 +- .../date/DateDiffConstantEvaluator.java | 154 ++++++++++++ .../scalar/date/DateDiffEvaluator.java | 176 ++++++++++++++ .../function/EsqlFunctionRegistry.java | 2 + .../function/scalar/date/DateDiff.java | 222 ++++++++++++++++++ .../function/scalar/date/DateTimeField.java | 50 ++++ .../xpack/esql/io/stream/PlanNamedTypes.java | 15 ++ .../function/scalar/date/DateDiffTests.java | 192 +++++++++++++++ 13 files changed, 925 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/103208.yaml create mode 100644 docs/reference/esql/functions/date_diff.asciidoc create mode 100644 docs/reference/esql/functions/signature/date_diff.svg create mode 100644 docs/reference/esql/functions/types/date_diff.asciidoc create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTimeField.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java diff --git a/docs/changelog/103208.yaml b/docs/changelog/103208.yaml new file mode 100644 index 0000000000000..dbd25835b4bd7 --- /dev/null +++ b/docs/changelog/103208.yaml @@ -0,0 +1,6 @@ +pr: 103208 +summary: ES|QL - create DATE_DIFF function +area: ES|QL +type: enhancement +issues: + - 101942 diff --git a/docs/reference/esql/functions/date_diff.asciidoc b/docs/reference/esql/functions/date_diff.asciidoc new file mode 100644 index 0000000000000..6127290466b10 --- /dev/null +++ b/docs/reference/esql/functions/date_diff.asciidoc @@ -0,0 +1,37 @@ +[discrete] +[[esql-date_diff]] +=== `DATE_DIFF` +Subtract the second argument from the third argument and return their difference in multiples of the unit specified in the first argument. +If the second argument (start) is greater than the third argument (end), then negative values are returned.
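+
+As a plain-Java illustration of these semantics (an editorial sketch mirroring the `java.time` arithmetic the implementation uses; the timestamps and results are taken from the csv-spec examples below):
+
+[source,java]
+----
+import java.time.ZonedDateTime;
+import java.time.temporal.ChronoUnit;
+
+public class DateDiffSemantics {
+    public static void main(String[] args) {
+        ZonedDateTime start = ZonedDateTime.parse("2023-12-02T11:00:00Z");
+        ZonedDateTime end = ZonedDateTime.parse("2023-12-02T12:00:00Z");
+        // end minus start, in whole hours: prints 1
+        System.out.println(ChronoUnit.HOURS.between(start, end));
+        // start greater than end yields a negative difference: prints -1
+        System.out.println(ChronoUnit.HOURS.between(end, start));
+    }
+}
+----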
+ +[cols="^,^"] +|=== +2+h|Datetime difference units + +s|unit +s|abbreviations + +| year | years, yy, yyyy +| quarter | quarters, qq, q +| month | months, mm, m +| dayofyear | dy, y +| day | days, dd, d +| week | weeks, wk, ww +| weekday | weekdays, dw +| hour | hours, hh +| minute | minutes, mi, n +| second | seconds, ss, s +| millisecond | milliseconds, ms +| microsecond | microseconds, mcs +| nanosecond | nanoseconds, ns +|=== + +[source.merge.styled,esql] +---- +include::{esql-specs}/docs.csv-spec[tag=dateDiff] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/docs.csv-spec[tag=dateDiff-result] +|=== + diff --git a/docs/reference/esql/functions/signature/date_diff.svg b/docs/reference/esql/functions/signature/date_diff.svg new file mode 100644 index 0000000000000..6563ec6576927 --- /dev/null +++ b/docs/reference/esql/functions/signature/date_diff.svg @@ -0,0 +1 @@ +DATE_DIFF(unit,startTimestamp,endTimestamp) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/date_diff.asciidoc b/docs/reference/esql/functions/types/date_diff.asciidoc new file mode 100644 index 0000000000000..b4e5c6ad5e0b5 --- /dev/null +++ b/docs/reference/esql/functions/types/date_diff.asciidoc @@ -0,0 +1,6 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +unit | startTimestamp | endTimestamp | result +keyword | datetime | datetime | integer +text | datetime | datetime | integer +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 509257c4c8b4f..9eabbf9bf6c6d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -337,6 +337,67 @@ AVG(salary):double | bucket:date // end::auto_bucket_in_agg-result[] ; +evalDateDiffInNanoAndMicroAndMilliSeconds +ROW date1=to_datetime("2023-12-02T11:00:00.000Z"), date2=to_datetime("2023-12-02T11:00:00.001Z") +| EVAL dd_ns1=date_diff("nanoseconds", date1, date2), dd_ns2=date_diff("ns", date1, date2) +| EVAL dd_mcs1=date_diff("microseconds", date1, date2), dd_mcs2=date_diff("mcs", date1, date2) +| EVAL dd_ms1=date_diff("milliseconds", date1, date2), dd_ms2=date_diff("ms", date1, date2) +| keep dd_ns1, dd_ns2, dd_mcs1, dd_mcs2, dd_ms1, dd_ms2 +; + +dd_ns1:integer | dd_ns2:integer | dd_mcs1:integer | dd_mcs2:integer | dd_ms1:integer | dd_ms2:integer +1000000 | 1000000 | 1000 | 1000 | 1 | 1 +; + +evalDateDiffInSecondsAndMinutesAndHours +ROW date1=to_datetime("2023-12-02T11:00:00.000Z"), date2=to_datetime("2023-12-02T12:00:00.000Z") +| EVAL dd_s1=date_diff("seconds", date1, date2), dd_s2=date_diff("ss", date1, date2), dd_s3=date_diff("s", date1, date2) +| EVAL dd_m1=date_diff("minutes", date1, date2), dd_m2=date_diff("mi", date1, date2), dd_m3=date_diff("n", date1, date2) +| EVAL dd_h1=date_diff("hours", date1, date2), dd_h2=date_diff("hh", date1, date2) +| keep dd_s1, dd_s2, dd_s3, dd_m1, dd_m2, dd_m3, dd_h1, dd_h2 +; + +dd_s1:integer | dd_s2:integer | dd_s3:integer | dd_m1:integer | dd_m2:integer | dd_m3:integer | dd_h1:integer | dd_h2:integer +3600 | 3600 | 3600 | 60 | 60 | 60 | 1 | 1 +; + +evalDateDiffInDaysAndWeeks +ROW date1=to_datetime("2023-12-02T11:00:00.000Z"), date2=to_datetime("2023-12-24T11:00:00.000Z") +| EVAL dd_wd1=date_diff("weekdays", date1, date2), dd_wd2=date_diff("dw", date1, date2) +| EVAL dd_w1=date_diff("weeks", date1, date2), dd_w2=date_diff("wk", date1, date2), dd_w3=date_diff("ww", 
date1, date2) +| EVAL dd_d1=date_diff("dy", date1, date2), dd_d2=date_diff("y", date1, date2) +| EVAL dd_dy1=date_diff("days", date1, date2), dd_dy2=date_diff("dd", date1, date2), dd_dy3=date_diff("d", date1, date2) +| keep dd_wd1, dd_wd2, dd_w1, dd_w2, dd_w3, dd_d1, dd_d2, dd_dy1, dd_dy2, dd_dy3 +; + +dd_wd1:integer | dd_wd2:integer | dd_w1:integer | dd_w2:integer | dd_w3:integer | dd_d1:integer | dd_d2:integer | dd_dy1:integer | dd_dy2:integer | dd_dy3:integer +22 | 22 | 3 | 3 | 3 | 22 | 22 | 22 | 22 | 22 +; + +evalDateDiffInMonthsAndQuartersAndYears +ROW date1=to_datetime("2023-12-02T11:00:00.000Z"), date2=to_datetime("2024-12-24T11:00:00.000Z") +| EVAL dd_m1=date_diff("months", date1, date2), dd_m2=date_diff("mm", date1, date2), dd_m3=date_diff("m", date1, date2) +| EVAL dd_q1=date_diff("quarters", date1, date2), dd_q2=date_diff("qq", date1, date2), dd_q3=date_diff("q", date1, date2) +| EVAL dd_y1=date_diff("years", date1, date2), dd_y2=date_diff("yyyy", date1, date2), dd_y3=date_diff("yy", date1, date2) +| keep dd_m1, dd_m2, dd_m3, dd_q1, dd_q2, dd_q3, dd_y1, dd_y2, dd_y3 +; + +dd_m1:integer | dd_m2:integer | dd_m3:integer | dd_q1:integer | dd_q2:integer | dd_q3:integer | dd_y1:integer | dd_y2:integer | dd_y3:integer +12 | 12 | 12 | 4 | 4 | 4 | 1 | 1 | 1 +; + +evalDateDiffErrorOutOfIntegerRange +ROW date1=to_datetime("2023-12-02T11:00:00.000Z"), date2=to_datetime("2023-12-23T11:00:00.000Z") +| EVAL dd_oo=date_diff("nanoseconds", date1, date2) +| keep dd_oo +; +warning: Line 2:14: evaluation of [date_diff(\"nanoseconds\", date1, date2)] failed, treating result as null. Only first 20 failures recorded. +warning: Line 2:14: org.elasticsearch.xpack.ql.InvalidArgumentException: [1814400000000000] out of [integer] range + +dd_oo:integer +null +; + evalDateParseWithSimpleDate row a = "2023-02-01" | eval b = date_parse("yyyy-MM-dd", a) | keep b; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec index 024ccc9883be2..3aec009ec5aa3 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec @@ -26,6 +26,7 @@ cos |"double cos(n:integer|long|double|unsigned_long)" cosh |"double cosh(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "The number who's hyperbolic cosine is to be returned" |double | "Returns the hyperbolic cosine of a number" | false | false count |? count(arg1:?) |arg1 |? | "" |? | "" | false | false count_distinct |? count_distinct(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false +date_diff |"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"|[unit, startTimestamp, endTimestamp] |["keyword|text", "date", "date"] |["A valid date unit", "A string representing a start timestamp", "A string representing an end timestamp"] |integer | "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" | [false, false, false] | false date_extract |? date_extract(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false date_format |? date_format(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? 
| "" | [false, false] | false date_parse |"date date_parse(?datePattern:keyword, dateString:keyword|text)"|[datePattern, dateString]|["keyword", "keyword|text"]|[A valid date pattern, A string representing a date]|date |Parses a string into a date value | [true, false] | false @@ -117,6 +118,7 @@ synopsis:keyword "double cosh(n:integer|long|double|unsigned_long)" ? count(arg1:?) ? count_distinct(arg1:?, arg2:?) +"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)" ? date_extract(arg1:?, arg2:?) ? date_format(arg1:?, arg2:?) "date date_parse(?datePattern:keyword, dateString:keyword|text)" @@ -209,5 +211,5 @@ countFunctions#[skip:-8.11.99] show functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -84 | 84 | 84 +85 | 85 | 85 ; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java new file mode 100644 index 0000000000000..3cb41d0028d54 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java @@ -0,0 +1,154 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.InvalidArgumentException; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. 
+ */ +public final class DateDiffConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator startTimestamp; + + private final EvalOperator.ExpressionEvaluator endTimestamp; + + private final DriverContext driverContext; + + public DateDiffConstantEvaluator(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator startTimestamp, + EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { + this.warnings = new Warnings(source); + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock startTimestampBlock = (LongBlock) startTimestamp.eval(page)) { + try (LongBlock endTimestampBlock = (LongBlock) endTimestamp.eval(page)) { + LongVector startTimestampVector = startTimestampBlock.asVector(); + if (startTimestampVector == null) { + return eval(page.getPositionCount(), startTimestampBlock, endTimestampBlock); + } + LongVector endTimestampVector = endTimestampBlock.asVector(); + if (endTimestampVector == null) { + return eval(page.getPositionCount(), startTimestampBlock, endTimestampBlock); + } + return eval(page.getPositionCount(), startTimestampVector, endTimestampVector); + } + } + } + + public IntBlock eval(int positionCount, LongBlock startTimestampBlock, + LongBlock endTimestampBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (startTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startTimestampBlock.getValueCount(p) != 1) { + if (startTimestampBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampBlock.getValueCount(p) != 1) { + if (endTimestampBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.process(datePartFieldUnit, startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, LongVector startTimestampVector, + LongVector endTimestampVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(DateDiff.process(datePartFieldUnit, startTimestampVector.getLong(p), endTimestampVector.getLong(p))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffConstantEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + + 
@Override + public void close() { + Releasables.closeExpectNoException(startTimestamp, endTimestamp); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestamp; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestamp; + + public Factory(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator.Factory startTimestamp, + EvalOperator.ExpressionEvaluator.Factory endTimestamp) { + this.source = source; + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + } + + @Override + public DateDiffConstantEvaluator get(DriverContext context) { + return new DateDiffConstantEvaluator(source, datePartFieldUnit, startTimestamp.get(context), endTimestamp.get(context), context); + } + + @Override + public String toString() { + return "DateDiffConstantEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java new file mode 100644 index 0000000000000..952a819a014a9 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java @@ -0,0 +1,176 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.InvalidArgumentException; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. 
+ */ +public final class DateDiffEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator unit; + + private final EvalOperator.ExpressionEvaluator startTimestamp; + + private final EvalOperator.ExpressionEvaluator endTimestamp; + + private final DriverContext driverContext; + + public DateDiffEvaluator(Source source, EvalOperator.ExpressionEvaluator unit, + EvalOperator.ExpressionEvaluator startTimestamp, + EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { + this.warnings = new Warnings(source); + this.unit = unit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock unitBlock = (BytesRefBlock) unit.eval(page)) { + try (LongBlock startTimestampBlock = (LongBlock) startTimestamp.eval(page)) { + try (LongBlock endTimestampBlock = (LongBlock) endTimestamp.eval(page)) { + BytesRefVector unitVector = unitBlock.asVector(); + if (unitVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); + } + LongVector startTimestampVector = startTimestampBlock.asVector(); + if (startTimestampVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); + } + LongVector endTimestampVector = endTimestampBlock.asVector(); + if (endTimestampVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); + } + return eval(page.getPositionCount(), unitVector, startTimestampVector, endTimestampVector); + } + } + } + } + + public IntBlock eval(int positionCount, BytesRefBlock unitBlock, LongBlock startTimestampBlock, + LongBlock endTimestampBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (unitBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (unitBlock.getValueCount(p) != 1) { + if (unitBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (startTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startTimestampBlock.getValueCount(p) != 1) { + if (startTimestampBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampBlock.getValueCount(p) != 1) { + if (endTimestampBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.process(unitBlock.getBytesRef(unitBlock.getFirstValueIndex(p), unitScratch), startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, BytesRefVector unitVector, + LongVector 
startTimestampVector, LongVector endTimestampVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(DateDiff.process(unitVector.getBytesRef(p, unitScratch), startTimestampVector.getLong(p), endTimestampVector.getLong(p))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffEvaluator[" + "unit=" + unit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(unit, startTimestamp, endTimestamp); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory unit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestamp; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestamp; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory unit, + EvalOperator.ExpressionEvaluator.Factory startTimestamp, + EvalOperator.ExpressionEvaluator.Factory endTimestamp) { + this.source = source; + this.unit = unit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + } + + @Override + public DateDiffEvaluator get(DriverContext context) { + return new DateDiffEvaluator(source, unit.get(context), startTimestamp.get(context), endTimestamp.get(context), context); + } + + @Override + public String toString() { + return "DateDiffEvaluator[" + "unit=" + unit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index b0cdad5095bbe..3b76141fa541e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -32,6 +32,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToUnsignedLong; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToVersion; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateDiff; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateExtract; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateFormat; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; @@ -160,6 +161,7 @@ private FunctionDefinition[][] functions() { def(EndsWith.class, EndsWith::new, "ends_with") }, // date new FunctionDefinition[] { + def(DateDiff.class, DateDiff::new, "date_diff"), def(DateExtract.class, DateExtract::new, "date_extract"), def(DateFormat.class, DateFormat::new, "date_format"), def(DateParse.class, DateParse::new, "date_parse"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java new file mode 100644 index 0000000000000..63e8672ac620f --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java @@ -0,0 +1,222 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.ql.InvalidArgumentException; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; +import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; +import org.elasticsearch.xpack.ql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.time.Instant; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.time.temporal.ChronoUnit; +import java.time.temporal.IsoFields; +import java.util.Arrays; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.function.BiFunction; +import java.util.function.Function; + +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.THIRD; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isDate; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; +import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToInt; + +/** + * Subtract the second argument from the third argument and return their difference + * in multiples of the unit specified in the first argument. + * If the second argument (start) is greater than the third argument (end), then negative values are returned. 
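+ * <p>
+ * For example, with unit {@code "hours"}, a start of {@code 2023-12-02T11:00:00Z} and an end of
+ * {@code 2023-12-02T12:00:00Z} yield {@code 1}; swapping start and end yields {@code -1}.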
+ */ +public class DateDiff extends ScalarFunction implements OptionalArgument, EvaluatorMapper { + + public static final ZoneId UTC = ZoneId.of("Z"); + + private final Expression unit; + private final Expression startTimestamp; + private final Expression endTimestamp; + + /** + * Represents units that can be used for DATE_DIFF function and how the difference + * between 2 dates is calculated + */ + public enum Part implements DateTimeField { + + YEAR((start, end) -> end.getYear() - start.getYear(), "years", "yyyy", "yy"), + QUARTER((start, end) -> safeToInt(IsoFields.QUARTER_YEARS.between(start, end)), "quarters", "qq", "q"), + MONTH((start, end) -> safeToInt(ChronoUnit.MONTHS.between(start, end)), "months", "mm", "m"), + DAYOFYEAR((start, end) -> safeToInt(ChronoUnit.DAYS.between(start, end)), "dy", "y"), + DAY(DAYOFYEAR::diff, "days", "dd", "d"), + WEEK((start, end) -> safeToInt(ChronoUnit.WEEKS.between(start, end)), "weeks", "wk", "ww"), + WEEKDAY(DAYOFYEAR::diff, "weekdays", "dw"), + HOUR((start, end) -> safeToInt(ChronoUnit.HOURS.between(start, end)), "hours", "hh"), + MINUTE((start, end) -> safeToInt(ChronoUnit.MINUTES.between(start, end)), "minutes", "mi", "n"), + SECOND((start, end) -> safeToInt(ChronoUnit.SECONDS.between(start, end)), "seconds", "ss", "s"), + MILLISECOND((start, end) -> safeToInt(ChronoUnit.MILLIS.between(start, end)), "milliseconds", "ms"), + MICROSECOND((start, end) -> safeToInt(ChronoUnit.MICROS.between(start, end)), "microseconds", "mcs"), + NANOSECOND((start, end) -> safeToInt(ChronoUnit.NANOS.between(start, end)), "nanoseconds", "ns"); + + private static final Map NAME_TO_PART = DateTimeField.initializeResolutionMap(values()); + + private final BiFunction diffFunction; + private final Set aliases; + + Part(BiFunction diffFunction, String... 
aliases) { + this.diffFunction = diffFunction; + this.aliases = Set.of(aliases); + } + + public Integer diff(ZonedDateTime startTimestamp, ZonedDateTime endTimestamp) { + return diffFunction.apply(startTimestamp, endTimestamp); + } + + @Override + public Iterable aliases() { + return aliases; + } + + public static Part resolve(String dateTimeUnit) { + Part datePartField = DateTimeField.resolveMatch(NAME_TO_PART, dateTimeUnit); + if (datePartField == null) { + List similar = DateTimeField.findSimilar(NAME_TO_PART.keySet(), dateTimeUnit); + String errorMessage; + if (similar.isEmpty() == false) { + errorMessage = String.format( + Locale.ROOT, + "Received value [%s] is not valid date part to add; did you mean %s?", + dateTimeUnit, + similar + ); + } + else { + errorMessage = String.format( + Locale.ROOT, + "A value of %s or their aliases is required; received [%s]", + Arrays.asList(Part.values()), + dateTimeUnit + ); + } + throw new IllegalArgumentException(errorMessage); + } + + return datePartField; + } + } + + @FunctionInfo( + returnType = "integer", + description = "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" + ) + public DateDiff( + Source source, + @Param(name = "unit", type = { "keyword", "text" }, description = "A valid date unit") Expression unit, + @Param( + name = "startTimestamp", + type = { "date" }, + description = "A string representing a start timestamp" + ) Expression startTimestamp, + @Param(name = "endTimestamp", type = { "date" }, description = "A string representing an end timestamp") Expression endTimestamp + ) { + super(source, List.of(unit, startTimestamp, endTimestamp)); + this.unit = unit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + } + + @Evaluator(extraName = "Constant", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static int process(@Fixed Part datePartFieldUnit, long startTimestamp, long endTimestamp) throws IllegalArgumentException { + ZonedDateTime zdtStart = ZonedDateTime.ofInstant(Instant.ofEpochMilli(startTimestamp), UTC); + ZonedDateTime zdtEnd = ZonedDateTime.ofInstant(Instant.ofEpochMilli(endTimestamp), UTC); + return datePartFieldUnit.diff(zdtStart, zdtEnd); + } + + @Evaluator(warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static int process(BytesRef unit, long startTimestamp, long endTimestamp) throws IllegalArgumentException { + return process(Part.resolve(unit.utf8ToString()), startTimestamp, endTimestamp); + } + + @Override + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + ExpressionEvaluator.Factory startTimestampEvaluator = toEvaluator.apply(startTimestamp); + ExpressionEvaluator.Factory endTimestampEvaluator = toEvaluator.apply(endTimestamp); + + if (unit.foldable()) { + try { + Part datePartField = Part.resolve(((BytesRef) unit.fold()).utf8ToString()); + return new DateDiffConstantEvaluator.Factory(source(), datePartField, startTimestampEvaluator, endTimestampEvaluator); + } catch (IllegalArgumentException e) { + throw new InvalidArgumentException("invalid unit format for [{}]: {}", sourceText(), e.getMessage()); + } + } + ExpressionEvaluator.Factory unitEvaluator = toEvaluator.apply(unit); + return new DateDiffEvaluator.Factory(source(), unitEvaluator, startTimestampEvaluator, endTimestampEvaluator); + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + 
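+ // The unit must resolve to a string (keyword or text) and both timestamps to dates;
+ // on failure, each check reports the offending argument's position (first/second/third).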
+ TypeResolution resolution = isString(unit, sourceText(), FIRST).and(isDate(startTimestamp, sourceText(), SECOND)) + .and(isDate(endTimestamp, sourceText(), THIRD)); + + if (resolution.unresolved()) { + return resolution; + } + + return TypeResolution.TYPE_RESOLVED; + } + + @Override + public Object fold() { + return EvaluatorMapper.super.fold(); + } + + @Override + public boolean foldable() { + return unit.foldable() && startTimestamp.foldable() && endTimestamp.foldable(); + } + + @Override + public DataType dataType() { + return DataTypes.INTEGER; + } + + @Override + public ScriptTemplate asScript() { + throw new UnsupportedOperationException("functions do not support scripting"); + } + + @Override + public Expression replaceChildren(List newChildren) { + return new DateDiff(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, DateDiff::new, children().get(0), children().get(1), children().get(2)); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTimeField.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTimeField.java new file mode 100644 index 0000000000000..85651af67e8e3 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTimeField.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import org.elasticsearch.xpack.ql.util.StringUtils; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.stream.Collectors; + +public interface DateTimeField { + + static Map initializeResolutionMap(D[] values) { + Map nameToPart = new HashMap<>(); + + for (D datePart : values) { + String lowerCaseName = datePart.name().toLowerCase(Locale.ROOT); + + nameToPart.put(lowerCaseName, datePart); + for (String alias : datePart.aliases()) { + nameToPart.put(alias, datePart); + } + } + return Collections.unmodifiableMap(nameToPart); + } + + static List initializeValidValues(D[] values) { + return Arrays.stream(values).map(D::name).collect(Collectors.toList()); + } + + static D resolveMatch(Map resolutionMap, String possibleMatch) { + return resolutionMap.get(possibleMatch.toLowerCase(Locale.ROOT)); + } + + static List findSimilar(Iterable similars, String match) { + return StringUtils.findSimilar(match, similars); + } + + String name(); + + Iterable aliases(); +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 8feb8fb18f443..4f03f7a7d72ff 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -51,6 +51,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToUnsignedLong; import 
org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToVersion; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateDiff; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateExtract; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateFormat; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; @@ -350,6 +351,7 @@ public static List namedTypeEntries() { of(ScalarFunction.class, CIDRMatch.class, PlanNamedTypes::writeCIDRMatch, PlanNamedTypes::readCIDRMatch), of(ScalarFunction.class, Coalesce.class, PlanNamedTypes::writeVararg, PlanNamedTypes::readVarag), of(ScalarFunction.class, Concat.class, PlanNamedTypes::writeVararg, PlanNamedTypes::readVarag), + of(ScalarFunction.class, DateDiff.class, PlanNamedTypes::writeDateDiff, PlanNamedTypes::readDateDiff), of(ScalarFunction.class, DateExtract.class, PlanNamedTypes::writeDateExtract, PlanNamedTypes::readDateExtract), of(ScalarFunction.class, DateFormat.class, PlanNamedTypes::writeDateFormat, PlanNamedTypes::readDateFormat), of(ScalarFunction.class, DateParse.class, PlanNamedTypes::writeDateTimeParse, PlanNamedTypes::readDateTimeParse), @@ -1294,6 +1296,19 @@ static void writeCountDistinct(PlanStreamOutput out, CountDistinct countDistinct out.writeOptionalWriteable(fields.size() == 2 ? o -> out.writeExpression(fields.get(1)) : null); } + static DateDiff readDateDiff(PlanStreamInput in) throws IOException { + return new DateDiff(in.readSource(), in.readExpression(), in.readExpression(), in.readExpression()); + } + + static void writeDateDiff(PlanStreamOutput out, DateDiff function) throws IOException { + out.writeNoSource(); + List fields = function.children(); + assert fields.size() == 3; + out.writeExpression(fields.get(0)); + out.writeExpression(fields.get(1)); + out.writeExpression(fields.get(2)); + } + static DateExtract readDateExtract(PlanStreamInput in) throws IOException { return new DateExtract(in.readSource(), in.readExpression(), in.readExpression(), in.configuration()); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java new file mode 100644 index 0000000000000..15d0cca454407 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java @@ -0,0 +1,192 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.InvalidArgumentException; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.time.ZonedDateTime; +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class DateDiffTests extends AbstractFunctionTestCase { + public DateDiffTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + ZonedDateTime zdtStart = ZonedDateTime.parse("2023-12-04T10:15:30Z"); + ZonedDateTime zdtEnd = ZonedDateTime.parse("2023-12-05T10:45:00Z"); + + return parameterSuppliersFromTypedData( + List.of( + new TestCaseSupplier( + "Date Diff In Seconds - OK", + List.of(DataTypes.KEYWORD, DataTypes.DATETIME, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("seconds"), DataTypes.KEYWORD, "unit"), + new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.DATETIME, "endTimestamp") + ), + "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + + "endTimestamp=Attribute[channel=2]]", + DataTypes.INTEGER, + equalTo(88170) + ) + ), + new TestCaseSupplier( + "Date Diff In Seconds with text- OK", + List.of(DataTypes.TEXT, DataTypes.DATETIME, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("seconds"), DataTypes.TEXT, "unit"), + new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.DATETIME, "endTimestamp") + ), + "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + + "endTimestamp=Attribute[channel=2]]", + DataTypes.INTEGER, + equalTo(88170) + ) + ), + new TestCaseSupplier( + "Date Diff Error Type unit", + List.of(DataTypes.INTEGER, DataTypes.DATETIME, DataTypes.DATETIME), + () -> TestCaseSupplier.TestCase.typeError( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("seconds"), DataTypes.INTEGER, "unit"), + new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.DATETIME, "endTimestamp") + ), + "first argument of [] must be [string], found value [unit] type [integer]" + ) + ), + new TestCaseSupplier( + "Date Diff Error Type startTimestamp", + List.of(DataTypes.TEXT, DataTypes.INTEGER, DataTypes.DATETIME), + () -> TestCaseSupplier.TestCase.typeError( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("minutes"), DataTypes.TEXT, "unit"), + new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.INTEGER, "startTimestamp"), + new 
TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.DATETIME, "endTimestamp") + ), + "second argument of [] must be [datetime], found value [startTimestamp] type [integer]" + ) + ), + new TestCaseSupplier( + "Date Diff Error Type endTimestamp", + List.of(DataTypes.TEXT, DataTypes.DATETIME, DataTypes.INTEGER), + () -> TestCaseSupplier.TestCase.typeError( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("minutes"), DataTypes.TEXT, "unit"), + new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.INTEGER, "endTimestamp") + ), + "third argument of [] must be [datetime], found value [endTimestamp] type [integer]" + ) + ) + ) + ); + } + + public void testDateDiffFunction() { + ZonedDateTime zdtStart = ZonedDateTime.parse("2023-12-04T10:15:00Z"); + ZonedDateTime zdtEnd = ZonedDateTime.parse("2023-12-04T10:15:01Z"); + long startTimestamp = zdtStart.toInstant().toEpochMilli(); + long endTimestamp = zdtEnd.toInstant().toEpochMilli(); + + assertEquals(1000000000, DateDiff.process(new BytesRef("nanoseconds"), startTimestamp, endTimestamp)); + assertEquals(1000000000, DateDiff.process(new BytesRef("ns"), startTimestamp, endTimestamp)); + assertEquals(1000000, DateDiff.process(new BytesRef("microseconds"), startTimestamp, endTimestamp)); + assertEquals(1000000, DateDiff.process(new BytesRef("mcs"), startTimestamp, endTimestamp)); + assertEquals(1000, DateDiff.process(new BytesRef("milliseconds"), startTimestamp, endTimestamp)); + assertEquals(1000, DateDiff.process(new BytesRef("ms"), startTimestamp, endTimestamp)); + assertEquals(1, DateDiff.process(new BytesRef("seconds"), startTimestamp, endTimestamp)); + assertEquals(1, DateDiff.process(new BytesRef("ss"), startTimestamp, endTimestamp)); + assertEquals(1, DateDiff.process(new BytesRef("s"), startTimestamp, endTimestamp)); + + zdtEnd = zdtEnd.plusYears(1); + endTimestamp = zdtEnd.toInstant().toEpochMilli(); + + assertEquals(527040, DateDiff.process(new BytesRef("minutes"), startTimestamp, endTimestamp)); + assertEquals(527040, DateDiff.process(new BytesRef("mi"), startTimestamp, endTimestamp)); + assertEquals(527040, DateDiff.process(new BytesRef("n"), startTimestamp, endTimestamp)); + assertEquals(8784, DateDiff.process(new BytesRef("hours"), startTimestamp, endTimestamp)); + assertEquals(8784, DateDiff.process(new BytesRef("hh"), startTimestamp, endTimestamp)); + assertEquals(366, DateDiff.process(new BytesRef("weekdays"), startTimestamp, endTimestamp)); + assertEquals(366, DateDiff.process(new BytesRef("dw"), startTimestamp, endTimestamp)); + assertEquals(52, DateDiff.process(new BytesRef("weeks"), startTimestamp, endTimestamp)); + assertEquals(52, DateDiff.process(new BytesRef("wk"), startTimestamp, endTimestamp)); + assertEquals(52, DateDiff.process(new BytesRef("ww"), startTimestamp, endTimestamp)); + assertEquals(366, DateDiff.process(new BytesRef("days"), startTimestamp, endTimestamp)); + assertEquals(366, DateDiff.process(new BytesRef("dd"), startTimestamp, endTimestamp)); + assertEquals(366, DateDiff.process(new BytesRef("d"), startTimestamp, endTimestamp)); + assertEquals(366, DateDiff.process(new BytesRef("dy"), startTimestamp, endTimestamp)); + assertEquals(366, DateDiff.process(new BytesRef("y"), startTimestamp, endTimestamp)); + assertEquals(12, DateDiff.process(new BytesRef("months"), startTimestamp, endTimestamp)); + assertEquals(12, DateDiff.process(new BytesRef("mm"), 
startTimestamp, endTimestamp)); + assertEquals(12, DateDiff.process(new BytesRef("m"), startTimestamp, endTimestamp)); + assertEquals(4, DateDiff.process(new BytesRef("quarters"), startTimestamp, endTimestamp)); + assertEquals(4, DateDiff.process(new BytesRef("qq"), startTimestamp, endTimestamp)); + assertEquals(4, DateDiff.process(new BytesRef("q"), startTimestamp, endTimestamp)); + assertEquals(1, DateDiff.process(new BytesRef("years"), startTimestamp, endTimestamp)); + assertEquals(1, DateDiff.process(new BytesRef("yyyy"), startTimestamp, endTimestamp)); + assertEquals(1, DateDiff.process(new BytesRef("yy"), startTimestamp, endTimestamp)); + } + + public void testDateDiffFunctionErrorTooLarge() { + ZonedDateTime zdtStart = ZonedDateTime.parse("2023-12-04T10:15:00Z"); + ZonedDateTime zdtEnd = ZonedDateTime.parse("2023-12-04T10:20:00Z"); + long startTimestamp = zdtStart.toInstant().toEpochMilli(); + long endTimestamp = zdtEnd.toInstant().toEpochMilli(); + + InvalidArgumentException e = expectThrows( + InvalidArgumentException.class, + () -> DateDiff.process(new BytesRef("nanoseconds"), startTimestamp, endTimestamp) + ); + assertThat(e.getMessage(), containsString("[300000000000] out of [integer] range")); + } + + public void testDateDiffFunctionErrorUnitNotValid() { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> DateDiff.process(new BytesRef("sseconds"), 0, 0)); + assertThat( + e.getMessage(), + containsString( + "Received value [sseconds] is not valid date part to add; " + + "did you mean [seconds, second, nanoseconds, milliseconds, microseconds, nanosecond]?" + ) + ); + + e = expectThrows(IllegalArgumentException.class, () -> DateDiff.process(new BytesRef("not-valid-unit"), 0, 0)); + assertThat( + e.getMessage(), + containsString( + "A value of [YEAR, QUARTER, MONTH, DAYOFYEAR, DAY, WEEK, WEEKDAY, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, " + + "NANOSECOND] or their aliases is required; received [not-valid-unit]" + ) + ); + } + + @Override + protected Expression build(Source source, List args) { + return new DateDiff(source, args.get(0), args.get(1), args.get(2)); + } +} From 943b2eae70e7395ddd6b4a8c4ef36972082ab018 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 9 Jan 2024 09:50:34 +0000 Subject: [PATCH 29/47] Revert "Esql/create DATE_DIFF function (#103208)" This reverts commit ec2e18536da1f1246b1fe20ee5d23b66b94dea19. 
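Before the revert below is applied, it is worth pinning down how the implementation being removed resolves its unit argument, since the tests above depend on it: DateDiff.Part is registered in a case-insensitive lookup built by DateTimeField.initializeResolutionMap, keyed by each constant's lower-cased name plus every alias it declares. A minimal self-contained sketch of that resolution scheme, using a hypothetical two-constant Unit enum rather than the real Part enum:

    import java.util.HashMap;
    import java.util.Locale;
    import java.util.Map;

    // Hypothetical stand-in for DateDiff.Part: each unit is reachable both by
    // its lower-cased enum name and by every alias it declares.
    enum Unit {
        SECOND("seconds", "ss", "s"),
        MINUTE("minutes", "mi", "n");

        private final String[] aliases;

        Unit(String... aliases) {
            this.aliases = aliases;
        }

        // Populated once the constants exist; mirrors initializeResolutionMap.
        private static final Map<String, Unit> NAME_TO_UNIT = new HashMap<>();
        static {
            for (Unit unit : values()) {
                NAME_TO_UNIT.put(unit.name().toLowerCase(Locale.ROOT), unit);
                for (String alias : unit.aliases) {
                    NAME_TO_UNIT.put(alias, unit);
                }
            }
        }

        static Unit resolve(String name) {
            Unit unit = NAME_TO_UNIT.get(name.toLowerCase(Locale.ROOT));
            if (unit == null) {
                throw new IllegalArgumentException("unknown unit [" + name + "]");
            }
            return unit;
        }
    }

This lookup is why the assertions above can pass "seconds", "ss" or "s" to DateDiff.process interchangeably.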
--- docs/changelog/103208.yaml | 6 - .../esql/functions/date_diff.asciidoc | 37 --- .../esql/functions/signature/date_diff.svg | 1 - .../esql/functions/types/date_diff.asciidoc | 6 - .../src/main/resources/date.csv-spec | 61 ----- .../src/main/resources/show.csv-spec | 4 +- .../date/DateDiffConstantEvaluator.java | 154 ------------ .../scalar/date/DateDiffEvaluator.java | 176 -------------- .../function/EsqlFunctionRegistry.java | 2 - .../function/scalar/date/DateDiff.java | 222 ------------------ .../function/scalar/date/DateTimeField.java | 50 ---- .../xpack/esql/io/stream/PlanNamedTypes.java | 15 -- .../function/scalar/date/DateDiffTests.java | 192 --------------- 13 files changed, 1 insertion(+), 925 deletions(-) delete mode 100644 docs/changelog/103208.yaml delete mode 100644 docs/reference/esql/functions/date_diff.asciidoc delete mode 100644 docs/reference/esql/functions/signature/date_diff.svg delete mode 100644 docs/reference/esql/functions/types/date_diff.asciidoc delete mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java delete mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java delete mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java delete mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTimeField.java delete mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java diff --git a/docs/changelog/103208.yaml b/docs/changelog/103208.yaml deleted file mode 100644 index dbd25835b4bd7..0000000000000 --- a/docs/changelog/103208.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103202 -summary: ES|QL - create DATE_DIFF function -area: ES|QL -type: enhancement -issues: - - 101942 diff --git a/docs/reference/esql/functions/date_diff.asciidoc b/docs/reference/esql/functions/date_diff.asciidoc deleted file mode 100644 index 6127290466b10..0000000000000 --- a/docs/reference/esql/functions/date_diff.asciidoc +++ /dev/null @@ -1,37 +0,0 @@ -[discrete] -[[esql-date_diff]] -=== `DATE_DIFF` -Subtract the second argument from the third argument and return their difference in multiples of the unit specified in the first argument. -If the second argument (start) is greater than the third argument (end), then negative values are returned. 
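To make the sign convention in the description above concrete: the difference is measured from start to end, so a start that falls after the end yields a negative result. A small standalone java.time sketch (illustrative only; it mirrors the ChronoUnit.between arithmetic that the DateDiff.Part constants delegate to, not the ES|QL surface itself):

    import java.time.ZonedDateTime;
    import java.time.temporal.ChronoUnit;

    public class DateDiffSign {
        public static void main(String[] args) {
            ZonedDateTime start = ZonedDateTime.parse("2023-12-02T11:00:00Z");
            ZonedDateTime end = ZonedDateTime.parse("2023-12-02T12:00:00Z");

            // Measured from start to end: prints 60 ...
            System.out.println(ChronoUnit.MINUTES.between(start, end));
            // ... and -60 once the arguments are swapped.
            System.out.println(ChronoUnit.MINUTES.between(end, start));
        }
    }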
- -[cols="^,^"] -|=== -2+h|Datetime difference units - -s|unit -s|abbreviations - -| year | years, yy, yyyy -| quarter | quarters, qq, q -| month | months, mm, m -| dayofyear | dy, y -| day | days, dd, d -| week | weeks, wk, ww -| weekday | weekdays, dw -| hour | hours, hh -| minute | minutes, mi, n -| second | seconds, ss, s -| millisecond | milliseconds, ms -| microsecond | microseconds, mcs -| nanosecond | nanoseconds, ns -|=== - -[source.merge.styled,esql] ----- -include::{esql-specs}/docs.csv-spec[tag=dateDiff] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/docs.csv-spec[tag=dateDiff-result] -|=== - diff --git a/docs/reference/esql/functions/signature/date_diff.svg b/docs/reference/esql/functions/signature/date_diff.svg deleted file mode 100644 index 6563ec6576927..0000000000000 --- a/docs/reference/esql/functions/signature/date_diff.svg +++ /dev/null @@ -1 +0,0 @@ -DATE_DIFF(unit,startTimestamp,endTimestamp) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/date_diff.asciidoc b/docs/reference/esql/functions/types/date_diff.asciidoc deleted file mode 100644 index b4e5c6ad5e0b5..0000000000000 --- a/docs/reference/esql/functions/types/date_diff.asciidoc +++ /dev/null @@ -1,6 +0,0 @@ -[%header.monospaced.styled,format=dsv,separator=|] -|=== -unit | startTimestamp | endTimestamp | result -keyword | datetime | datetime | integer -text | datetime | datetime | integer -|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 9eabbf9bf6c6d..509257c4c8b4f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -337,67 +337,6 @@ AVG(salary):double | bucket:date // end::auto_bucket_in_agg-result[] ; -evalDateDiffInNanoAndMicroAndMilliSeconds -ROW date1=to_datetime("2023-12-02T11:00:00.000Z"), date2=to_datetime("2023-12-02T11:00:00.001Z") -| EVAL dd_ns1=date_diff("nanoseconds", date1, date2), dd_ns2=date_diff("ns", date1, date2) -| EVAL dd_mcs1=date_diff("microseconds", date1, date2), dd_mcs2=date_diff("mcs", date1, date2) -| EVAL dd_ms1=date_diff("milliseconds", date1, date2), dd_ms2=date_diff("ms", date1, date2) -| keep dd_ns1, dd_ns2, dd_mcs1, dd_mcs2, dd_ms1, dd_ms2 -; - -dd_ns1:integer | dd_ns2:integer | dd_mcs1:integer | dd_mcs2:integer | dd_ms1:integer | dd_ms2:integer -1000000 | 1000000 | 1000 | 1000 | 1 | 1 -; - -evalDateDiffInSecondsAndMinutesAndHours -ROW date1=to_datetime("2023-12-02T11:00:00.000Z"), date2=to_datetime("2023-12-02T12:00:00.000Z") -| EVAL dd_s1=date_diff("seconds", date1, date2), dd_s2=date_diff("ss", date1, date2), dd_s3=date_diff("s", date1, date2) -| EVAL dd_m1=date_diff("minutes", date1, date2), dd_m2=date_diff("mi", date1, date2), dd_m3=date_diff("n", date1, date2) -| EVAL dd_h1=date_diff("hours", date1, date2), dd_h2=date_diff("hh", date1, date2) -| keep dd_s1, dd_s2, dd_s3, dd_m1, dd_m2, dd_m3, dd_h1, dd_h2 -; - -dd_s1:integer | dd_s2:integer | dd_s3:integer | dd_m1:integer | dd_m2:integer | dd_m3:integer | dd_h1:integer | dd_h2:integer -3600 | 3600 | 3600 | 60 | 60 | 60 | 1 | 1 -; - -evalDateDiffInDaysAndWeeks -ROW date1=to_datetime("2023-12-02T11:00:00.000Z"), date2=to_datetime("2023-12-24T11:00:00.000Z") -| EVAL dd_wd1=date_diff("weekdays", date1, date2), dd_wd2=date_diff("dw", date1, date2) -| EVAL dd_w1=date_diff("weeks", date1, date2), dd_w2=date_diff("wk", date1, date2), 
dd_w3=date_diff("ww", date1, date2) -| EVAL dd_d1=date_diff("dy", date1, date2), dd_d2=date_diff("y", date1, date2) -| EVAL dd_dy1=date_diff("days", date1, date2), dd_dy2=date_diff("dd", date1, date2), dd_dy3=date_diff("d", date1, date2) -| keep dd_wd1, dd_wd2, dd_w1, dd_w2, dd_w3, dd_d1, dd_d2, dd_dy1, dd_dy2, dd_dy3 -; - -dd_wd1:integer | dd_wd2:integer | dd_w1:integer | dd_w2:integer | dd_w3:integer | dd_d1:integer | dd_d2:integer | dd_dy1:integer | dd_dy2:integer | dd_dy3:integer -22 | 22 | 3 | 3 | 3 | 22 | 22 | 22 | 22 | 22 -; - -evalDateDiffInMonthsAndQuartersAndYears -ROW date1=to_datetime("2023-12-02T11:00:00.000Z"), date2=to_datetime("2024-12-24T11:00:00.000Z") -| EVAL dd_m1=date_diff("months", date1, date2), dd_m2=date_diff("mm", date1, date2), dd_m3=date_diff("m", date1, date2) -| EVAL dd_q1=date_diff("quarters", date1, date2), dd_q2=date_diff("qq", date1, date2), dd_q3=date_diff("q", date1, date2) -| EVAL dd_y1=date_diff("years", date1, date2), dd_y2=date_diff("yyyy", date1, date2), dd_y3=date_diff("yy", date1, date2) -| keep dd_m1, dd_m2, dd_m3, dd_q1, dd_q2, dd_q3, dd_y1, dd_y2, dd_y3 -; - -dd_m1:integer | dd_m2:integer | dd_m3:integer | dd_q1:integer | dd_q2:integer | dd_q3:integer | dd_y1:integer | dd_y2:integer | dd_y3:integer -12 | 12 | 12 | 4 | 4 | 4 | 1 | 1 | 1 -; - -evalDateDiffErrorOutOfIntegerRange -ROW date1=to_datetime("2023-12-02T11:00:00.000Z"), date2=to_datetime("2023-12-23T11:00:00.000Z") -| EVAL dd_oo=date_diff("nanoseconds", date1, date2) -| keep dd_oo -; -warning: Line 2:14: evaluation of [date_diff(\"nanoseconds\", date1, date2)] failed, treating result as null. Only first 20 failures recorded. -warning: Line 2:14: org.elasticsearch.xpack.ql.InvalidArgumentException: [1814400000000000] out of [integer] range - -dd_oo:integer -null -; - evalDateParseWithSimpleDate row a = "2023-02-01" | eval b = date_parse("yyyy-MM-dd", a) | keep b; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec index 3aec009ec5aa3..024ccc9883be2 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec @@ -26,7 +26,6 @@ cos |"double cos(n:integer|long|double|unsigned_long)" cosh |"double cosh(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "The number who's hyperbolic cosine is to be returned" |double | "Returns the hyperbolic cosine of a number" | false | false count |? count(arg1:?) |arg1 |? | "" |? | "" | false | false count_distinct |? count_distinct(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false -date_diff |"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"|[unit, startTimestamp, endTimestamp] |["keyword|text", "date", "date"] |["A valid date unit", "A string representing a start timestamp", "A string representing an end timestamp"] |integer | "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" | [false, false, false] | false date_extract |? date_extract(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false date_format |? date_format(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? 
| "" | [false, false] | false date_parse |"date date_parse(?datePattern:keyword, dateString:keyword|text)"|[datePattern, dateString]|["keyword", "keyword|text"]|[A valid date pattern, A string representing a date]|date |Parses a string into a date value | [true, false] | false @@ -118,7 +117,6 @@ synopsis:keyword "double cosh(n:integer|long|double|unsigned_long)" ? count(arg1:?) ? count_distinct(arg1:?, arg2:?) -"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)" ? date_extract(arg1:?, arg2:?) ? date_format(arg1:?, arg2:?) "date date_parse(?datePattern:keyword, dateString:keyword|text)" @@ -211,5 +209,5 @@ countFunctions#[skip:-8.11.99] show functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -85 | 85 | 85 +84 | 84 | 84 ; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java deleted file mode 100644 index 3cb41d0028d54..0000000000000 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.xpack.esql.expression.function.scalar.date; - -import java.lang.IllegalArgumentException; -import java.lang.Override; -import java.lang.String; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.core.Releasables; -import org.elasticsearch.xpack.esql.expression.function.Warnings; -import org.elasticsearch.xpack.ql.InvalidArgumentException; -import org.elasticsearch.xpack.ql.tree.Source; - -/** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. 
- */ -public final class DateDiffConstantEvaluator implements EvalOperator.ExpressionEvaluator { - private final Warnings warnings; - - private final DateDiff.Part datePartFieldUnit; - - private final EvalOperator.ExpressionEvaluator startTimestamp; - - private final EvalOperator.ExpressionEvaluator endTimestamp; - - private final DriverContext driverContext; - - public DateDiffConstantEvaluator(Source source, DateDiff.Part datePartFieldUnit, - EvalOperator.ExpressionEvaluator startTimestamp, - EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { - this.warnings = new Warnings(source); - this.datePartFieldUnit = datePartFieldUnit; - this.startTimestamp = startTimestamp; - this.endTimestamp = endTimestamp; - this.driverContext = driverContext; - } - - @Override - public Block eval(Page page) { - try (LongBlock startTimestampBlock = (LongBlock) startTimestamp.eval(page)) { - try (LongBlock endTimestampBlock = (LongBlock) endTimestamp.eval(page)) { - LongVector startTimestampVector = startTimestampBlock.asVector(); - if (startTimestampVector == null) { - return eval(page.getPositionCount(), startTimestampBlock, endTimestampBlock); - } - LongVector endTimestampVector = endTimestampBlock.asVector(); - if (endTimestampVector == null) { - return eval(page.getPositionCount(), startTimestampBlock, endTimestampBlock); - } - return eval(page.getPositionCount(), startTimestampVector, endTimestampVector); - } - } - } - - public IntBlock eval(int positionCount, LongBlock startTimestampBlock, - LongBlock endTimestampBlock) { - try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - if (startTimestampBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (startTimestampBlock.getValueCount(p) != 1) { - if (startTimestampBlock.getValueCount(p) > 1) { - warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - if (endTimestampBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (endTimestampBlock.getValueCount(p) != 1) { - if (endTimestampBlock.getValueCount(p) > 1) { - warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - try { - result.appendInt(DateDiff.process(datePartFieldUnit, startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); - } catch (IllegalArgumentException | InvalidArgumentException e) { - warnings.registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public IntBlock eval(int positionCount, LongVector startTimestampVector, - LongVector endTimestampVector) { - try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - try { - result.appendInt(DateDiff.process(datePartFieldUnit, startTimestampVector.getLong(p), endTimestampVector.getLong(p))); - } catch (IllegalArgumentException | InvalidArgumentException e) { - warnings.registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - @Override - public String toString() { - return "DateDiffConstantEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; - } - - 
@Override - public void close() { - Releasables.closeExpectNoException(startTimestamp, endTimestamp); - } - - static class Factory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - - private final DateDiff.Part datePartFieldUnit; - - private final EvalOperator.ExpressionEvaluator.Factory startTimestamp; - - private final EvalOperator.ExpressionEvaluator.Factory endTimestamp; - - public Factory(Source source, DateDiff.Part datePartFieldUnit, - EvalOperator.ExpressionEvaluator.Factory startTimestamp, - EvalOperator.ExpressionEvaluator.Factory endTimestamp) { - this.source = source; - this.datePartFieldUnit = datePartFieldUnit; - this.startTimestamp = startTimestamp; - this.endTimestamp = endTimestamp; - } - - @Override - public DateDiffConstantEvaluator get(DriverContext context) { - return new DateDiffConstantEvaluator(source, datePartFieldUnit, startTimestamp.get(context), endTimestamp.get(context), context); - } - - @Override - public String toString() { - return "DateDiffConstantEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; - } - } -} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java deleted file mode 100644 index 952a819a014a9..0000000000000 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java +++ /dev/null @@ -1,176 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.xpack.esql.expression.function.scalar.date; - -import java.lang.IllegalArgumentException; -import java.lang.Override; -import java.lang.String; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.core.Releasables; -import org.elasticsearch.xpack.esql.expression.function.Warnings; -import org.elasticsearch.xpack.ql.InvalidArgumentException; -import org.elasticsearch.xpack.ql.tree.Source; - -/** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. - * This class is generated. Do not edit it. 
- */ -public final class DateDiffEvaluator implements EvalOperator.ExpressionEvaluator { - private final Warnings warnings; - - private final EvalOperator.ExpressionEvaluator unit; - - private final EvalOperator.ExpressionEvaluator startTimestamp; - - private final EvalOperator.ExpressionEvaluator endTimestamp; - - private final DriverContext driverContext; - - public DateDiffEvaluator(Source source, EvalOperator.ExpressionEvaluator unit, - EvalOperator.ExpressionEvaluator startTimestamp, - EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { - this.warnings = new Warnings(source); - this.unit = unit; - this.startTimestamp = startTimestamp; - this.endTimestamp = endTimestamp; - this.driverContext = driverContext; - } - - @Override - public Block eval(Page page) { - try (BytesRefBlock unitBlock = (BytesRefBlock) unit.eval(page)) { - try (LongBlock startTimestampBlock = (LongBlock) startTimestamp.eval(page)) { - try (LongBlock endTimestampBlock = (LongBlock) endTimestamp.eval(page)) { - BytesRefVector unitVector = unitBlock.asVector(); - if (unitVector == null) { - return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); - } - LongVector startTimestampVector = startTimestampBlock.asVector(); - if (startTimestampVector == null) { - return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); - } - LongVector endTimestampVector = endTimestampBlock.asVector(); - if (endTimestampVector == null) { - return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); - } - return eval(page.getPositionCount(), unitVector, startTimestampVector, endTimestampVector); - } - } - } - } - - public IntBlock eval(int positionCount, BytesRefBlock unitBlock, LongBlock startTimestampBlock, - LongBlock endTimestampBlock) { - try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { - BytesRef unitScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { - if (unitBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (unitBlock.getValueCount(p) != 1) { - if (unitBlock.getValueCount(p) > 1) { - warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - if (startTimestampBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (startTimestampBlock.getValueCount(p) != 1) { - if (startTimestampBlock.getValueCount(p) > 1) { - warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - if (endTimestampBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (endTimestampBlock.getValueCount(p) != 1) { - if (endTimestampBlock.getValueCount(p) > 1) { - warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - try { - result.appendInt(DateDiff.process(unitBlock.getBytesRef(unitBlock.getFirstValueIndex(p), unitScratch), startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); - } catch (IllegalArgumentException | InvalidArgumentException e) { - warnings.registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public IntBlock eval(int positionCount, BytesRefVector unitVector, - LongVector 
startTimestampVector, LongVector endTimestampVector) { - try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { - BytesRef unitScratch = new BytesRef(); - position: for (int p = 0; p < positionCount; p++) { - try { - result.appendInt(DateDiff.process(unitVector.getBytesRef(p, unitScratch), startTimestampVector.getLong(p), endTimestampVector.getLong(p))); - } catch (IllegalArgumentException | InvalidArgumentException e) { - warnings.registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - @Override - public String toString() { - return "DateDiffEvaluator[" + "unit=" + unit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; - } - - @Override - public void close() { - Releasables.closeExpectNoException(unit, startTimestamp, endTimestamp); - } - - static class Factory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - - private final EvalOperator.ExpressionEvaluator.Factory unit; - - private final EvalOperator.ExpressionEvaluator.Factory startTimestamp; - - private final EvalOperator.ExpressionEvaluator.Factory endTimestamp; - - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory unit, - EvalOperator.ExpressionEvaluator.Factory startTimestamp, - EvalOperator.ExpressionEvaluator.Factory endTimestamp) { - this.source = source; - this.unit = unit; - this.startTimestamp = startTimestamp; - this.endTimestamp = endTimestamp; - } - - @Override - public DateDiffEvaluator get(DriverContext context) { - return new DateDiffEvaluator(source, unit.get(context), startTimestamp.get(context), endTimestamp.get(context), context); - } - - @Override - public String toString() { - return "DateDiffEvaluator[" + "unit=" + unit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; - } - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 3b76141fa541e..b0cdad5095bbe 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -32,7 +32,6 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToUnsignedLong; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToVersion; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateDiff; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateExtract; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateFormat; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; @@ -161,7 +160,6 @@ private FunctionDefinition[][] functions() { def(EndsWith.class, EndsWith::new, "ends_with") }, // date new FunctionDefinition[] { - def(DateDiff.class, DateDiff::new, "date_diff"), def(DateExtract.class, DateExtract::new, "date_extract"), def(DateFormat.class, DateFormat::new, "date_format"), def(DateParse.class, DateParse::new, "date_parse"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java deleted file mode 100644 index 63e8672ac620f..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java +++ /dev/null @@ -1,222 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.expression.function.scalar.date; - -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.ann.Fixed; -import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; -import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; -import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; -import org.elasticsearch.xpack.esql.expression.function.Param; -import org.elasticsearch.xpack.ql.InvalidArgumentException; -import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; -import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.ql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.ql.tree.NodeInfo; -import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.ql.type.DataTypes; - -import java.time.Instant; -import java.time.ZoneId; -import java.time.ZonedDateTime; -import java.time.temporal.ChronoUnit; -import java.time.temporal.IsoFields; -import java.util.Arrays; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Set; -import java.util.function.BiFunction; -import java.util.function.Function; - -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.THIRD; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isDate; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; -import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToInt; - -/** - * Subtract the second argument from the third argument and return their difference - * in multiples of the unit specified in the first argument. - * If the second argument (start) is greater than the third argument (end), then negative values are returned. 
- */ -public class DateDiff extends ScalarFunction implements OptionalArgument, EvaluatorMapper { - - public static final ZoneId UTC = ZoneId.of("Z"); - - private final Expression unit; - private final Expression startTimestamp; - private final Expression endTimestamp; - - /** - * Represents units that can be used for DATE_DIFF function and how the difference - * between 2 dates is calculated - */ - public enum Part implements DateTimeField { - - YEAR((start, end) -> end.getYear() - start.getYear(), "years", "yyyy", "yy"), - QUARTER((start, end) -> safeToInt(IsoFields.QUARTER_YEARS.between(start, end)), "quarters", "qq", "q"), - MONTH((start, end) -> safeToInt(ChronoUnit.MONTHS.between(start, end)), "months", "mm", "m"), - DAYOFYEAR((start, end) -> safeToInt(ChronoUnit.DAYS.between(start, end)), "dy", "y"), - DAY(DAYOFYEAR::diff, "days", "dd", "d"), - WEEK((start, end) -> safeToInt(ChronoUnit.WEEKS.between(start, end)), "weeks", "wk", "ww"), - WEEKDAY(DAYOFYEAR::diff, "weekdays", "dw"), - HOUR((start, end) -> safeToInt(ChronoUnit.HOURS.between(start, end)), "hours", "hh"), - MINUTE((start, end) -> safeToInt(ChronoUnit.MINUTES.between(start, end)), "minutes", "mi", "n"), - SECOND((start, end) -> safeToInt(ChronoUnit.SECONDS.between(start, end)), "seconds", "ss", "s"), - MILLISECOND((start, end) -> safeToInt(ChronoUnit.MILLIS.between(start, end)), "milliseconds", "ms"), - MICROSECOND((start, end) -> safeToInt(ChronoUnit.MICROS.between(start, end)), "microseconds", "mcs"), - NANOSECOND((start, end) -> safeToInt(ChronoUnit.NANOS.between(start, end)), "nanoseconds", "ns"); - - private static final Map NAME_TO_PART = DateTimeField.initializeResolutionMap(values()); - - private final BiFunction diffFunction; - private final Set aliases; - - Part(BiFunction diffFunction, String... 
aliases) { - this.diffFunction = diffFunction; - this.aliases = Set.of(aliases); - } - - public Integer diff(ZonedDateTime startTimestamp, ZonedDateTime endTimestamp) { - return diffFunction.apply(startTimestamp, endTimestamp); - } - - @Override - public Iterable aliases() { - return aliases; - } - - public static Part resolve(String dateTimeUnit) { - Part datePartField = DateTimeField.resolveMatch(NAME_TO_PART, dateTimeUnit); - if (datePartField == null) { - List similar = DateTimeField.findSimilar(NAME_TO_PART.keySet(), dateTimeUnit); - String errorMessage; - if (similar.isEmpty() == false) { - errorMessage = String.format( - Locale.ROOT, - "Received value [%s] is not valid date part to add; did you mean %s?", - dateTimeUnit, - similar - ); - } - else { - errorMessage = String.format( - Locale.ROOT, - "A value of %s or their aliases is required; received [%s]", - Arrays.asList(Part.values()), - dateTimeUnit - ); - } - throw new IllegalArgumentException(errorMessage); - } - - return datePartField; - } - } - - @FunctionInfo( - returnType = "integer", - description = "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" - ) - public DateDiff( - Source source, - @Param(name = "unit", type = { "keyword", "text" }, description = "A valid date unit") Expression unit, - @Param( - name = "startTimestamp", - type = { "date" }, - description = "A string representing a start timestamp" - ) Expression startTimestamp, - @Param(name = "endTimestamp", type = { "date" }, description = "A string representing an end timestamp") Expression endTimestamp - ) { - super(source, List.of(unit, startTimestamp, endTimestamp)); - this.unit = unit; - this.startTimestamp = startTimestamp; - this.endTimestamp = endTimestamp; - } - - @Evaluator(extraName = "Constant", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) - static int process(@Fixed Part datePartFieldUnit, long startTimestamp, long endTimestamp) throws IllegalArgumentException { - ZonedDateTime zdtStart = ZonedDateTime.ofInstant(Instant.ofEpochMilli(startTimestamp), UTC); - ZonedDateTime zdtEnd = ZonedDateTime.ofInstant(Instant.ofEpochMilli(endTimestamp), UTC); - return datePartFieldUnit.diff(zdtStart, zdtEnd); - } - - @Evaluator(warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) - static int process(BytesRef unit, long startTimestamp, long endTimestamp) throws IllegalArgumentException { - return process(Part.resolve(unit.utf8ToString()), startTimestamp, endTimestamp); - } - - @Override - public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - ExpressionEvaluator.Factory startTimestampEvaluator = toEvaluator.apply(startTimestamp); - ExpressionEvaluator.Factory endTimestampEvaluator = toEvaluator.apply(endTimestamp); - - if (unit.foldable()) { - try { - Part datePartField = Part.resolve(((BytesRef) unit.fold()).utf8ToString()); - return new DateDiffConstantEvaluator.Factory(source(), datePartField, startTimestampEvaluator, endTimestampEvaluator); - } catch (IllegalArgumentException e) { - throw new InvalidArgumentException("invalid unit format for [{}]: {}", sourceText(), e.getMessage()); - } - } - ExpressionEvaluator.Factory unitEvaluator = toEvaluator.apply(unit); - return new DateDiffEvaluator.Factory(source(), unitEvaluator, startTimestampEvaluator, endTimestampEvaluator); - } - - @Override - protected TypeResolution resolveType() { - if (childrenResolved() == false) { - return new TypeResolution("Unresolved children"); - } - 
- TypeResolution resolution = isString(unit, sourceText(), FIRST).and(isDate(startTimestamp, sourceText(), SECOND)) - .and(isDate(endTimestamp, sourceText(), THIRD)); - - if (resolution.unresolved()) { - return resolution; - } - - return TypeResolution.TYPE_RESOLVED; - } - - @Override - public Object fold() { - return EvaluatorMapper.super.fold(); - } - - @Override - public boolean foldable() { - return unit.foldable() && startTimestamp.foldable() && endTimestamp.foldable(); - } - - @Override - public DataType dataType() { - return DataTypes.INTEGER; - } - - @Override - public ScriptTemplate asScript() { - throw new UnsupportedOperationException("functions do not support scripting"); - } - - @Override - public Expression replaceChildren(List newChildren) { - return new DateDiff(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, DateDiff::new, children().get(0), children().get(1), children().get(2)); - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTimeField.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTimeField.java deleted file mode 100644 index 85651af67e8e3..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTimeField.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.expression.function.scalar.date; - -import org.elasticsearch.xpack.ql.util.StringUtils; - -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.stream.Collectors; - -public interface DateTimeField { - - static Map initializeResolutionMap(D[] values) { - Map nameToPart = new HashMap<>(); - - for (D datePart : values) { - String lowerCaseName = datePart.name().toLowerCase(Locale.ROOT); - - nameToPart.put(lowerCaseName, datePart); - for (String alias : datePart.aliases()) { - nameToPart.put(alias, datePart); - } - } - return Collections.unmodifiableMap(nameToPart); - } - - static List initializeValidValues(D[] values) { - return Arrays.stream(values).map(D::name).collect(Collectors.toList()); - } - - static D resolveMatch(Map resolutionMap, String possibleMatch) { - return resolutionMap.get(possibleMatch.toLowerCase(Locale.ROOT)); - } - - static List findSimilar(Iterable similars, String match) { - return StringUtils.findSimilar(match, similars); - } - - String name(); - - Iterable aliases(); -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 4f03f7a7d72ff..8feb8fb18f443 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -51,7 +51,6 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToUnsignedLong; import 
org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToVersion; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateDiff; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateExtract; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateFormat; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; @@ -351,7 +350,6 @@ public static List namedTypeEntries() { of(ScalarFunction.class, CIDRMatch.class, PlanNamedTypes::writeCIDRMatch, PlanNamedTypes::readCIDRMatch), of(ScalarFunction.class, Coalesce.class, PlanNamedTypes::writeVararg, PlanNamedTypes::readVarag), of(ScalarFunction.class, Concat.class, PlanNamedTypes::writeVararg, PlanNamedTypes::readVarag), - of(ScalarFunction.class, DateDiff.class, PlanNamedTypes::writeDateDiff, PlanNamedTypes::readDateDiff), of(ScalarFunction.class, DateExtract.class, PlanNamedTypes::writeDateExtract, PlanNamedTypes::readDateExtract), of(ScalarFunction.class, DateFormat.class, PlanNamedTypes::writeDateFormat, PlanNamedTypes::readDateFormat), of(ScalarFunction.class, DateParse.class, PlanNamedTypes::writeDateTimeParse, PlanNamedTypes::readDateTimeParse), @@ -1296,19 +1294,6 @@ static void writeCountDistinct(PlanStreamOutput out, CountDistinct countDistinct out.writeOptionalWriteable(fields.size() == 2 ? o -> out.writeExpression(fields.get(1)) : null); } - static DateDiff readDateDiff(PlanStreamInput in) throws IOException { - return new DateDiff(in.readSource(), in.readExpression(), in.readExpression(), in.readExpression()); - } - - static void writeDateDiff(PlanStreamOutput out, DateDiff function) throws IOException { - out.writeNoSource(); - List fields = function.children(); - assert fields.size() == 3; - out.writeExpression(fields.get(0)); - out.writeExpression(fields.get(1)); - out.writeExpression(fields.get(2)); - } - static DateExtract readDateExtract(PlanStreamInput in) throws IOException { return new DateExtract(in.readSource(), in.readExpression(), in.readExpression(), in.configuration()); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java deleted file mode 100644 index 15d0cca454407..0000000000000 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.expression.function.scalar.date; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; -import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.ql.InvalidArgumentException; -import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataTypes; - -import java.time.ZonedDateTime; -import java.util.List; -import java.util.function.Supplier; - -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; - -public class DateDiffTests extends AbstractFunctionTestCase { - public DateDiffTests(@Name("TestCase") Supplier testCaseSupplier) { - this.testCase = testCaseSupplier.get(); - } - - @ParametersFactory - public static Iterable parameters() { - ZonedDateTime zdtStart = ZonedDateTime.parse("2023-12-04T10:15:30Z"); - ZonedDateTime zdtEnd = ZonedDateTime.parse("2023-12-05T10:45:00Z"); - - return parameterSuppliersFromTypedData( - List.of( - new TestCaseSupplier( - "Date Diff In Seconds - OK", - List.of(DataTypes.KEYWORD, DataTypes.DATETIME, DataTypes.DATETIME), - () -> new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(new BytesRef("seconds"), DataTypes.KEYWORD, "unit"), - new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.DATETIME, "startTimestamp"), - new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.DATETIME, "endTimestamp") - ), - "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " - + "endTimestamp=Attribute[channel=2]]", - DataTypes.INTEGER, - equalTo(88170) - ) - ), - new TestCaseSupplier( - "Date Diff In Seconds with text- OK", - List.of(DataTypes.TEXT, DataTypes.DATETIME, DataTypes.DATETIME), - () -> new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(new BytesRef("seconds"), DataTypes.TEXT, "unit"), - new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.DATETIME, "startTimestamp"), - new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.DATETIME, "endTimestamp") - ), - "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " - + "endTimestamp=Attribute[channel=2]]", - DataTypes.INTEGER, - equalTo(88170) - ) - ), - new TestCaseSupplier( - "Date Diff Error Type unit", - List.of(DataTypes.INTEGER, DataTypes.DATETIME, DataTypes.DATETIME), - () -> TestCaseSupplier.TestCase.typeError( - List.of( - new TestCaseSupplier.TypedData(new BytesRef("seconds"), DataTypes.INTEGER, "unit"), - new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.DATETIME, "startTimestamp"), - new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.DATETIME, "endTimestamp") - ), - "first argument of [] must be [string], found value [unit] type [integer]" - ) - ), - new TestCaseSupplier( - "Date Diff Error Type startTimestamp", - List.of(DataTypes.TEXT, DataTypes.INTEGER, DataTypes.DATETIME), - () -> TestCaseSupplier.TestCase.typeError( - List.of( - new TestCaseSupplier.TypedData(new BytesRef("minutes"), DataTypes.TEXT, "unit"), - new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.INTEGER, "startTimestamp"), - new 
TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.DATETIME, "endTimestamp") - ), - "second argument of [] must be [datetime], found value [startTimestamp] type [integer]" - ) - ), - new TestCaseSupplier( - "Date Diff Error Type endTimestamp", - List.of(DataTypes.TEXT, DataTypes.DATETIME, DataTypes.INTEGER), - () -> TestCaseSupplier.TestCase.typeError( - List.of( - new TestCaseSupplier.TypedData(new BytesRef("minutes"), DataTypes.TEXT, "unit"), - new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.DATETIME, "startTimestamp"), - new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.INTEGER, "endTimestamp") - ), - "third argument of [] must be [datetime], found value [endTimestamp] type [integer]" - ) - ) - ) - ); - } - - public void testDateDiffFunction() { - ZonedDateTime zdtStart = ZonedDateTime.parse("2023-12-04T10:15:00Z"); - ZonedDateTime zdtEnd = ZonedDateTime.parse("2023-12-04T10:15:01Z"); - long startTimestamp = zdtStart.toInstant().toEpochMilli(); - long endTimestamp = zdtEnd.toInstant().toEpochMilli(); - - assertEquals(1000000000, DateDiff.process(new BytesRef("nanoseconds"), startTimestamp, endTimestamp)); - assertEquals(1000000000, DateDiff.process(new BytesRef("ns"), startTimestamp, endTimestamp)); - assertEquals(1000000, DateDiff.process(new BytesRef("microseconds"), startTimestamp, endTimestamp)); - assertEquals(1000000, DateDiff.process(new BytesRef("mcs"), startTimestamp, endTimestamp)); - assertEquals(1000, DateDiff.process(new BytesRef("milliseconds"), startTimestamp, endTimestamp)); - assertEquals(1000, DateDiff.process(new BytesRef("ms"), startTimestamp, endTimestamp)); - assertEquals(1, DateDiff.process(new BytesRef("seconds"), startTimestamp, endTimestamp)); - assertEquals(1, DateDiff.process(new BytesRef("ss"), startTimestamp, endTimestamp)); - assertEquals(1, DateDiff.process(new BytesRef("s"), startTimestamp, endTimestamp)); - - zdtEnd = zdtEnd.plusYears(1); - endTimestamp = zdtEnd.toInstant().toEpochMilli(); - - assertEquals(527040, DateDiff.process(new BytesRef("minutes"), startTimestamp, endTimestamp)); - assertEquals(527040, DateDiff.process(new BytesRef("mi"), startTimestamp, endTimestamp)); - assertEquals(527040, DateDiff.process(new BytesRef("n"), startTimestamp, endTimestamp)); - assertEquals(8784, DateDiff.process(new BytesRef("hours"), startTimestamp, endTimestamp)); - assertEquals(8784, DateDiff.process(new BytesRef("hh"), startTimestamp, endTimestamp)); - assertEquals(366, DateDiff.process(new BytesRef("weekdays"), startTimestamp, endTimestamp)); - assertEquals(366, DateDiff.process(new BytesRef("dw"), startTimestamp, endTimestamp)); - assertEquals(52, DateDiff.process(new BytesRef("weeks"), startTimestamp, endTimestamp)); - assertEquals(52, DateDiff.process(new BytesRef("wk"), startTimestamp, endTimestamp)); - assertEquals(52, DateDiff.process(new BytesRef("ww"), startTimestamp, endTimestamp)); - assertEquals(366, DateDiff.process(new BytesRef("days"), startTimestamp, endTimestamp)); - assertEquals(366, DateDiff.process(new BytesRef("dd"), startTimestamp, endTimestamp)); - assertEquals(366, DateDiff.process(new BytesRef("d"), startTimestamp, endTimestamp)); - assertEquals(366, DateDiff.process(new BytesRef("dy"), startTimestamp, endTimestamp)); - assertEquals(366, DateDiff.process(new BytesRef("y"), startTimestamp, endTimestamp)); - assertEquals(12, DateDiff.process(new BytesRef("months"), startTimestamp, endTimestamp)); - assertEquals(12, DateDiff.process(new BytesRef("mm"), 
startTimestamp, endTimestamp)); - assertEquals(12, DateDiff.process(new BytesRef("m"), startTimestamp, endTimestamp)); - assertEquals(4, DateDiff.process(new BytesRef("quarters"), startTimestamp, endTimestamp)); - assertEquals(4, DateDiff.process(new BytesRef("qq"), startTimestamp, endTimestamp)); - assertEquals(4, DateDiff.process(new BytesRef("q"), startTimestamp, endTimestamp)); - assertEquals(1, DateDiff.process(new BytesRef("years"), startTimestamp, endTimestamp)); - assertEquals(1, DateDiff.process(new BytesRef("yyyy"), startTimestamp, endTimestamp)); - assertEquals(1, DateDiff.process(new BytesRef("yy"), startTimestamp, endTimestamp)); - } - - public void testDateDiffFunctionErrorTooLarge() { - ZonedDateTime zdtStart = ZonedDateTime.parse("2023-12-04T10:15:00Z"); - ZonedDateTime zdtEnd = ZonedDateTime.parse("2023-12-04T10:20:00Z"); - long startTimestamp = zdtStart.toInstant().toEpochMilli(); - long endTimestamp = zdtEnd.toInstant().toEpochMilli(); - - InvalidArgumentException e = expectThrows( - InvalidArgumentException.class, - () -> DateDiff.process(new BytesRef("nanoseconds"), startTimestamp, endTimestamp) - ); - assertThat(e.getMessage(), containsString("[300000000000] out of [integer] range")); - } - - public void testDateDiffFunctionErrorUnitNotValid() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> DateDiff.process(new BytesRef("sseconds"), 0, 0)); - assertThat( - e.getMessage(), - containsString( - "Received value [sseconds] is not valid date part to add; " - + "did you mean [seconds, second, nanoseconds, milliseconds, microseconds, nanosecond]?" - ) - ); - - e = expectThrows(IllegalArgumentException.class, () -> DateDiff.process(new BytesRef("not-valid-unit"), 0, 0)); - assertThat( - e.getMessage(), - containsString( - "A value of [YEAR, QUARTER, MONTH, DAYOFYEAR, DAY, WEEK, WEEKDAY, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, " - + "NANOSECOND] or their aliases is required; received [not-valid-unit]" - ) - ); - } - - @Override - protected Expression build(Source source, List args) { - return new DateDiff(source, args.get(0), args.get(1), args.get(2)); - } -} From 07780a828244cd98e13606a6c903ad4d78bcd43e Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Tue, 9 Jan 2024 11:05:00 +0100 Subject: [PATCH 30/47] Add metric name validation (#103388) This commit adds a minimum metric name validation which checks: metric name starts with es. prefix metric name is using . 
--- modules/apm/NAMING.md | 23 +- .../telemetry/apm/AbstractInstrument.java | 10 +- .../apm/internal/MetricNameValidator.java | 142 ++++++++ .../telemetry/apm/APMMeterRegistryTests.java | 39 +-- .../apm/MeterRegistryConcurrencyTests.java | 2 +- .../internal/MetricNameValidatorTests.java | 102 ++++++ .../metrics/AsyncCountersAdapterTests.java | 8 +- .../internal/metrics/GaugeAdapterTests.java | 13 +- .../s3/S3BlobStoreRepositoryMetricsTests.java | 50 +-- .../s3/S3BlobStoreRepositoryTests.java | 4 +- .../allocator/DesiredBalanceReconciler.java | 14 +- .../health/HealthPeriodicLogger.java | 5 +- .../monitor/metrics/NodeMetrics.java | 12 +- .../repositories/RepositoriesMetrics.java | 20 +- .../test/apmintegration/MetricsApmIT.java | 16 +- .../test/apmintegration/TestMeterUsages.java | 19 +- x | 320 ++++++++++++++++++ .../org/elasticsearch/xpack/ml/MlMetrics.java | 42 +-- 18 files changed, 700 insertions(+), 141 deletions(-) create mode 100644 modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/MetricNameValidator.java create mode 100644 modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/MetricNameValidatorTests.java create mode 100644 x diff --git a/modules/apm/NAMING.md b/modules/apm/NAMING.md index 8e8d1bf2463e2..31cad34d0470a 100644 --- a/modules/apm/NAMING.md +++ b/modules/apm/NAMING.md @@ -17,13 +17,13 @@ The **hierarchy** should be built by putting "more common" elements at the begin Example: * prefer `es.indices.docs.deleted.total` to `es.indices.total.deleted.docs` -* This way you can later add `es.indices.docs.count`, `es.indices.docs.ingested.total`, etc. +* This way you can later add `es.indices.docs.total`, `es.indices.docs.ingested.total`, etc. Prefix metrics: * Always use `es` as our root application name: this will give us a separate namespace and avoid any possibility of clashes with other metrics, and quick identification of Elasticsearch metrics on a dashboard. * Follow the root prefix with a simple module name, team or area of code. E.g. `snapshot, repositories, indices, threadpool`. Notice the mix of singular and plural - here this is intentional, to reflect closely the existing names in the codebase (e.g. `reindex` and `indices`) * In building a metric name, look for existing prefixes (e.g. module name and/or area of code, e.g. `blob_cache`) and for existing sub-elements as well (e.g. `error`) to build a good, consistent name. E.g.
prefer the consistent use of `error.total` rather than introducing `failures`, `failed.total` or `errors`. -* Avoid having sub-metrics under a name that is also a metric (e.g. do not create names like `es.repositories.elements`, `es.repositories.elements.utilization`; use `es.repositories.element.total` and `es.repositories.element.utilization` instead). Such metrics are hard to handle well in Elasticsearch, or in some internal structures (e.g. nested maps). Keep the hierarchy compact: do not add elements if you don’t need to. There is a description field when registering a metric, prefer using that as an explanation. \ For example, if emitting existing metrics from node stats, do not use the whole “object path”, but choose the most significant terms. @@ -35,7 +35,7 @@ The metric name can be generated but there should be no dynamic or variable cont * Rule of thumb: you should be able to do aggregations (e.g. sum, avg) across a dimension of a given metric (without the need to aggregate over different metric names); on the other hand, any aggregation across any dimension of a given metric should be meaningful. * There might be exceptions of course. For example: * When similar metrics have significantly different implementations/related metrics. \ - If we have only common metrics like `es.repositories.element.count, es.repositories.element.utilization, es.repositories.writes.total` for every blob storage implementation, then `s3,azure` should be an attribute. \ + If we have only common metrics like `es.repositories.element.total, es.repositories.element.utilization, es.repositories.writes.total` for every blob storage implementation, then `s3,azure` should be an attribute. \ If we have specific metrics, e.g. for s3 storage classes, prefer using prefixed metric names for the specific metrics: es.repositories.s3.deep_archive_access.total (but keep `es.repositories.elements`) * When you have a finite and fixed set of names it might be OK to have them in the name (e.g. "`young`" and "`old`" for GC generations). @@ -47,12 +47,19 @@ Examples: * es.indices.storage.write.io, instead of es.indices.storage.write.bytes_per_sec * These can all be composed with the suffixes below, e.g. es.process.jvm.collection.time.total, es.indices.storage.write.total to represent the monotonic sum of time spent in GC and the total number of bytes written to indices respectively. -**Pluralization** and **suffixes**: -* If the metric is unit-less, use plural: `es.threadpool.activethreads`, `es.indices.docs` -* Use `total` as a suffix for monotonic sums (e.g. es.indices.docs.deleted.total) -* Use `count` to represent the count of "things" in the metric name/namespace (e.g. if we have `es.process.jvm.classes.loaded`, we will express the number of classes currently loaded by the JVM as es.process.jvm.classes.loaded.count, and the total number of classes loaded since the JVM started as es.process.jvm.classes.loaded.total +**Suffixes**: +* Use `total` as a suffix for monotonic metrics (always increasing counter) (e.g. es.indices.docs.deleted.total) + * Note: even though an async counter reports a total cumulative value, it is still monotonic. +* Use `current` to represent the non-monotonic metrics (like gauges, upDownCounters) + * e.g. `current` vs `total`: we can have es.process.jvm.classes.loaded.current to express the number of classes currently loaded by the JVM, and the total number of classes loaded since the JVM started as es.process.jvm.classes.loaded.total * Use `ratio` to represent the ratio of two measures with identical unit (or unit-less) or measures that represent a fraction in the range [0, 1]. Examples: * Exception: consider using utilization when the ratio is between a usage and its limit, e.g. the ratio between es.process.jvm.heap.usage and es.process.jvm.heap.limit should be es.process.jvm.heap.utilization +* Use `status` to represent enum-like gauges, e.g. es.health.overall.red.status has values 1/0 to represent true/false +* Use `usage` to represent the amount used out of the known resource size +* Use `size` to represent the overall size of the resource measured +* Use `utilization` to represent a fraction of usage out of the overall size of a resource measured +* Use `histogram` to represent instruments of type histogram +* Use `time` to represent passage of time * If it has a unit of measure, then it should not be plural (and also not include the unit of measure, see above). Examples: es.process.jvm.collection.time, es.process.mem.virtual.usage, es.indices.storage.utilization ### Attributes
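A rough sketch (not from the patch) of how the suffix guidance above maps onto the MeterRegistry registration calls used elsewhere in this change; the metric names here are made up for the example.

    import org.elasticsearch.telemetry.metric.LongWithAttributes;
    import org.elasticsearch.telemetry.metric.MeterRegistry;

    import java.util.Map;
    import java.util.function.Supplier;

    class SuffixConventionExamples {
        static void register(MeterRegistry registry) {
            Supplier<LongWithAttributes> observer = () -> new LongWithAttributes(0, Map.of());
            // Monotonic, ever-increasing counter -> ".total"
            registry.registerLongCounter("es.example.requests.total", "Requests handled since start", "unit");
            // Value that can go up and down (gauge / upDownCounter) -> ".current"
            registry.registerLongGauge("es.example.tasks.current", "Tasks currently running", "unit", observer);
            // Enum-like 1/0 gauge -> ".status"
            registry.registerLongGauge("es.example.overall.red.status", "Overall: Red", "unit", observer);
            // Histogram instrument -> ".histogram"
            registry.registerLongHistogram("es.example.latency.histogram", "Request latency", "unit");
        }
    }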
diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/AbstractInstrument.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/AbstractInstrument.java index 72c6ccf905873..9329556ff0f3f 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/AbstractInstrument.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/AbstractInstrument.java @@ -11,6 +11,7 @@ import io.opentelemetry.api.metrics.Meter; import org.elasticsearch.core.Nullable; +import org.elasticsearch.telemetry.apm.internal.MetricNameValidator; import org.elasticsearch.telemetry.metric.Instrument; import java.security.AccessController; @@ -23,6 +24,7 @@ * An instrument that contains the name, description and unit. The delegate may be replaced when * the provider is updated. * Subclasses should implement the builder, which is used on initialization and provider updates. + * * @param <T> delegated instrument */ public abstract class AbstractInstrument<T> implements Instrument { @@ -50,19 +52,13 @@ void setProvider(@Nullable Meter meter) { } protected abstract static class Builder { - private static final int MAX_NAME_LENGTH = 255; protected final String name; protected final String description; protected final String unit; public Builder(String name, String description, String unit) { - if (name.length() > MAX_NAME_LENGTH) { - throw new IllegalArgumentException( - "Instrument name [" + name + "] with length [" + name.length() + "] exceeds maximum length [" + MAX_NAME_LENGTH + "]" - ); - } - this.name = Objects.requireNonNull(name); + this.name = MetricNameValidator.validate(name); this.description = Objects.requireNonNull(description); this.unit = Objects.requireNonNull(unit); }
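A sketch (not part of the diff) of the practical effect of routing Builder through MetricNameValidator: a badly named instrument now fails fast at registration time instead of only being checked for length. The registry variable is assumed to be the MeterRegistry used elsewhere in this change.

    import org.elasticsearch.telemetry.metric.MeterRegistry;

    class RegistrationTimeValidationExample {
        static void demo(MeterRegistry registry) {
            // Accepted as before, but now checked against the full naming rules:
            registry.registerLongCounter("es.test.requests.total", "desc", "unit");
            // Previously only names longer than 255 characters were rejected here;
            // now a non-allow-listed suffix throws at registration time:
            try {
                registry.registerLongCounter("es.test.requests.counter", "desc", "unit");
            } catch (IllegalArgumentException expected) {
                // "Metric name should end with one of [total,current,...] ..."
            }
        }
    }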
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal; + +import java.util.Objects; +import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +public class MetricNameValidator { + private static final Pattern ALLOWED_CHARACTERS = Pattern.compile("[a-z][a-z0-9_]*"); + static final Set ALLOWED_SUFFIXES = Set.of( + "total", + "current", + "ratio", + "status" /*a workaround for enums */, + "usage", + "size", + "utilization", + "histogram", + "time" + ); + static final int MAX_METRIC_NAME_LENGTH = 255; + + static final int MAX_ELEMENT_LENGTH = 30; + static final int MAX_NUMBER_OF_ELEMENTS = 10; + + private MetricNameValidator() {} + + /** + * Validates a metric name as per guidelines in Naming.md + * + * @param metricName metric name to be validated + * @throws IllegalArgumentException an exception indicating an incorrect metric name + */ + public static String validate(String metricName) { + Objects.requireNonNull(metricName); + validateMaxMetricNameLength(metricName); + + String[] elements = metricName.split("\\."); + hasESPrefix(elements, metricName); + hasAtLeast3Elements(elements, metricName); + hasNotBreachNumberOfElementsLimit(elements, metricName); + lastElementIsFromAllowList(elements, metricName); + perElementValidations(elements, metricName); + return metricName; + } + + private static void validateMaxMetricNameLength(String metricName) { + if (metricName.length() > MAX_METRIC_NAME_LENGTH) { + throw new IllegalArgumentException( + "Metric name length " + + metricName.length() + + "is longer than max metric name length:" + + MAX_METRIC_NAME_LENGTH + + " Name was: " + + metricName + ); + } + } + + private static void lastElementIsFromAllowList(String[] elements, String name) { + String lastElement = elements[elements.length - 1]; + if (ALLOWED_SUFFIXES.contains(lastElement) == false) { + throw new IllegalArgumentException( + "Metric name should end with one of [" + + ALLOWED_SUFFIXES.stream().collect(Collectors.joining(",")) + + "] " + + "Last element was: " + + lastElement + + ". " + + "Name was: " + + name + ); + } + } + + private static void hasNotBreachNumberOfElementsLimit(String[] elements, String name) { + if (elements.length > MAX_NUMBER_OF_ELEMENTS) { + throw new IllegalArgumentException( + "Metric name should have at most 10 elements. It had: " + elements.length + ". The name was: " + name + ); + } + } + + private static void hasAtLeast3Elements(String[] elements, String name) { + if (elements.length < 3) { + throw new IllegalArgumentException( + "Metric name consist of at least 3 elements. An es. prefix, group and a name. The name was: " + name + ); + } + } + + private static void hasESPrefix(String[] elements, String name) { + if (elements[0].equals("es") == false) { + throw new IllegalArgumentException( + "Metric name should start with \"es.\" prefix and use \".\" as a separator. 
Name was: " + name + ); + } + } + + private static void perElementValidations(String[] elements, String name) { + for (String element : elements) { + hasOnlyAllowedCharacters(element, name); + hasNotBreachLengthLimit(element, name); + } + } + + private static void hasNotBreachLengthLimit(String element, String name) { + if (element.length() > MAX_ELEMENT_LENGTH) { + throw new IllegalArgumentException( + "Metric name's element should not be longer than " + + MAX_ELEMENT_LENGTH + + " characters. Was: " + + element.length() + + ". Name was: " + + name + ); + } + } + + private static void hasOnlyAllowedCharacters(String element, String name) { + Matcher matcher = ALLOWED_CHARACTERS.matcher(element); + if (matcher.matches() == false) { + throw new IllegalArgumentException( + "Metric name should only use [a-z][a-z0-9_]* characters. " + + "Element does not match: \"" + + element + + "\". " + + "Name was: " + + name + ); + } + } +} diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/APMMeterRegistryTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/APMMeterRegistryTests.java index 778ca108dc5fe..8144b8f9a33b4 100644 --- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/APMMeterRegistryTests.java +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/APMMeterRegistryTests.java @@ -35,10 +35,8 @@ import java.util.List; import java.util.function.Supplier; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.sameInstance; public class APMMeterRegistryTests extends ESTestCase { @@ -84,8 +82,8 @@ public void testMeterIsOverridden() { public void testLookupByName() { var apmMeter = new APMMeterService(TELEMETRY_ENABLED, () -> testOtel, () -> noopOtel).getMeterRegistry(); - DoubleCounter registeredCounter = apmMeter.registerDoubleCounter("name", "desc", "unit"); - DoubleCounter lookedUpCounter = apmMeter.getDoubleCounter("name"); + DoubleCounter registeredCounter = apmMeter.registerDoubleCounter("es.test.name.total", "desc", "unit"); + DoubleCounter lookedUpCounter = apmMeter.getDoubleCounter("es.test.name.total"); assertThat(lookedUpCounter, sameInstance(registeredCounter)); } @@ -103,19 +101,6 @@ public void testNoopIsSetOnStop() { assertThat(meter, sameInstance(noopOtel)); } - public void testMaxNameLength() { - APMMeterService apmMeter = new APMMeterService(TELEMETRY_ENABLED, () -> testOtel, () -> noopOtel); - apmMeter.start(); - int max_length = 255; - var counter = apmMeter.getMeterRegistry().registerLongCounter("a".repeat(max_length), "desc", "count"); - assertThat(counter, instanceOf(LongCounter.class)); - IllegalArgumentException iae = expectThrows( - IllegalArgumentException.class, - () -> apmMeter.getMeterRegistry().registerLongCounter("a".repeat(max_length + 1), "desc", "count") - ); - assertThat(iae.getMessage(), containsString("exceeds maximum length [255]")); - } - public void testAllInstrumentsSwitchProviders() { TestAPMMeterService apmMeter = new TestAPMMeterService( Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), false).build(), @@ -125,18 +110,18 @@ public void testAllInstrumentsSwitchProviders() { APMMeterRegistry registry = apmMeter.getMeterRegistry(); Supplier doubleObserver = () -> new DoubleWithAttributes(1.5, Collections.emptyMap()); - DoubleCounter dc = registry.registerDoubleCounter("dc", "", ""); - 
diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/MeterRegistryConcurrencyTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/MeterRegistryConcurrencyTests.java index e8dafd996f5b0..11951a9bf1072 100644 --- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/MeterRegistryConcurrencyTests.java +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/MeterRegistryConcurrencyTests.java @@ -28,7 +28,7 @@ import static org.hamcrest.Matchers.sameInstance; public class MeterRegistryConcurrencyTests extends ESTestCase { - private final String name = "name"; + private final String name = "es.test.name.total"; private final String description = "desc"; private final String unit = "kg"; private final Meter noopMeter = OpenTelemetry.noop().getMeter("noop"); diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/MetricNameValidatorTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/MetricNameValidatorTests.java new file mode 100644 index 0000000000000..64f78d0af494c --- /dev/null +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/MetricNameValidatorTests.java @@ -0,0 +1,102 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1.
+ */ + +package org.elasticsearch.telemetry.apm.internal; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class MetricNameValidatorTests extends ESTestCase { + public void testMetricNameNotNull() { + String metricName = "es.somemodule.somemetric.total"; + assertThat(MetricNameValidator.validate(metricName), equalTo(metricName)); + + expectThrows(NullPointerException.class, () -> MetricNameValidator.validate(null)); + } + + public void testMaxMetricNameLength() { + MetricNameValidator.validate(metricNameWithLength(255)); + + expectThrows(IllegalArgumentException.class, () -> MetricNameValidator.validate(metricNameWithLength(256))); + } + + public void testESPrefixAndDotSeparator() { + MetricNameValidator.validate("es.somemodule.somemetric.total"); + + expectThrows(IllegalArgumentException.class, () -> MetricNameValidator.validate("somemodule.somemetric.total")); + // verify . is a separator + expectThrows(IllegalArgumentException.class, () -> MetricNameValidator.validate("es_somemodule_somemetric_total")); + expectThrows(IllegalArgumentException.class, () -> MetricNameValidator.validate("es_somemodule.somemetric.total")); + } + + public void testNameElementRegex() { + MetricNameValidator.validate("es.somemodulename0.somemetric.total"); + MetricNameValidator.validate("es.some_module_name0.somemetric.total"); + MetricNameValidator.validate("es.s.somemetric.total"); + + expectThrows(IllegalArgumentException.class, () -> MetricNameValidator.validate("es.someModuleName0.somemetric.total")); + expectThrows(IllegalArgumentException.class, () -> MetricNameValidator.validate("es.SomeModuleName.somemetric.total")); + expectThrows(IllegalArgumentException.class, () -> MetricNameValidator.validate("es.0some_module_name0.somemetric.total")); + expectThrows(IllegalArgumentException.class, () -> MetricNameValidator.validate("es.some_#_name0.somemetric.total")); + expectThrows(IllegalArgumentException.class, () -> MetricNameValidator.validate("es.some-name0.somemetric.total")); + } + + public void testNameHas3Elements() { + MetricNameValidator.validate("es.group.total"); + MetricNameValidator.validate("es.group.subgroup.total"); + + expectThrows(IllegalArgumentException.class, () -> MetricNameValidator.validate("es")); + expectThrows(IllegalArgumentException.class, () -> MetricNameValidator.validate("es.")); + expectThrows(IllegalArgumentException.class, () -> MetricNameValidator.validate("es.sth")); + } + + public void testNumberOfElementsLimit() { + MetricNameValidator.validate("es.a2.a3.a4.a5.a6.a7.a8.a9.total"); + + expectThrows(IllegalArgumentException.class, () -> MetricNameValidator.validate("es.a2.a3.a4.a5.a6.a7.a8.a9.a10.total")); + } + + public void testElementLengthLimit() { + MetricNameValidator.validate("es." + "a".repeat(MetricNameValidator.MAX_ELEMENT_LENGTH) + ".total"); + + expectThrows( + IllegalArgumentException.class, + () -> MetricNameValidator.validate("es." + "a".repeat(MetricNameValidator.MAX_ELEMENT_LENGTH + 1) + ".total") + ); + } + + public void testLastElementAllowList() { + for (String suffix : MetricNameValidator.ALLOWED_SUFFIXES) { + MetricNameValidator.validate("es.somemodule.somemetric." 
+ suffix); + } + expectThrows(IllegalArgumentException.class, () -> MetricNameValidator.validate("es.somemodule.somemetric.some_other_suffix")); + } + + public static String metricNameWithLength(int length) { + int prefixAndSuffix = "es.".length() + ".utilization".length(); + assert length > prefixAndSuffix : "length too short"; + + var remainingChars = length - prefixAndSuffix; + StringBuilder metricName = new StringBuilder("es."); + var i = 0; + while (i < remainingChars) { + metricName.append("a"); + i++; + for (int j = 0; j < MetricNameValidator.MAX_ELEMENT_LENGTH - 1 && i < remainingChars; j++) { + metricName.append("x"); + i++; + } + metricName.append("."); + i++; + + } + metricName.append("utilization"); + return metricName.toString(); + } +} diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/AsyncCountersAdapterTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/AsyncCountersAdapterTests.java index 3e23b741e01e5..24b40063cd636 100644 --- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/AsyncCountersAdapterTests.java +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/AsyncCountersAdapterTests.java @@ -38,7 +38,7 @@ public void init() { // testing that a value reported is then used in a callback public void testLongAsyncCounter() throws Exception { AtomicReference<LongWithAttributes> attrs = new AtomicReference<>(); - LongAsyncCounter longAsyncCounter = registry.registerLongAsyncCounter("name", "desc", "unit", attrs::get); + LongAsyncCounter longAsyncCounter = registry.registerLongAsyncCounter("es.test.name.total", "desc", "unit", attrs::get); attrs.set(new LongWithAttributes(1L, Map.of("k", 1L))); @@ -70,7 +70,7 @@ public void testLongAsyncCounter() throws Exception { public void testDoubleAsyncAdapter() throws Exception { AtomicReference<DoubleWithAttributes> attrs = new AtomicReference<>(); - DoubleAsyncCounter doubleAsyncCounter = registry.registerDoubleAsyncCounter("name", "desc", "unit", attrs::get); + DoubleAsyncCounter doubleAsyncCounter = registry.registerDoubleAsyncCounter("es.test.name.total", "desc", "unit", attrs::get); attrs.set(new DoubleWithAttributes(1.0, Map.of("k", 1.0))); @@ -102,7 +102,7 @@ public void testDoubleAsyncAdapter() throws Exception { public void testNullGaugeRecord() throws Exception { DoubleAsyncCounter dcounter = registry.registerDoubleAsyncCounter( - "name", + "es.test.name.total", "desc", "unit", new AtomicReference<DoubleWithAttributes>()::get ); otelMeter.collectMetrics(); List<Measurement> metrics = otelMeter.getRecorder().getMeasurements(dcounter); assertThat(metrics, hasSize(0)); LongAsyncCounter lcounter = registry.registerLongAsyncCounter( - "name", + "es.test.name.total", "desc", "unit", new AtomicReference<LongWithAttributes>()::get diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/GaugeAdapterTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/GaugeAdapterTests.java index 10f2d58768d48..d5e605df1d096 100644 --- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/GaugeAdapterTests.java +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/GaugeAdapterTests.java @@ -38,7 +38,7 @@ public void init() { // testing that a value reported is then used in a callback public void testLongGaugeRecord() throws Exception { AtomicReference<LongWithAttributes> attrs = new AtomicReference<>(); - LongGauge gauge = registry.registerLongGauge("name", "desc", "unit", attrs::get); + LongGauge gauge =
registry.registerLongGauge("es.test.name.total", "desc", "unit", attrs::get); attrs.set(new LongWithAttributes(1L, Map.of("k", 1L))); @@ -71,7 +71,7 @@ public void testLongGaugeRecord() throws Exception { // testing that a value reported is then used in a callback public void testDoubleGaugeRecord() throws Exception { AtomicReference<DoubleWithAttributes> attrs = new AtomicReference<>(); - DoubleGauge gauge = registry.registerDoubleGauge("name", "desc", "unit", attrs::get); + DoubleGauge gauge = registry.registerDoubleGauge("es.test.name.total", "desc", "unit", attrs::get); attrs.set(new DoubleWithAttributes(1.0d, Map.of("k", 1L))); @@ -102,12 +102,17 @@ public void testDoubleGaugeRecord() throws Exception { } public void testNullGaugeRecord() throws Exception { - DoubleGauge dgauge = registry.registerDoubleGauge("name", "desc", "unit", new AtomicReference()::get); + DoubleGauge dgauge = registry.registerDoubleGauge( + "es.test.name.total", + "desc", + "unit", + new AtomicReference<DoubleWithAttributes>()::get + ); otelMeter.collectMetrics(); List<Measurement> metrics = otelMeter.getRecorder().getMeasurements(dgauge); assertThat(metrics, hasSize(0)); - LongGauge lgauge = registry.registerLongGauge("name", "desc", "unit", new AtomicReference()::get); + LongGauge lgauge = registry.registerLongGauge("es.test.name.total", "desc", "unit", new AtomicReference<LongWithAttributes>()::get); otelMeter.collectMetrics(); metrics = otelMeter.getRecorder().getMeasurements(lgauge); assertThat(metrics, hasSize(0)); diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java index 59f65032272df..f8503bca3ec67 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java @@ -38,13 +38,13 @@ import java.util.concurrent.LinkedBlockingQueue; import static org.elasticsearch.repositories.RepositoriesMetrics.HTTP_REQUEST_TIME_IN_MICROS_HISTOGRAM; -import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_EXCEPTIONS_COUNT; import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_EXCEPTIONS_HISTOGRAM; -import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_OPERATIONS_COUNT; -import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_REQUESTS_COUNT; -import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_THROTTLES_COUNT; +import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_EXCEPTIONS_TOTAL; +import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_OPERATIONS_TOTAL; +import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_REQUESTS_TOTAL; import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_THROTTLES_HISTOGRAM; +import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_THROTTLES_TOTAL; +import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_UNSUCCESSFUL_OPERATIONS_TOTAL; import static org.elasticsearch.rest.RestStatus.INTERNAL_SERVER_ERROR; import static org.elasticsearch.rest.RestStatus.NOT_FOUND; import static org.elasticsearch.rest.RestStatus.TOO_MANY_REQUESTS; @@ -104,11 +104,11 @@ public void
testMetricsWithErrors() throws IOException { final long batch = i + 1; addErrorStatus(INTERNAL_SERVER_ERROR, TOO_MANY_REQUESTS, TOO_MANY_REQUESTS); blobContainer.writeBlob(purpose, blobName, new BytesArray("blob"), false); - assertThat(getLongCounterValue(plugin, METRIC_REQUESTS_COUNT, Operation.PUT_OBJECT), equalTo(4L * batch)); - assertThat(getLongCounterValue(plugin, METRIC_OPERATIONS_COUNT, Operation.PUT_OBJECT), equalTo(batch)); - assertThat(getLongCounterValue(plugin, METRIC_UNSUCCESSFUL_OPERATIONS_COUNT, Operation.PUT_OBJECT), equalTo(0L)); - assertThat(getLongCounterValue(plugin, METRIC_EXCEPTIONS_COUNT, Operation.PUT_OBJECT), equalTo(batch)); - assertThat(getLongCounterValue(plugin, METRIC_THROTTLES_COUNT, Operation.PUT_OBJECT), equalTo(2L * batch)); + assertThat(getLongCounterValue(plugin, METRIC_REQUESTS_TOTAL, Operation.PUT_OBJECT), equalTo(4L * batch)); + assertThat(getLongCounterValue(plugin, METRIC_OPERATIONS_TOTAL, Operation.PUT_OBJECT), equalTo(batch)); + assertThat(getLongCounterValue(plugin, METRIC_UNSUCCESSFUL_OPERATIONS_TOTAL, Operation.PUT_OBJECT), equalTo(0L)); + assertThat(getLongCounterValue(plugin, METRIC_EXCEPTIONS_TOTAL, Operation.PUT_OBJECT), equalTo(batch)); + assertThat(getLongCounterValue(plugin, METRIC_THROTTLES_TOTAL, Operation.PUT_OBJECT), equalTo(2L * batch)); assertThat(getLongHistogramValue(plugin, METRIC_EXCEPTIONS_HISTOGRAM, Operation.PUT_OBJECT), equalTo(batch)); assertThat(getLongHistogramValue(plugin, METRIC_THROTTLES_HISTOGRAM, Operation.PUT_OBJECT), equalTo(2L * batch)); assertThat(getNumberOfMeasurements(plugin, HTTP_REQUEST_TIME_IN_MICROS_HISTOGRAM, Operation.PUT_OBJECT), equalTo(batch)); @@ -124,11 +124,11 @@ public void testMetricsWithErrors() throws IOException { } catch (Exception e) { // intentional failure } - assertThat(getLongCounterValue(plugin, METRIC_REQUESTS_COUNT, Operation.GET_OBJECT), equalTo(2L * batch)); - assertThat(getLongCounterValue(plugin, METRIC_OPERATIONS_COUNT, Operation.GET_OBJECT), equalTo(batch)); - assertThat(getLongCounterValue(plugin, METRIC_UNSUCCESSFUL_OPERATIONS_COUNT, Operation.GET_OBJECT), equalTo(batch)); - assertThat(getLongCounterValue(plugin, METRIC_EXCEPTIONS_COUNT, Operation.GET_OBJECT), equalTo(batch)); - assertThat(getLongCounterValue(plugin, METRIC_THROTTLES_COUNT, Operation.GET_OBJECT), equalTo(batch)); + assertThat(getLongCounterValue(plugin, METRIC_REQUESTS_TOTAL, Operation.GET_OBJECT), equalTo(2L * batch)); + assertThat(getLongCounterValue(plugin, METRIC_OPERATIONS_TOTAL, Operation.GET_OBJECT), equalTo(batch)); + assertThat(getLongCounterValue(plugin, METRIC_UNSUCCESSFUL_OPERATIONS_TOTAL, Operation.GET_OBJECT), equalTo(batch)); + assertThat(getLongCounterValue(plugin, METRIC_EXCEPTIONS_TOTAL, Operation.GET_OBJECT), equalTo(batch)); + assertThat(getLongCounterValue(plugin, METRIC_THROTTLES_TOTAL, Operation.GET_OBJECT), equalTo(batch)); assertThat(getLongHistogramValue(plugin, METRIC_EXCEPTIONS_HISTOGRAM, Operation.GET_OBJECT), equalTo(batch)); assertThat(getLongHistogramValue(plugin, METRIC_THROTTLES_HISTOGRAM, Operation.GET_OBJECT), equalTo(batch)); assertThat(getNumberOfMeasurements(plugin, HTTP_REQUEST_TIME_IN_MICROS_HISTOGRAM, Operation.GET_OBJECT), equalTo(batch)); @@ -144,11 +144,11 @@ public void testMetricsWithErrors() throws IOException { } catch (Exception e) { // intentional failure } - assertThat(getLongCounterValue(plugin, METRIC_REQUESTS_COUNT, Operation.LIST_OBJECTS), equalTo(5L * batch)); - assertThat(getLongCounterValue(plugin, METRIC_OPERATIONS_COUNT, Operation.LIST_OBJECTS), 
equalTo(batch)); - assertThat(getLongCounterValue(plugin, METRIC_UNSUCCESSFUL_OPERATIONS_COUNT, Operation.LIST_OBJECTS), equalTo(batch)); - assertThat(getLongCounterValue(plugin, METRIC_EXCEPTIONS_COUNT, Operation.LIST_OBJECTS), equalTo(batch)); - assertThat(getLongCounterValue(plugin, METRIC_THROTTLES_COUNT, Operation.LIST_OBJECTS), equalTo(5L * batch)); + assertThat(getLongCounterValue(plugin, METRIC_REQUESTS_TOTAL, Operation.LIST_OBJECTS), equalTo(5L * batch)); + assertThat(getLongCounterValue(plugin, METRIC_OPERATIONS_TOTAL, Operation.LIST_OBJECTS), equalTo(batch)); + assertThat(getLongCounterValue(plugin, METRIC_UNSUCCESSFUL_OPERATIONS_TOTAL, Operation.LIST_OBJECTS), equalTo(batch)); + assertThat(getLongCounterValue(plugin, METRIC_EXCEPTIONS_TOTAL, Operation.LIST_OBJECTS), equalTo(batch)); + assertThat(getLongCounterValue(plugin, METRIC_THROTTLES_TOTAL, Operation.LIST_OBJECTS), equalTo(5L * batch)); assertThat(getLongHistogramValue(plugin, METRIC_EXCEPTIONS_HISTOGRAM, Operation.LIST_OBJECTS), equalTo(batch)); assertThat(getLongHistogramValue(plugin, METRIC_THROTTLES_HISTOGRAM, Operation.LIST_OBJECTS), equalTo(5L * batch)); assertThat(getNumberOfMeasurements(plugin, HTTP_REQUEST_TIME_IN_MICROS_HISTOGRAM, Operation.LIST_OBJECTS), equalTo(batch)); @@ -156,11 +156,11 @@ public void testMetricsWithErrors() throws IOException { // Delete to clean up blobContainer.deleteBlobsIgnoringIfNotExists(purpose, Iterators.single(blobName)); - assertThat(getLongCounterValue(plugin, METRIC_REQUESTS_COUNT, Operation.DELETE_OBJECTS), equalTo(1L)); - assertThat(getLongCounterValue(plugin, METRIC_OPERATIONS_COUNT, Operation.DELETE_OBJECTS), equalTo(1L)); - assertThat(getLongCounterValue(plugin, METRIC_UNSUCCESSFUL_OPERATIONS_COUNT, Operation.DELETE_OBJECTS), equalTo(0L)); - assertThat(getLongCounterValue(plugin, METRIC_EXCEPTIONS_COUNT, Operation.DELETE_OBJECTS), equalTo(0L)); - assertThat(getLongCounterValue(plugin, METRIC_THROTTLES_COUNT, Operation.DELETE_OBJECTS), equalTo(0L)); + assertThat(getLongCounterValue(plugin, METRIC_REQUESTS_TOTAL, Operation.DELETE_OBJECTS), equalTo(1L)); + assertThat(getLongCounterValue(plugin, METRIC_OPERATIONS_TOTAL, Operation.DELETE_OBJECTS), equalTo(1L)); + assertThat(getLongCounterValue(plugin, METRIC_UNSUCCESSFUL_OPERATIONS_TOTAL, Operation.DELETE_OBJECTS), equalTo(0L)); + assertThat(getLongCounterValue(plugin, METRIC_EXCEPTIONS_TOTAL, Operation.DELETE_OBJECTS), equalTo(0L)); + assertThat(getLongCounterValue(plugin, METRIC_THROTTLES_TOTAL, Operation.DELETE_OBJECTS), equalTo(0L)); assertThat(getLongHistogramValue(plugin, METRIC_EXCEPTIONS_HISTOGRAM, Operation.DELETE_OBJECTS), equalTo(0L)); assertThat(getLongHistogramValue(plugin, METRIC_THROTTLES_HISTOGRAM, Operation.DELETE_OBJECTS), equalTo(0L)); assertThat(getNumberOfMeasurements(plugin, HTTP_REQUEST_TIME_IN_MICROS_HISTOGRAM, Operation.DELETE_OBJECTS), equalTo(1L)); diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index 29342a7f5ea92..e70151cbdf8ee 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -75,7 +75,7 @@ import java.util.stream.Collectors; import java.util.stream.StreamSupport; -import static 
org.elasticsearch.repositories.RepositoriesMetrics.METRIC_REQUESTS_COUNT; +import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_REQUESTS_TOTAL; import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomNonDataPurpose; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @@ -267,7 +267,7 @@ public void testMetrics() throws Exception { .filterPlugins(TestTelemetryPlugin.class) .toList(); assertThat(plugins, hasSize(1)); - final List<Measurement> metrics = Measurement.combine(plugins.get(0).getLongCounterMeasurement(METRIC_REQUESTS_COUNT)); + final List<Measurement> metrics = Measurement.combine(plugins.get(0).getLongCounterMeasurement(METRIC_REQUESTS_TOTAL)); assertThat( statsCollectors.keySet().stream().map(S3BlobStore.StatsKey::operation).collect(Collectors.toSet()), diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java index dc3cbfa8b5ae8..95b0d23b564a2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java @@ -98,24 +98,24 @@ public DesiredBalanceReconciler(ClusterSettings clusterSettings, ThreadPool thre unassignedShards = LongGaugeMetric.create( meterRegistry, - "es.allocator.desired_balance.shards.unassigned", - "Unassigned shards count", + "es.allocator.desired_balance.shards.unassigned.current", + "Current number of unassigned shards", "{shard}" ); totalAllocations = LongGaugeMetric.create( meterRegistry, - "es.allocator.desired_balance.shards.count", - "Total shards count", + "es.allocator.desired_balance.shards.current", + "Total number of shards", "{shard}" ); undesiredAllocations = LongGaugeMetric.create( meterRegistry, - "es.allocator.desired_balance.allocations.undesired", - "Count of shards allocated on undesired nodes", + "es.allocator.desired_balance.allocations.undesired.current", + "Total number of shards allocated on undesired nodes", "{shard}" ); undesiredAllocationsRatio = meterRegistry.registerDoubleGauge( - "es.allocator.desired_balance.allocations.undesired_ratio", + "es.allocator.desired_balance.allocations.undesired.ratio", "Ratio of undesired allocations to shard count", "1", () -> { diff --git a/server/src/main/java/org/elasticsearch/health/HealthPeriodicLogger.java b/server/src/main/java/org/elasticsearch/health/HealthPeriodicLogger.java index 878930c2962d0..288837fb3c808 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthPeriodicLogger.java +++ b/server/src/main/java/org/elasticsearch/health/HealthPeriodicLogger.java @@ -204,7 +204,10 @@ private HealthPeriodicLogger( this.logWriter = logWriter == null ?
logger::info : logWriter; // create metric for overall level metrics - this.redMetrics.put("overall", LongGaugeMetric.create(this.meterRegistry, "es.health.overall.red", "Overall: Red", "{cluster}")); + this.redMetrics.put( + "overall", + LongGaugeMetric.create(this.meterRegistry, "es.health.overall.red.status", "Overall: Red", "{cluster}") + ); } private void registerListeners() { diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java index a7d93ec7e7d80..476ad516aab80 100644 --- a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java @@ -116,7 +116,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.translog.operations.count", + "es.translog.operations.total", "Number of transaction log operations.", "operation", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().estimatedNumberOfOperations()) @@ -134,7 +134,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.translog.uncommitted_operations.count", + "es.translog.uncommitted_operations.total", "Number of uncommitted transaction log operations.", "operations", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getUncommittedOperations()) @@ -224,7 +224,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.indexing.docs.count", + "es.indexing.docs.total", "Current number of indexing documents", "documents", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getIndexCurrent()) @@ -251,7 +251,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.indices.deletion.docs.count", + "es.indices.deletion.docs.total", "Current number of deleting documents", "documents", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getDeleteCurrent()) @@ -323,7 +323,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.indexing.coordinating_operations.count", + "es.indexing.coordinating_operations.total", "Current number of coordinating operations", "operations", () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getCurrentCoordinatingOps()) @@ -368,7 +368,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.indexing.primary_operations.count", + "es.indexing.primary_operations.total", "Current number of primary operations", "operations", () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getCurrentPrimaryOps()) diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java index 8442cf8c4a341..b4d79d89ec4c6 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java @@ -25,22 +25,22 @@ public record RepositoriesMetrics( public static RepositoriesMetrics NOOP = new RepositoriesMetrics(MeterRegistry.NOOP); - public static final String METRIC_REQUESTS_COUNT = "es.repositories.requests.count"; - public static final String 
METRIC_EXCEPTIONS_COUNT = "es.repositories.exceptions.count"; - public static final String METRIC_THROTTLES_COUNT = "es.repositories.throttles.count"; - public static final String METRIC_OPERATIONS_COUNT = "es.repositories.operations.count"; - public static final String METRIC_UNSUCCESSFUL_OPERATIONS_COUNT = "es.repositories.operations.unsuccessful.count"; + public static final String METRIC_REQUESTS_TOTAL = "es.repositories.requests.total"; + public static final String METRIC_EXCEPTIONS_TOTAL = "es.repositories.exceptions.total"; + public static final String METRIC_THROTTLES_TOTAL = "es.repositories.throttles.total"; + public static final String METRIC_OPERATIONS_TOTAL = "es.repositories.operations.total"; + public static final String METRIC_UNSUCCESSFUL_OPERATIONS_TOTAL = "es.repositories.operations.unsuccessful.total"; public static final String METRIC_EXCEPTIONS_HISTOGRAM = "es.repositories.exceptions.histogram"; public static final String METRIC_THROTTLES_HISTOGRAM = "es.repositories.throttles.histogram"; public static final String HTTP_REQUEST_TIME_IN_MICROS_HISTOGRAM = "es.repositories.requests.http_request_time.histogram"; public RepositoriesMetrics(MeterRegistry meterRegistry) { this( - meterRegistry.registerLongCounter(METRIC_REQUESTS_COUNT, "repository request counter", "unit"), - meterRegistry.registerLongCounter(METRIC_EXCEPTIONS_COUNT, "repository request exception counter", "unit"), - meterRegistry.registerLongCounter(METRIC_THROTTLES_COUNT, "repository request throttle counter", "unit"), - meterRegistry.registerLongCounter(METRIC_OPERATIONS_COUNT, "repository operation counter", "unit"), - meterRegistry.registerLongCounter(METRIC_UNSUCCESSFUL_OPERATIONS_COUNT, "repository unsuccessful operation counter", "unit"), + meterRegistry.registerLongCounter(METRIC_REQUESTS_TOTAL, "repository request counter", "unit"), + meterRegistry.registerLongCounter(METRIC_EXCEPTIONS_TOTAL, "repository request exception counter", "unit"), + meterRegistry.registerLongCounter(METRIC_THROTTLES_TOTAL, "repository request throttle counter", "unit"), + meterRegistry.registerLongCounter(METRIC_OPERATIONS_TOTAL, "repository operation counter", "unit"), + meterRegistry.registerLongCounter(METRIC_UNSUCCESSFUL_OPERATIONS_TOTAL, "repository unsuccessful operation counter", "unit"), meterRegistry.registerLongHistogram(METRIC_EXCEPTIONS_HISTOGRAM, "repository request exception histogram", "unit"), meterRegistry.registerLongHistogram(METRIC_THROTTLES_HISTOGRAM, "repository request throttle histogram", "unit"), meterRegistry.registerLongHistogram( diff --git a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java index 93d08fbccd376..ddcd667b9cbe7 100644 --- a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java +++ b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java @@ -60,19 +60,19 @@ protected String getTestRestCluster() { public void testApmIntegration() throws Exception { Map>> sampleAssertions = new HashMap<>( Map.ofEntries( - assertion(TestMeterUsages.VERY_LONG_NAME, m -> (Double) m.get("value"), closeTo(1.0, 0.001)), - assertion("testLongCounter", m -> (Double) m.get("value"), closeTo(1.0, 0.001)), - assertion("testAsyncDoubleCounter", m -> (Double) m.get("value"), closeTo(1.0, 0.001)), - 
assertion("testAsyncLongCounter", m -> (Integer) m.get("value"), equalTo(1)), - assertion("testDoubleGauge", m -> (Double) m.get("value"), closeTo(1.0, 0.001)), - assertion("testLongGauge", m -> (Integer) m.get("value"), equalTo(1)), + assertion("es.test.long_counter.total", m -> (Double) m.get("value"), closeTo(1.0, 0.001)), + assertion("es.test.double_counter.total", m -> (Double) m.get("value"), closeTo(1.0, 0.001)), + assertion("es.test.async_double_counter.total", m -> (Double) m.get("value"), closeTo(1.0, 0.001)), + assertion("es.test.async_long_counter.total", m -> (Integer) m.get("value"), equalTo(1)), + assertion("es.test.double_gauge.current", m -> (Double) m.get("value"), closeTo(1.0, 0.001)), + assertion("es.test.long_gauge.current", m -> (Integer) m.get("value"), equalTo(1)), assertion( - "testDoubleHistogram", + "es.test.double_histogram.histogram", m -> ((Collection) m.get("counts")).stream().mapToInt(Integer::intValue).sum(), equalTo(2) ), assertion( - "testLongHistogram", + "es.test.long_histogram.histogram", m -> ((Collection) m.get("counts")).stream().mapToInt(Integer::intValue).sum(), equalTo(2) ) diff --git a/test/external-modules/apm-integration/src/main/java/org/elasticsearch/test/apmintegration/TestMeterUsages.java b/test/external-modules/apm-integration/src/main/java/org/elasticsearch/test/apmintegration/TestMeterUsages.java index 8a71738a0b420..9c23ce371e044 100644 --- a/test/external-modules/apm-integration/src/main/java/org/elasticsearch/test/apmintegration/TestMeterUsages.java +++ b/test/external-modules/apm-integration/src/main/java/org/elasticsearch/test/apmintegration/TestMeterUsages.java @@ -26,18 +26,17 @@ public class TestMeterUsages { private final LongHistogram longHistogram; private final AtomicReference doubleWithAttributes = new AtomicReference<>(); private final AtomicReference longWithAttributes = new AtomicReference<>(); - public static String VERY_LONG_NAME = "a1234567890123456789012345678901234567890123456789012345678901234567890"; public TestMeterUsages(MeterRegistry meterRegistry) { - this.doubleCounter = meterRegistry.registerDoubleCounter(VERY_LONG_NAME, "test", "unit"); - this.longCounter = meterRegistry.registerDoubleCounter("testLongCounter", "test", "unit"); - this.doubleHistogram = meterRegistry.registerDoubleHistogram("testDoubleHistogram", "test", "unit"); - this.longHistogram = meterRegistry.registerLongHistogram("testLongHistogram", "test", "unit"); - meterRegistry.registerDoubleGauge("testDoubleGauge", "test", "unit", doubleWithAttributes::get); - meterRegistry.registerLongGauge("testLongGauge", "test", "unit", longWithAttributes::get); - - meterRegistry.registerLongAsyncCounter("testAsyncLongCounter", "test", "unit", longWithAttributes::get); - meterRegistry.registerDoubleAsyncCounter("testAsyncDoubleCounter", "test", "unit", doubleWithAttributes::get); + this.doubleCounter = meterRegistry.registerDoubleCounter("es.test.long_counter.total", "test", "unit"); + this.longCounter = meterRegistry.registerDoubleCounter("es.test.double_counter.total", "test", "unit"); + this.doubleHistogram = meterRegistry.registerDoubleHistogram("es.test.double_histogram.histogram", "test", "unit"); + this.longHistogram = meterRegistry.registerLongHistogram("es.test.long_histogram.histogram", "test", "unit"); + meterRegistry.registerDoubleGauge("es.test.double_gauge.current", "test", "unit", doubleWithAttributes::get); + meterRegistry.registerLongGauge("es.test.long_gauge.current", "test", "unit", longWithAttributes::get); + + 
meterRegistry.registerLongAsyncCounter("es.test.async_long_counter.total", "test", "unit", longWithAttributes::get); + meterRegistry.registerDoubleAsyncCounter("es.test.async_double_counter.total", "test", "unit", doubleWithAttributes::get); } public void testUponRequest() { diff --git a/x b/x new file mode 100644 index 0000000000000..444b7eef651c6 --- /dev/null +++ b/x @@ -0,0 +1,320 @@ +➜ elasticsearch git:(metric_name_validation) ./gradlew run -Dtests.es.logger.org.elasticsearch.telemetry.apm=debug
+[320-line `./gradlew run` console capture, condensed: the Gradle 8.5 build banner; startup of a single-node 8.13.0-SNAPSHOT cluster on Mac OS X 14.1.2 (aarch64); roughly eighty "loaded module [...]" lines; APMMeterRegistry DEBUG lines of the form "Registering an instrument with name: ..." for the es.allocator.*, es.breaker.*, es.test.*, es.blob_cache.*, es.repositories.*, es.health.* and es.node.stats.* instruments (the test async counters appear here with their pre-rename .count suffix, e.g. es.test.async_long_counter.count); master election, cluster UUID assignment, and index template, ingest pipeline and ILM policy registration; ending with "license [basic] - valid" and the build idling at 99% EXECUTING on "> :run"]
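A note on the capture above: each MeterRegistry register* call is echoed by an APMMeterRegistry "Registering an instrument with name: ..." DEBUG line, and the MlMetrics hunks below rename a batch of those instruments. A minimal sketch of the registration shape, assuming simplified stand-ins for the telemetry types (the MeterRegistry and LongWithAttributes below are illustrative, not the real plugin interfaces):

import java.util.Map;
import java.util.function.Supplier;

public class RegisterInstrumentSketch {
    // Simplified stand-in for the telemetry value type.
    record LongWithAttributes(long value, Map<String, Object> attributes) {}

    // Simplified stand-in for the gauge-registration overload used in the diffs.
    interface MeterRegistry {
        void registerLongGauge(String name, String description, String unit, Supplier<LongWithAttributes> observer);
    }

    public static void main(String[] args) {
        // Prints one line per instrument, mirroring the DEBUG output in the capture.
        MeterRegistry meterRegistry = (name, description, unit, observer) ->
            System.out.println("Registering an instrument with name: " + name);

        long nativeMemFree = 0L; // placeholder value for the sketch
        meterRegistry.registerLongGauge(
            "es.ml.native_memory.free.size",       // post-rename, suffix-compliant name
            "Free ML native memory on this node.",
            "bytes",
            () -> new LongWithAttributes(nativeMemFree, Map.of())
        );
    }
}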
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlMetrics.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlMetrics.java index f2cedd4bf0f6b..35f73551c2c3a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlMetrics.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlMetrics.java @@ -103,7 +103,7 @@ public MlMetrics( private void registerMlNodeMetrics(MeterRegistry meterRegistry) { metrics.add( meterRegistry.registerLongGauge( - "es.ml.native_memory.limit", + "es.ml.native_memory.usage", "ML native memory limit on this node.", "bytes", () -> new LongWithAttributes(nativeMemLimit, Map.of()) @@ -111,7 +111,7 @@ private void registerMlNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.native_memory.usage.anomaly_detectors", + "es.ml.native_memory.usage.anomaly_detectors.usage", "ML native memory used by anomaly detection jobs on this node.", "bytes", () -> new LongWithAttributes(nativeMemAdUsage, Map.of()) @@ -119,7 +119,7 @@ private void registerMlNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.native_memory.usage.data_frame_analytics", + "es.ml.native_memory.usage.data_frame_analytics.usage", "ML native memory used by data frame analytics jobs on this node.", "bytes", () -> new LongWithAttributes(nativeMemDfaUsage, Map.of()) @@ -127,7 +127,7 @@ private void registerMlNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.native_memory.usage.trained_models", + "es.ml.native_memory.usage.trained_models.usage", "ML native memory used by trained models on this node.", "bytes", () -> new LongWithAttributes(nativeMemTrainedModelUsage, Map.of()) @@ -135,7 +135,7 @@ private void registerMlNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.native_memory.free", + "es.ml.native_memory.free.size", "Free ML native memory on this node.", "bytes", () -> new LongWithAttributes(nativeMemFree, Map.of()) @@ -146,7 +146,7 @@ private void registerMasterNodeMetrics(MeterRegistry meterRegistry) { metrics.add( meterRegistry.registerLongGauge( - "es.ml.anomaly_detectors.opening.count", + "es.ml.anomaly_detectors.opening.current", "Count of anomaly detection jobs in the opening state cluster-wide.", "jobs", () -> new LongWithAttributes(mlTaskStatusCounts.adOpeningCount, isMasterMap) @@ -154,7 +154,7 @@ private void registerMasterNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.anomaly_detectors.opened.count", + "es.ml.anomaly_detectors.opened.current", "Count of anomaly detection jobs in the opened state cluster-wide.", "jobs", () -> new LongWithAttributes(mlTaskStatusCounts.adOpenedCount, isMasterMap) @@ -162,7 +162,7 @@ private void registerMasterNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.anomaly_detectors.closing.count", +
"es.ml.anomaly_detectors.closing.current", "Count of anomaly detection jobs in the closing state cluster-wide.", "jobs", () -> new LongWithAttributes(mlTaskStatusCounts.adClosingCount, isMasterMap) @@ -170,7 +170,7 @@ private void registerMasterNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.anomaly_detectors.failed.count", + "es.ml.anomaly_detectors.failed.current", "Count of anomaly detection jobs in the failed state cluster-wide.", "jobs", () -> new LongWithAttributes(mlTaskStatusCounts.adFailedCount, isMasterMap) @@ -178,7 +178,7 @@ private void registerMasterNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.datafeeds.starting.count", + "es.ml.datafeeds.starting.current", "Count of datafeeds in the starting state cluster-wide.", "datafeeds", () -> new LongWithAttributes(mlTaskStatusCounts.datafeedStartingCount, isMasterMap) @@ -186,7 +186,7 @@ private void registerMasterNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.datafeeds.started.count", + "es.ml.datafeeds.started.current", "Count of datafeeds in the started state cluster-wide.", "datafeeds", () -> new LongWithAttributes(mlTaskStatusCounts.datafeedStartedCount, isMasterMap) @@ -194,7 +194,7 @@ private void registerMasterNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.datafeeds.stopping.count", + "es.ml.datafeeds.stopping.current", "Count of datafeeds in the stopping state cluster-wide.", "datafeeds", () -> new LongWithAttributes(mlTaskStatusCounts.datafeedStoppingCount, isMasterMap) @@ -202,7 +202,7 @@ private void registerMasterNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.data_frame_analytics.starting.count", + "es.ml.data_frame_analytics.starting.current", "Count of data frame analytics jobs in the starting state cluster-wide.", "jobs", () -> new LongWithAttributes(mlTaskStatusCounts.dfaStartingCount, isMasterMap) @@ -210,7 +210,7 @@ private void registerMasterNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.data_frame_analytics.started.count", + "es.ml.data_frame_analytics.started.current", "Count of data frame analytics jobs in the started state cluster-wide.", "jobs", () -> new LongWithAttributes(mlTaskStatusCounts.dfaStartedCount, isMasterMap) @@ -218,7 +218,7 @@ private void registerMasterNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.data_frame_analytics.reindexing.count", + "es.ml.data_frame_analytics.reindexing.current", "Count of data frame analytics jobs in the reindexing state cluster-wide.", "jobs", () -> new LongWithAttributes(mlTaskStatusCounts.dfaReindexingCount, isMasterMap) @@ -226,7 +226,7 @@ private void registerMasterNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.data_frame_analytics.analyzing.count", + "es.ml.data_frame_analytics.analyzing.current", "Count of data frame analytics jobs in the analyzing state cluster-wide.", "jobs", () -> new LongWithAttributes(mlTaskStatusCounts.dfaAnalyzingCount, isMasterMap) @@ -234,7 +234,7 @@ private void registerMasterNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.data_frame_analytics.stopping.count", + "es.ml.data_frame_analytics.stopping.current", "Count of data frame analytics jobs in the stopping 
state cluster-wide.", "jobs", () -> new LongWithAttributes(mlTaskStatusCounts.dfaStoppingCount, isMasterMap) @@ -242,7 +242,7 @@ private void registerMasterNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.data_frame_analytics.failed.count", + "es.ml.data_frame_analytics.failed.current", "Count of data frame analytics jobs in the failed state cluster-wide.", "jobs", () -> new LongWithAttributes(mlTaskStatusCounts.dfaFailedCount, isMasterMap) @@ -250,7 +250,7 @@ private void registerMasterNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.trained_models.deployment.target_allocations.count", + "es.ml.trained_models.deployment.target_allocations.current", "Sum of target trained model allocations across all deployments cluster-wide.", "allocations", () -> new LongWithAttributes(trainedModelAllocationCounts.trainedModelsTargetAllocations, isMasterMap) @@ -258,7 +258,7 @@ private void registerMasterNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.trained_models.deployment.current_allocations.count", + "es.ml.trained_models.deployment.current_allocations.current", "Sum of current trained model allocations across all deployments cluster-wide.", "allocations", () -> new LongWithAttributes(trainedModelAllocationCounts.trainedModelsCurrentAllocations, isMasterMap) @@ -266,7 +266,7 @@ private void registerMasterNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.trained_models.deployment.failed_allocations.count", + "es.ml.trained_models.deployment.failed_allocations.current", "Sum of failed trained model allocations across all deployments cluster-wide.", "allocations", () -> new LongWithAttributes(trainedModelAllocationCounts.trainedModelsFailedAllocations, isMasterMap)
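Taken together, the renames above follow one rule: these are point-in-time gauges, so their names must end in a gauge-style suffix such as .current, .size or .usage, while .count/.total endings stay reserved for monotonically increasing counters. A tiny illustrative check of that rule; the suffix allow-list is inferred from the renames in this patch (plus .ratio and .status seen in the startup capture), not taken from the branch's actual validator:

import java.util.Set;

public final class GaugeNameSuffixSketch {
    // Assumed allow-list, inferred from this patch; the real validation may differ.
    private static final Set<String> GAUGE_SUFFIXES = Set.of("current", "size", "usage", "ratio", "status");

    static boolean looksLikeGaugeName(String name) {
        return GAUGE_SUFFIXES.contains(name.substring(name.lastIndexOf('.') + 1));
    }

    public static void main(String[] args) {
        System.out.println(looksLikeGaugeName("es.ml.datafeeds.started.count"));   // false: reads like a counter
        System.out.println(looksLikeGaugeName("es.ml.datafeeds.started.current")); // true: renamed gauge
    }
}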
From eef97baff2a8cf518a5282b9fe88e9466c26c885 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 9 Jan 2024 10:11:17 +0000 Subject: [PATCH 31/47] Make IndexShard#openEngineAndRecoverFromTranslog async (#96774) Removes one of the remaining production instances of a sync call to `Engine#recoverFromTranslog`. Relates #96767, #96607. --- .../elasticsearch/index/shard/IndexShard.java | 40 ++++++++++--------- .../index/shard/StoreRecovery.java | 10 ++++- .../index/shard/IndexShardTests.java | 2 +- 3 files changed, 31 insertions(+), 21 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index d7d67b3af159e..aa6e3e1d45003 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -1970,24 +1970,28 @@ private void loadGlobalCheckpointToReplicationTracker() throws IOException { * opens the engine on top of the existing lucene engine and translog. * Operations from the translog will be replayed to bring lucene up to date. **/ - public void openEngineAndRecoverFromTranslog() throws IOException { - recoveryState.validateCurrentStage(RecoveryState.Stage.INDEX); - maybeCheckIndex(); - recoveryState.setLocalTranslogStage(); - final RecoveryState.Translog translogRecoveryStats = recoveryState.getTranslog(); - final Engine.TranslogRecoveryRunner translogRecoveryRunner = (engine, snapshot) -> { - translogRecoveryStats.totalOperations(snapshot.totalOperations()); - translogRecoveryStats.totalOperationsOnStart(snapshot.totalOperations()); - return runTranslogRecovery( - engine, - snapshot, - Engine.Operation.Origin.LOCAL_TRANSLOG_RECOVERY, - translogRecoveryStats::incrementRecoveredOperations - ); - }; - loadGlobalCheckpointToReplicationTracker(); - innerOpenEngineAndTranslog(replicationTracker); - getEngine().recoverFromTranslog(translogRecoveryRunner, Long.MAX_VALUE); + public void openEngineAndRecoverFromTranslog(ActionListener<Void> listener) { + try { + recoveryState.validateCurrentStage(RecoveryState.Stage.INDEX); + maybeCheckIndex(); + recoveryState.setLocalTranslogStage(); + final RecoveryState.Translog translogRecoveryStats = recoveryState.getTranslog(); + final Engine.TranslogRecoveryRunner translogRecoveryRunner = (engine, snapshot) -> { + translogRecoveryStats.totalOperations(snapshot.totalOperations()); + translogRecoveryStats.totalOperationsOnStart(snapshot.totalOperations()); + return runTranslogRecovery( + engine, + snapshot, + Engine.Operation.Origin.LOCAL_TRANSLOG_RECOVERY, + translogRecoveryStats::incrementRecoveredOperations + ); + }; + loadGlobalCheckpointToReplicationTracker(); + innerOpenEngineAndTranslog(replicationTracker); + getEngine().recoverFromTranslog(translogRecoveryRunner, Long.MAX_VALUE, listener); + } catch (Exception e) { + listener.onFailure(e); + } } /** diff --git a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java index bc5a4b02116a7..0acddcf0e45b2 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java @@ -482,7 +482,10 @@ private void internalRecoverFromStore(IndexShard indexShard, ActionListener<Boolean> outerListener) { - indexShard.openEngineAndRecoverFromTranslog(); + indexShard.openEngineAndRecoverFromTranslog(l); + }) + + .andThen((l, ignored) -> { indexShard.getEngine().fillSeqNoGaps(indexShard.getPendingPrimaryTerm()); indexShard.finalizeRecovery(); indexShard.postRecovery("post recovery from shard_store", l); @@ -583,7 +586,10 @@ record ShardAndIndexIds(IndexId indexId, ShardId shardId) {} bootstrap(indexShard, store); assert indexShard.shardRouting.primary() : "only primary shards can recover from store"; writeEmptyRetentionLeasesFile(indexShard); - indexShard.openEngineAndRecoverFromTranslog(); + indexShard.openEngineAndRecoverFromTranslog(l); + }) + + .andThen((l, ignored) -> { indexShard.getEngine().fillSeqNoGaps(indexShard.getPendingPrimaryTerm()); indexShard.finalizeRecovery(); indexShard.postRecovery("restore done", l);
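The hunks above show the standard blocking-to-listener conversion: openEngineAndRecoverFromTranslog now takes an ActionListener<Void>, any synchronous failure is routed to onFailure so the listener is never leaked, and StoreRecovery sequences the follow-up steps with SubscribableListener#andThen instead of running them inline after a blocking call. A stripped-down sketch of the pattern, with hypothetical doOpenEngine/doReplayTranslog/doFinalize steps standing in for the real engine calls (the test hunk below drives the async method through PlainActionFuture):

import java.io.IOException;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.SubscribableListener;

abstract class AsyncRecoverySketch {
    // Hypothetical recovery steps; placeholders, not the real IndexShard/Engine API.
    abstract void doOpenEngine() throws IOException;
    abstract void doReplayTranslog(ActionListener<Void> listener);
    abstract void doFinalize(ActionListener<Void> listener);

    // Same try/catch shape as the patch: a failure thrown before the async
    // step still completes the listener rather than escaping the caller.
    void openEngineAndRecoverAsync(ActionListener<Void> listener) {
        try {
            doOpenEngine();
            doReplayTranslog(listener); // completes the listener once replay finishes
        } catch (Exception e) {
            listener.onFailure(e);
        }
    }

    // Callers chain the follow-up step instead of running it inline.
    void recover(ActionListener<Void> outerListener) {
        SubscribableListener.<Void>newForked(this::openEngineAndRecoverAsync)
            .<Void>andThen((l, ignored) -> doFinalize(l))
            .addListener(outerListener);
    }
}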
diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 869143066bd7a..e6d6de16cff2c 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -2961,7 +2961,7 @@ public void testShardActiveDuringInternalRecovery() throws IOException { // Shard is still inactive since we haven't started recovering yet assertFalse(shard.isActive()); shard.recoveryState().getIndex().setFileDetailsComplete(); - shard.openEngineAndRecoverFromTranslog(); + PlainActionFuture.get(shard::openEngineAndRecoverFromTranslog, 30, TimeUnit.SECONDS); // Shard should now be active since we did recover: assertTrue(shard.isActive()); closeShards(shard); From 4bf3fc30451ff22985dfd181742bb3e0fe8f1433 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 9 Jan 2024 11:12:11 +0100 Subject: [PATCH 32/47] Move ShapeTestUtils and XShapeTestUtil to the test framework (#104061) --- .../java/org/elasticsearch/geo}/ShapeTestUtils.java | 12 ++++++------ .../java/org/elasticsearch}/geo/XShapeTestUtil.java | 5 ++++- .../spatial/search/CartesianShapeQueryTestCase.java | 2 +- .../spatial/search/ShapeQueryOverShapeTests.java | 2 +- .../aggregations/metrics/CartesianBoundsIT.java | 2 +- .../aggregations/metrics/CartesianCentroidIT.java | 2 +- .../fielddata/CartesianCentroidCalculatorTests.java | 2 +- .../mapper/CartesianShapeDocValuesQueryTests.java | 4 ++-- .../index/query/ShapeQueryBuilderOverPointTests.java | 2 +- .../index/query/ShapeQueryBuilderOverShapeTests.java | 2 +- .../metrics/CartesianBoundsAggregatorTests.java | 2 +- .../metrics/CartesianCentroidAggregatorTests.java | 2 +- .../metrics/CartesianShapeBoundsAggregatorTests.java | 2 +- .../CartesianShapeCentroidAggregatorTests.java | 2 +- .../metrics/InternalCartesianCentroidTests.java | 2 +- .../xpack/spatial/util/ShapeUtilTests.java | 3 ++- 16 files changed, 26 insertions(+), 22 deletions(-) rename {x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util => test/framework/src/main/java/org/elasticsearch/geo}/ShapeTestUtils.java (96%) rename {x-pack/plugin/spatial/src/test/java/org/apache/lucene => test/framework/src/main/java/org/elasticsearch}/geo/XShapeTestUtil.java (98%) diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util/ShapeTestUtils.java b/test/framework/src/main/java/org/elasticsearch/geo/ShapeTestUtils.java similarity index 96% rename from x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util/ShapeTestUtils.java rename to test/framework/src/main/java/org/elasticsearch/geo/ShapeTestUtils.java index 96667493de21c..1e21ad1acfd08 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util/ShapeTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/geo/ShapeTestUtils.java @@ -1,12 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1.
*/ -package org.elasticsearch.xpack.spatial.util; +package org.elasticsearch.geo; -import org.apache.lucene.geo.XShapeTestUtil; import org.apache.lucene.geo.XYCircle; import org.apache.lucene.geo.XYPolygon; import org.elasticsearch.geometry.Circle; @@ -117,7 +117,7 @@ public static Polygon randomPolygon(boolean hasAlt) { return new Polygon(linearRing(floatsToDoubles(lucenePolygon.getPolyX()), floatsToDoubles(lucenePolygon.getPolyY()), hasAlt)); } - static double area(XYPolygon p) { + public static double area(XYPolygon p) { double windingSum = 0; final int numPts = p.numPoints() - 1; for (int i = 0; i < numPts; i++) { @@ -127,7 +127,7 @@ static double area(XYPolygon p) { return Math.abs(windingSum / 2); } - static double[] floatsToDoubles(float[] f) { + public static double[] floatsToDoubles(float[] f) { double[] d = new double[f.length]; for (int i = 0; i < f.length; i++) { d[i] = f[i]; diff --git a/x-pack/plugin/spatial/src/test/java/org/apache/lucene/geo/XShapeTestUtil.java b/test/framework/src/main/java/org/elasticsearch/geo/XShapeTestUtil.java similarity index 98% rename from x-pack/plugin/spatial/src/test/java/org/apache/lucene/geo/XShapeTestUtil.java rename to test/framework/src/main/java/org/elasticsearch/geo/XShapeTestUtil.java index e9ebf4534693e..62b3edfe24193 100644 --- a/x-pack/plugin/spatial/src/test/java/org/apache/lucene/geo/XShapeTestUtil.java +++ b/test/framework/src/main/java/org/elasticsearch/geo/XShapeTestUtil.java @@ -15,11 +15,14 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.lucene.geo; +package org.elasticsearch.geo; import com.carrotsearch.randomizedtesting.RandomizedContext; import com.carrotsearch.randomizedtesting.generators.BiasedNumbers; +import org.apache.lucene.geo.XYCircle; +import org.apache.lucene.geo.XYPolygon; +import org.apache.lucene.geo.XYRectangle; import org.apache.lucene.tests.util.TestUtil; import java.util.ArrayList; diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/CartesianShapeQueryTestCase.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/CartesianShapeQueryTestCase.java index ca7f59b703f28..dd90112439cb9 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/CartesianShapeQueryTestCase.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/CartesianShapeQueryTestCase.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.geo.GeometryNormalizer; import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.GeometryCollection; import org.elasticsearch.geometry.Line; @@ -17,7 +18,6 @@ import org.elasticsearch.search.geo.BaseShapeQueryTestCase; import org.elasticsearch.search.geo.SpatialQueryBuilders; import org.elasticsearch.xpack.spatial.index.query.ShapeQueryBuilder; -import org.elasticsearch.xpack.spatial.util.ShapeTestUtils; import java.util.ArrayList; import java.util.Collections; diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java index 5e4b778d6c093..554c9ff2904dc 100644 --- 
a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; +import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.GeometryCollection; import org.elasticsearch.geometry.MultiPoint; @@ -25,7 +26,6 @@ import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.spatial.index.query.ShapeQueryBuilder; -import org.elasticsearch.xpack.spatial.util.ShapeTestUtils; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianBoundsIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianBoundsIT.java index 85e371023348f..b66daeaa820b5 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianBoundsIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianBoundsIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.spatial.search.aggregations.metrics; import org.elasticsearch.common.geo.SpatialPoint; +import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Point; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.aggregations.metrics.SpatialBounds; @@ -15,7 +16,6 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xpack.spatial.LocalStateSpatialPlugin; import org.elasticsearch.xpack.spatial.common.CartesianPoint; -import org.elasticsearch.xpack.spatial.util.ShapeTestUtils; import java.util.Collection; import java.util.Collections; diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianCentroidIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianCentroidIT.java index 7e1458f9d63b1..cc372eade8c90 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianCentroidIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianCentroidIT.java @@ -8,13 +8,13 @@ package org.elasticsearch.xpack.spatial.search.aggregations.metrics; import org.elasticsearch.common.geo.SpatialPoint; +import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Point; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.aggregations.metrics.CentroidAggregationTestBase; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xpack.spatial.LocalStateSpatialPlugin; import org.elasticsearch.xpack.spatial.common.CartesianPoint; -import org.elasticsearch.xpack.spatial.util.ShapeTestUtils; import java.util.Collection; import java.util.Collections; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianCentroidCalculatorTests.java 
b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianCentroidCalculatorTests.java index b776e27ce4cfb..4ce533515d20e 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianCentroidCalculatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianCentroidCalculatorTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.spatial.index.fielddata; +import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Line; import org.elasticsearch.geometry.MultiLine; import org.elasticsearch.geometry.MultiPoint; @@ -14,7 +15,6 @@ import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.Polygon; import org.elasticsearch.geometry.Rectangle; -import org.elasticsearch.xpack.spatial.util.ShapeTestUtils; public class CartesianCentroidCalculatorTests extends CentroidCalculatorTests { protected Point randomPoint() { diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeDocValuesQueryTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeDocValuesQueryTests.java index 6eab9964c6ea8..ae5a6f182274b 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeDocValuesQueryTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeDocValuesQueryTests.java @@ -10,7 +10,6 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.ShapeField; import org.apache.lucene.document.XYShape; -import org.apache.lucene.geo.XShapeTestUtil; import org.apache.lucene.geo.XYGeometry; import org.apache.lucene.geo.XYPolygon; import org.apache.lucene.geo.XYRectangle; @@ -26,11 +25,12 @@ import org.apache.lucene.tests.search.CheckHits; import org.apache.lucene.tests.search.QueryUtils; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.geo.ShapeTestUtils; +import org.elasticsearch.geo.XShapeTestUtil; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.spatial.common.ShapeUtils; import org.elasticsearch.xpack.spatial.index.fielddata.CoordinateEncoder; -import org.elasticsearch.xpack.spatial.util.ShapeTestUtils; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverPointTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverPointTests.java index 90b57b95b03e3..db67b1f1e998b 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverPointTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverPointTests.java @@ -10,10 +10,10 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.ShapeRelation; +import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.ShapeType; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.xpack.spatial.util.ShapeTestUtils; import java.io.IOException; diff --git 
a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverShapeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverShapeTests.java index f890947698a97..aa5ae72df2b9e 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverShapeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverShapeTests.java @@ -10,12 +10,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.ShapeRelation; +import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.ShapeType; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.SearchExecutionContext; -import org.elasticsearch.xpack.spatial.util.ShapeTestUtils; import java.io.IOException; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianBoundsAggregatorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianBoundsAggregatorTests.java index 8be04619b5d6f..f8285fdd0eef5 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianBoundsAggregatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianBoundsAggregatorTests.java @@ -15,6 +15,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Point; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.plugins.SearchPlugin; @@ -26,7 +27,6 @@ import org.elasticsearch.xpack.spatial.common.CartesianPoint; import org.elasticsearch.xpack.spatial.index.mapper.PointFieldMapper; import org.elasticsearch.xpack.spatial.search.aggregations.support.CartesianPointValuesSourceType; -import org.elasticsearch.xpack.spatial.util.ShapeTestUtils; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianCentroidAggregatorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianCentroidAggregatorTests.java index 6079452c9ca72..4e391c7dd236d 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianCentroidAggregatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianCentroidAggregatorTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.geo.SpatialPoint; +import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Point; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.plugins.SearchPlugin; @@ -25,7 +26,6 @@ import org.elasticsearch.xpack.spatial.common.CartesianPoint; import org.elasticsearch.xpack.spatial.index.mapper.PointFieldMapper; import 
org.elasticsearch.xpack.spatial.search.aggregations.support.CartesianPointValuesSourceType; -import org.elasticsearch.xpack.spatial.util.ShapeTestUtils; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeBoundsAggregatorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeBoundsAggregatorTests.java index f2ceea6c2e87c..8f479c7ed22c3 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeBoundsAggregatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeBoundsAggregatorTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.MultiPoint; import org.elasticsearch.geometry.Point; @@ -30,7 +31,6 @@ import org.elasticsearch.xpack.spatial.search.aggregations.support.CartesianPointValuesSourceType; import org.elasticsearch.xpack.spatial.search.aggregations.support.CartesianShapeValuesSourceType; import org.elasticsearch.xpack.spatial.util.GeoTestUtils; -import org.elasticsearch.xpack.spatial.util.ShapeTestUtils; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeCentroidAggregatorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeCentroidAggregatorTests.java index 5cd28233a776f..8ade6d8e5695a 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeCentroidAggregatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeCentroidAggregatorTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.geo.Orientation; import org.elasticsearch.common.geo.SpatialPoint; +import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.plugins.SearchPlugin; @@ -31,7 +32,6 @@ import org.elasticsearch.xpack.spatial.search.aggregations.support.CartesianPointValuesSourceType; import org.elasticsearch.xpack.spatial.search.aggregations.support.CartesianShapeValuesSourceType; import org.elasticsearch.xpack.spatial.util.GeoTestUtils; -import org.elasticsearch.xpack.spatial.util.ShapeTestUtils; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/InternalCartesianCentroidTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/InternalCartesianCentroidTests.java index 5a22706becf06..d033b3e51e31b 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/InternalCartesianCentroidTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/InternalCartesianCentroidTests.java @@ -10,6 +10,7 @@ 
import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.Maps; +import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Point; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.Aggregation; @@ -21,7 +22,6 @@ import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.spatial.LocalStateSpatialPlugin; import org.elasticsearch.xpack.spatial.common.CartesianPoint; -import org.elasticsearch.xpack.spatial.util.ShapeTestUtils; import java.util.Collections; import java.util.HashMap; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util/ShapeUtilTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util/ShapeUtilTests.java index 442c134c648b2..2b4bf58dd2211 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util/ShapeUtilTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util/ShapeUtilTests.java @@ -7,9 +7,10 @@ package org.elasticsearch.xpack.spatial.util; -import org.apache.lucene.geo.XShapeTestUtil; import org.apache.lucene.geo.XYPolygon; import org.apache.lucene.geo.XYRectangle; +import org.elasticsearch.geo.ShapeTestUtils; +import org.elasticsearch.geo.XShapeTestUtil; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.greaterThan; From 82a5d66a033815107304df787b1eefdfd30a97dc Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Tue, 9 Jan 2024 11:17:51 +0100 Subject: [PATCH 33/47] Add serverless scopes for Connector APIs (#104063) --- docs/changelog/104063.yaml | 5 +++++ .../connector/action/RestDeleteConnectorAction.java | 3 +++ .../application/connector/action/RestGetConnectorAction.java | 3 +++ .../connector/action/RestListConnectorAction.java | 3 +++ .../connector/action/RestPostConnectorAction.java | 3 +++ .../application/connector/action/RestPutConnectorAction.java | 3 +++ .../action/RestUpdateConnectorConfigurationAction.java | 3 +++ .../connector/action/RestUpdateConnectorErrorAction.java | 3 +++ .../connector/action/RestUpdateConnectorFilteringAction.java | 3 +++ .../connector/action/RestUpdateConnectorLastSeenAction.java | 3 +++ .../action/RestUpdateConnectorLastSyncStatsAction.java | 3 +++ .../connector/action/RestUpdateConnectorNameAction.java | 3 +++ .../connector/action/RestUpdateConnectorPipelineAction.java | 3 +++ .../action/RestUpdateConnectorSchedulingAction.java | 3 +++ .../syncjob/action/RestCancelConnectorSyncJobAction.java | 3 +++ .../syncjob/action/RestCheckInConnectorSyncJobAction.java | 3 +++ .../syncjob/action/RestDeleteConnectorSyncJobAction.java | 3 +++ .../syncjob/action/RestGetConnectorSyncJobAction.java | 3 +++ .../syncjob/action/RestListConnectorSyncJobsAction.java | 3 +++ .../syncjob/action/RestPostConnectorSyncJobAction.java | 3 +++ .../action/RestUpdateConnectorSyncJobErrorAction.java | 3 +++ .../RestUpdateConnectorSyncJobIngestionStatsAction.java | 3 +++ 22 files changed, 68 insertions(+) create mode 100644 docs/changelog/104063.yaml diff --git a/docs/changelog/104063.yaml b/docs/changelog/104063.yaml new file mode 100644 index 0000000000000..5f59022472c75 --- /dev/null +++ b/docs/changelog/104063.yaml @@ -0,0 +1,5 @@ +pr: 104063 +summary: Add serverless scopes for Connector APIs +area: Application +type: enhancement +issues: [] diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestDeleteConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestDeleteConnectorAction.java index 02153710a99a0..8030b9922eaa2 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestDeleteConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestDeleteConnectorAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; @@ -18,6 +20,7 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; +@ServerlessScope(Scope.PUBLIC) public class RestDeleteConnectorAction extends BaseRestHandler { @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestGetConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestGetConnectorAction.java index 50691bf4d5ea8..79922755e67ef 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestGetConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestGetConnectorAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; @@ -17,6 +19,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; +@ServerlessScope(Scope.PUBLIC) public class RestGetConnectorAction extends BaseRestHandler { @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestListConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestListConnectorAction.java index 59d984438ebf6..9c37e31944ac8 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestListConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestListConnectorAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; import org.elasticsearch.xpack.core.action.util.PageParams; @@ -19,6 +21,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; +@ServerlessScope(Scope.PUBLIC) public class RestListConnectorAction extends BaseRestHandler { @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestPostConnectorAction.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestPostConnectorAction.java index 9bfa3fd629567..2c5f1dda4e554 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestPostConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestPostConnectorAction.java @@ -11,6 +11,8 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; @@ -18,6 +20,7 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; +@ServerlessScope(Scope.PUBLIC) public class RestPostConnectorAction extends BaseRestHandler { @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestPutConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestPutConnectorAction.java index e87719943fc29..1d1254bfda3ce 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestPutConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestPutConnectorAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; @@ -17,6 +19,7 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; +@ServerlessScope(Scope.PUBLIC) public class RestPutConnectorAction extends BaseRestHandler { @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorConfigurationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorConfigurationAction.java index 12c96d212f77a..f4cc47da2f109 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorConfigurationAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorConfigurationAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; @@ -17,6 +19,7 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; +@ServerlessScope(Scope.PUBLIC) public class RestUpdateConnectorConfigurationAction extends BaseRestHandler { @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorErrorAction.java index 
8b4b70b994ec1..df56f5825f84e 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorErrorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorErrorAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; @@ -17,6 +19,7 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; +@ServerlessScope(Scope.PUBLIC) public class RestUpdateConnectorErrorAction extends BaseRestHandler { @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorFilteringAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorFilteringAction.java index 4908e9e09d73f..ae294dfebd111 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorFilteringAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorFilteringAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; @@ -17,6 +19,7 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; +@ServerlessScope(Scope.PUBLIC) public class RestUpdateConnectorFilteringAction extends BaseRestHandler { @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSeenAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSeenAction.java index c2c6ee12a7767..bef6c357fdda3 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSeenAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSeenAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; @@ -17,6 +19,7 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; +@ServerlessScope(Scope.PUBLIC) public class RestUpdateConnectorLastSeenAction extends BaseRestHandler { @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSyncStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSyncStatsAction.java index ff3ba53e34a9d..6275e84a28952 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSyncStatsAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSyncStatsAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; @@ -17,6 +19,7 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; +@ServerlessScope(Scope.PUBLIC) public class RestUpdateConnectorLastSyncStatsAction extends BaseRestHandler { @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNameAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNameAction.java index c51744e57b1df..7fbd42cbff272 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNameAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNameAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; @@ -17,6 +19,7 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; +@ServerlessScope(Scope.PUBLIC) public class RestUpdateConnectorNameAction extends BaseRestHandler { @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorPipelineAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorPipelineAction.java index 8192099b832dd..465414491bb95 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorPipelineAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorPipelineAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; @@ -17,6 +19,7 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; +@ServerlessScope(Scope.PUBLIC) public class RestUpdateConnectorPipelineAction extends BaseRestHandler { @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorSchedulingAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorSchedulingAction.java index fda9fa03af913..dfc12659d394b 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorSchedulingAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorSchedulingAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; @@ -17,6 +19,7 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; +@ServerlessScope(Scope.PUBLIC) public class RestUpdateConnectorSchedulingAction extends BaseRestHandler { @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCancelConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCancelConnectorSyncJobAction.java index 7cfce07aca48d..f55449ad33b86 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCancelConnectorSyncJobAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCancelConnectorSyncJobAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; import org.elasticsearch.xpack.application.connector.action.ConnectorUpdateActionResponse; @@ -19,6 +21,7 @@ import static org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction.Request.CONNECTOR_SYNC_JOB_ID_FIELD; +@ServerlessScope(Scope.PUBLIC) public class RestCancelConnectorSyncJobAction extends BaseRestHandler { private static final String CONNECTOR_SYNC_JOB_ID_PARAM = CONNECTOR_SYNC_JOB_ID_FIELD.getPreferredName(); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCheckInConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCheckInConnectorSyncJobAction.java index 882227e45169a..2c25252daf734 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCheckInConnectorSyncJobAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCheckInConnectorSyncJobAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; import org.elasticsearch.xpack.application.connector.action.ConnectorUpdateActionResponse; @@ -19,6 +21,7 @@ import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.CONNECTOR_SYNC_JOB_ID_PARAM; 
+@ServerlessScope(Scope.PUBLIC) public class RestCheckInConnectorSyncJobAction extends BaseRestHandler { @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestDeleteConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestDeleteConnectorSyncJobAction.java index c1f352a341cc3..07cbe6e3aac43 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestDeleteConnectorSyncJobAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestDeleteConnectorSyncJobAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; @@ -18,6 +20,7 @@ import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.CONNECTOR_SYNC_JOB_ID_PARAM; +@ServerlessScope(Scope.PUBLIC) public class RestDeleteConnectorSyncJobAction extends BaseRestHandler { @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestGetConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestGetConnectorSyncJobAction.java index 1f5606810757e..d1021281ff53d 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestGetConnectorSyncJobAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestGetConnectorSyncJobAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; @@ -18,6 +20,7 @@ import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.CONNECTOR_SYNC_JOB_ID_PARAM; +@ServerlessScope(Scope.PUBLIC) public class RestGetConnectorSyncJobAction extends BaseRestHandler { @Override public String getName() { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestListConnectorSyncJobsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestListConnectorSyncJobsAction.java index ef8851636be1b..bb3f55e603905 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestListConnectorSyncJobsAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestListConnectorSyncJobsAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import 
org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; @@ -19,6 +21,7 @@ import java.io.IOException; import java.util.List; +@ServerlessScope(Scope.PUBLIC) public class RestListConnectorSyncJobsAction extends BaseRestHandler { @Override public String getName() { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestPostConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestPostConnectorSyncJobAction.java index 51cc890418dcb..eac645ab3dc77 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestPostConnectorSyncJobAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestPostConnectorSyncJobAction.java @@ -11,6 +11,8 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; @@ -19,6 +21,7 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; +@ServerlessScope(Scope.PUBLIC) public class RestPostConnectorSyncJobAction extends BaseRestHandler { @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobErrorAction.java index a05be4a92e6e3..720bfdf416827 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobErrorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobErrorAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; import org.elasticsearch.xpack.application.connector.action.ConnectorUpdateActionResponse; @@ -19,6 +21,7 @@ import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.CONNECTOR_SYNC_JOB_ID_PARAM; +@ServerlessScope(Scope.PUBLIC) public class RestUpdateConnectorSyncJobErrorAction extends BaseRestHandler { @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobIngestionStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobIngestionStatsAction.java index 57a362b55ee9b..d55d3ba87d1df 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobIngestionStatsAction.java +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobIngestionStatsAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; import org.elasticsearch.xpack.application.connector.action.ConnectorUpdateActionResponse; @@ -19,6 +21,7 @@ import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.CONNECTOR_SYNC_JOB_ID_PARAM; +@ServerlessScope(Scope.PUBLIC) public class RestUpdateConnectorSyncJobIngestionStatsAction extends BaseRestHandler { @Override public String getName() { From 1637391ed2c9b5b48875888ad808aa826c36d3e5 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Tue, 9 Jan 2024 11:25:18 +0100 Subject: [PATCH 34/47] Fix security apm metric names (#104119) Fix the metric names that were not adhering to the validation rules. --- .../xpack/security/metric/SecurityMetricType.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricType.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricType.java index 75a15525bdd55..c44c33f8e64b6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricType.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/metric/SecurityMetricType.java @@ -14,8 +14,8 @@ public enum SecurityMetricType { AUTHC_API_KEY( SecurityMetricGroup.AUTHC, - new SecurityMetricInfo("es.security.authc.api_key.success.count", "Number of successful API key authentications.", "count"), - new SecurityMetricInfo("es.security.authc.api_key.failures.count", "Number of failed API key authentications.", "count"), + new SecurityMetricInfo("es.security.authc.api_key.success.total", "Number of successful API key authentications.", "count"), + new SecurityMetricInfo("es.security.authc.api_key.failures.total", "Number of failed API key authentications.", "count"), new SecurityMetricInfo("es.security.authc.api_key.time", "Time it took (in nanoseconds) to execute API key authentication.", "ns") ), From 5a9d396d9b3e8d64e318ca8cc8f7b93807e4e1a8 Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Tue, 9 Jan 2024 11:53:31 +0100 Subject: [PATCH 35/47] Account for reserved disk size (#103903) --- docs/changelog/103903.yaml | 5 ++ .../elasticsearch/cluster/ClusterInfo.java | 4 -- .../cluster/ClusterInfoSimulator.java | 39 +++++++++++++- .../allocation/DiskThresholdMonitor.java | 2 +- .../decider/DiskThresholdDecider.java | 2 +- .../allocator/ClusterInfoSimulatorTests.java | 53 +++++++++++++++++++ .../DesiredBalanceComputerTests.java | 6 +-- 7 files changed, 99 insertions(+), 12 deletions(-) create mode 100644 docs/changelog/103903.yaml diff --git a/docs/changelog/103903.yaml b/docs/changelog/103903.yaml new file mode 100644 index 0000000000000..c2e5e710ac439 --- /dev/null +++ b/docs/changelog/103903.yaml @@ -0,0 +1,5 @@ +pr: 103903 +summary: Account for reserved disk size +area: Allocation +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java
b/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java index ba0e1de15f192..c2b61e496e9c9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java @@ -376,10 +376,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeCollection(shardIds); } - public long getTotal() { - return total; - } - public boolean containsShardId(ShardId shardId) { return shardIds.contains(shardId); } diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterInfoSimulator.java b/server/src/main/java/org/elasticsearch/cluster/ClusterInfoSimulator.java index 9019ee465c936..7dbd4f864bdb3 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterInfoSimulator.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterInfoSimulator.java @@ -35,13 +35,48 @@ public class ClusterInfoSimulator { public ClusterInfoSimulator(RoutingAllocation allocation) { this.allocation = allocation; - this.leastAvailableSpaceUsage = new HashMap<>(allocation.clusterInfo().getNodeLeastAvailableDiskUsages()); - this.mostAvailableSpaceUsage = new HashMap<>(allocation.clusterInfo().getNodeMostAvailableDiskUsages()); + this.leastAvailableSpaceUsage = getAdjustedDiskSpace(allocation, allocation.clusterInfo().getNodeLeastAvailableDiskUsages()); + this.mostAvailableSpaceUsage = getAdjustedDiskSpace(allocation, allocation.clusterInfo().getNodeMostAvailableDiskUsages()); this.shardSizes = new CopyOnFirstWriteMap<>(allocation.clusterInfo().shardSizes); this.shardDataSetSizes = Map.copyOf(allocation.clusterInfo().shardDataSetSizes); this.dataPath = Map.copyOf(allocation.clusterInfo().dataPath); } + /** + * Cluster info contains a reserved space that is necessary to finish initializing shards (that are currently in progress). + * For all initializing shards: sum(expected size) = reserved space + already used space. + * This deducts the already used space from the disk usage, since simulating the shard start adds the entire expected shard size. + */ + private static Map<String, DiskUsage> getAdjustedDiskSpace(RoutingAllocation allocation, Map<String, DiskUsage> diskUsage) { + var diskUsageCopy = new HashMap<>(diskUsage); + for (var entry : diskUsageCopy.entrySet()) { + var nodeId = entry.getKey(); + var usage = entry.getValue(); + + var reserved = allocation.clusterInfo().getReservedSpace(nodeId, usage.path()); + if (reserved.total() == 0) { + continue; + } + var node = allocation.routingNodes().node(nodeId); + if (node == null) { + continue; + } + + long adjustment = 0; + for (ShardId shardId : reserved.shardIds()) { + var shard = node.getByShardId(shardId); + if (shard != null) { + var expectedSize = getExpectedShardSize(shard, 0, allocation); + adjustment += expectedSize; + } + } + adjustment -= reserved.total(); + + entry.setValue(updateWithFreeBytes(usage, adjustment)); + } + return diskUsageCopy; + } + /** * This method updates disk usage to reflect shard relocations and new replica initialization. * In case of a single data path both mostAvailableSpaceUsage and leastAvailableSpaceUsage are update to reflect the change.
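To make the adjustment above concrete, consider a node with 1000 bytes of disk that hosts an initializing 100-byte shard which is 70% recovered: the cluster info then reports 930 free bytes plus a reserved space of 30 bytes for that shard. The following standalone sketch is an illustration only, not part of the patch; all names and numbers are assumed, mirroring the new test added later in this series. It shows why handing the already-used portion back before simulating the shard start yields the correct end state.

// Minimal sketch of the reserved-space adjustment, assuming the invariant stated in the
// javadoc above: expected size = reserved space + already used space.
public class ReservedSpaceAdjustmentSketch {
    public static void main(String[] args) {
        long totalBytes = 1000;
        long expectedShardSize = 100;                             // full size of the initializing shard
        long reservedSpace = 30;                                  // bytes still to be copied
        long alreadyUsed = expectedShardSize - reservedSpace;     // 70 bytes already on disk

        long reportedFreeBytes = totalBytes - alreadyUsed;        // 930, as observed by the cluster info
        long adjustment = expectedShardSize - reservedSpace;      // sum(expected) - reserved.total() = 70
        long adjustedFreeBytes = reportedFreeBytes + adjustment;  // 1000, as if the shard were absent

        // Simulating the shard start now deducts the full expected size exactly once:
        long freeBytesAfterStart = adjustedFreeBytes - expectedShardSize; // 900
        System.out.println(freeBytesAfterStart == totalBytes - expectedShardSize); // prints true
    }
}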
diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitor.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitor.java index 6645fd7d0e895..bb0ca372e6a4c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitor.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitor.java @@ -232,7 +232,7 @@ public void onNewInfo(ClusterInfo info) { } } - final long reservedSpace = info.getReservedSpace(usage.getNodeId(), usage.getPath()).getTotal(); + final long reservedSpace = info.getReservedSpace(usage.getNodeId(), usage.getPath()).total(); final DiskUsage usageWithReservedSpace = new DiskUsage( usage.getNodeId(), usage.getNodeName(), diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index 2fa1994f9f74b..22bed76fa2b2e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -125,7 +125,7 @@ public static long sizeOfUnaccountedShards( ) { // Account for reserved space wherever it is available final ClusterInfo.ReservedSpace reservedSpace = clusterInfo.getReservedSpace(node.nodeId(), dataPath); - long totalSize = reservedSpace.getTotal(); + long totalSize = reservedSpace.total(); // NB this counts all shards on the node when the ClusterInfoService retrieved the node stats, which may include shards that are // no longer initializing because their recovery failed or was cancelled. 
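The getTotal() accessor removed above is redundant because callers can read total() directly, which is what the two disk-threshold classes switch to here. A minimal sketch of the shape this relies on, assuming ReservedSpace is a Java record (ShardId is simplified to String to keep the sketch self-contained; the real class carries more members and serialization logic):

import java.util.Set;

// Sketch only: a record implicitly generates total() and shardIds() accessors,
// which makes a hand-written getTotal() unnecessary.
public class ReservedSpaceSketch {
    record ReservedSpace(long total, Set<String> shardIds) {
        boolean containsShardId(String shardId) {
            return shardIds.contains(shardId);
        }
    }

    public static void main(String[] args) {
        var reserved = new ReservedSpace(30L, Set.of("[my-index][0]"));
        System.out.println(reserved.total());                      // record accessor, replaces getTotal()
        System.out.println(reserved.containsShardId("[my-index][0]"));
    }
}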
diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterInfoSimulatorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterInfoSimulatorTests.java index 7aecd611b931b..d09a6525c9d76 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterInfoSimulatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterInfoSimulatorTests.java @@ -42,6 +42,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import static org.elasticsearch.cluster.ClusterInfo.shardIdentifierFromRouting; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_RESIZE_SOURCE_NAME_KEY; @@ -158,6 +159,53 @@ public void testInitializeNewReplica() { ); } + public void testInitializeNewReplicaWithReservedSpace() { + + var recoveredSize = 70; + var remainingSize = 30; + var totalShardSize = recoveredSize + remainingSize; + + var indexMetadata = IndexMetadata.builder("my-index").settings(indexSettings(IndexVersion.current(), 1, 1)).build(); + var existingPrimary = newShardRouting(new ShardId(indexMetadata.getIndex(), 0), "node-0", true, STARTED); + var newReplica = newShardRouting( + new ShardId(indexMetadata.getIndex(), 0), + "node-1", + false, + INITIALIZING, + RecoverySource.PeerRecoverySource.INSTANCE + ); + + var initialClusterInfo = new ClusterInfoTestBuilder() // + .withNode("node-0", new DiskUsageBuilder("/data", 1000, 1000 - totalShardSize)) + .withNode("node-1", new DiskUsageBuilder("/data", 1000, 1000 - recoveredSize)) + .withShard(existingPrimary, totalShardSize) + .withReservedSpace("node-1", "/data", remainingSize, newReplica.shardId()) + .build(); + + var state = ClusterState.builder(ClusterName.DEFAULT) + .metadata(Metadata.builder().put(indexMetadata, false)) + .routingTable( + RoutingTable.builder() + .add(IndexRoutingTable.builder(indexMetadata.getIndex()).addShard(existingPrimary).addShard(newReplica)) + ) + .build(); + var allocation = createRoutingAllocation(state, initialClusterInfo, SnapshotShardSizeInfo.EMPTY); + var simulator = new ClusterInfoSimulator(allocation); + simulator.simulateShardStarted(newReplica); + + assertThat( + simulator.getClusterInfo(), + equalTo( + new ClusterInfoTestBuilder() // + .withNode("node-0", new DiskUsageBuilder("/data", 1000, 1000 - totalShardSize)) + .withNode("node-1", new DiskUsageBuilder("/data", 1000, 1000 - totalShardSize)) + .withShard(existingPrimary, totalShardSize) + .withShard(newReplica, totalShardSize) + .build() + ) + ); + } + public void testRelocateShard() { var fromNodeId = "node-0"; @@ -631,6 +679,11 @@ public ClusterInfoTestBuilder withShard(ShardRouting shard, long size) { return this; } + public ClusterInfoTestBuilder withReservedSpace(String nodeId, String path, long size, ShardId... 
shardIds) { + reservedSpace.put(new NodeAndPath(nodeId, nodeId + path), new ReservedSpace(size, Set.of(shardIds))); + return this; + } + public ClusterInfo build() { return new ClusterInfo(leastAvailableSpaceUsage, mostAvailableSpaceUsage, shardSizes, Map.of(), Map.of(), reservedSpace); } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java index f1f81074bc0fb..9fe168074f41e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java @@ -905,8 +905,7 @@ public void testAccountForSizeOfMisplacedShardsDuringNewComputation() { IndexRoutingTable.builder(indexMetadata2.getIndex()) .addShard(newShardRouting(index2ShardId, "node-1", true, INITIALIZING, index2SnapshotRecoverySource)) ); - // TODO enable in https://github.com/elastic/elasticsearch/pull/103903 - if (false && randomBoolean()) { + if (randomBoolean()) { // Shard is 75% downloaded clusterInfoBuilder // .withNodeUsedSpace("node-1", ByteSizeValue.ofMb(768).getBytes()) @@ -920,8 +919,7 @@ public void testAccountForSizeOfMisplacedShardsDuringNewComputation() { IndexRoutingTable.builder(indexMetadata2.getIndex()) .addShard(newShardRouting(index2ShardId, "node-2", true, INITIALIZING, index2SnapshotRecoverySource)) ); - // TODO enable in https://github.com/elastic/elasticsearch/pull/103903 - if (false && randomBoolean()) { + if (randomBoolean()) { // Shard is 75% downloaded clusterInfoBuilder // .withNodeUsedSpace("node-2", ByteSizeValue.ofMb(768).getBytes()) From 8500d33841b4193378eded6a73e2514afc66572c Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Tue, 9 Jan 2024 10:53:42 +0000 Subject: [PATCH 36/47] Fix doc typos --- docs/reference/esql/esql-async-query-api.asciidoc | 4 ++-- docs/reference/esql/esql-rest.asciidoc | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/reference/esql/esql-async-query-api.asciidoc b/docs/reference/esql/esql-async-query-api.asciidoc index bd753f7132b37..0a78a923523cc 100644 --- a/docs/reference/esql/esql-async-query-api.asciidoc +++ b/docs/reference/esql/esql-async-query-api.asciidoc @@ -11,7 +11,7 @@ monitor its progress, and retrieve results when they become available. The API accepts the same parameters and request body as the synchronous <>, along with additional async related -properties as outline below. +properties as outlined below. [source,console] ---- @@ -49,7 +49,7 @@ requests. // TEST[skip: no access to query ID - may return response values] Otherwise, if the response's `is_running` value is `false`, the async -query has finished, and the results are returned. +query has finished and the results are returned. [source,console-result] ---- diff --git a/docs/reference/esql/esql-rest.asciidoc b/docs/reference/esql/esql-rest.asciidoc index 11b3e12787e29..d66ceb2eb4f1e 100644 --- a/docs/reference/esql/esql-rest.asciidoc +++ b/docs/reference/esql/esql-rest.asciidoc @@ -263,7 +263,7 @@ run an async {esql} query. Queries initiated by the async query API may return results or not. The `wait_for_completion_timeout` property determines how long to wait for the results. If the results are not available by this time, a -<> is return which +<> is returned which can be later used to retrieve the results. 
For example: [source,console] @@ -303,7 +303,7 @@ requests. To check the progress of an async query, use the <> with the query ID. Specify how long you'd like -for complete results in the `wait_for_completion_timeout` parameter. +to wait for complete results in the `wait_for_completion_timeout` parameter. [source,console] ---- @@ -311,7 +311,7 @@ GET /_query/async/FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUT ---- // TEST[skip: no access to query ID - may return response values] -If the response's `is_running` value is `false`, the query has finished, +If the response's `is_running` value is `false`, the query has finished and the results are returned. [source,console-result] From eecac06b5ae6f07061d61a68d15416dda747dfc3 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 9 Jan 2024 12:45:36 +0100 Subject: [PATCH 37/47] ESQL: Use Point geometry in tests (#104120) Replace SpatialPoint abstraction with Point geometries. --- .../common/geo/SpatialPointTests.java | 7 +- .../org/elasticsearch/test/ESTestCase.java | 30 ------- .../compute/data/BasicBlockTests.java | 10 ++- .../operator/topn/TopNEncoderTests.java | 7 +- .../xpack/esql/qa/rest/EsqlSpecTestCase.java | 27 +------ .../esql/action/EsqlQueryResponseTests.java | 8 +- .../function/AbstractFunctionTestCase.java | 6 +- .../expression/function/TestCaseSupplier.java | 14 +++- .../scalar/convert/ToCartesianPointTests.java | 3 +- .../scalar/convert/ToGeoPointTests.java | 3 +- .../AbstractMultivalueFunctionTestCase.java | 13 ++-- .../xpack/ql/util/SpatialCoordinateTypes.java | 78 ++----------------- .../ql/util/SpatialCoordinateTypesTests.java | 19 +++-- 13 files changed, 64 insertions(+), 161 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/common/geo/SpatialPointTests.java b/server/src/test/java/org/elasticsearch/common/geo/SpatialPointTests.java index 6b106e013a43d..bb26193eb5023 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/SpatialPointTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/SpatialPointTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.common.geo; +import org.apache.lucene.tests.geo.GeoTestUtil; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.equalTo; @@ -32,7 +33,7 @@ public void testEqualsAndHashcode() { public void testCompareTo() { for (int i = 0; i < 100; i++) { - SpatialPoint point = randomValueOtherThanMany(p -> p.getX() < -170 || p.getX() > 170, ESTestCase::randomGeoPoint); + SpatialPoint point = randomValueOtherThanMany(p -> p.getX() < -170 || p.getX() > 170, SpatialPointTests::randomGeoPoint); GeoPoint smaller = new GeoPoint(point.getY(), point.getX() - 1); GeoPoint bigger = new GeoPoint(point.getY(), point.getX() + 1); TestPoint testSmaller = new TestPoint(smaller); @@ -58,6 +59,10 @@ private void assertNotEqualsAndHashcode(String message, SpatialPoint a, SpatialP assertThat("Compare: " + message, a.compareTo(b), not(equalTo(0))); } + private static GeoPoint randomGeoPoint() { + return new GeoPoint(GeoTestUtil.nextLatitude(), GeoTestUtil.nextLongitude()); + } + /** * This test class used to be trivial, when SpatialPoint was a concrete class. * If we ever revert back to a concrete class, we can simplify this test class. 
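The next hunks remove the point helpers from ESTestCase and switch BasicBlockTests to the geometry test utilities. A minimal sketch of the substitution follows; the wrapper class is hypothetical, while the two utility calls are the ones used in the diffs below:

import org.elasticsearch.geo.GeometryTestUtils;
import org.elasticsearch.geo.ShapeTestUtils;
import org.elasticsearch.geometry.Point;

// Sketch: Point-returning replacements for the removed SpatialPoint helpers.
class RandomPointSketch {
    static Point randomGeoPoint() {
        return GeometryTestUtils.randomPoint();  // constrained to geographic lat/lon ranges
    }

    static Point randomCartesianPoint() {
        return ShapeTestUtils.randomPoint();     // constrained to cartesian float ranges
    }
}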
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 9cba1026947a6..c072f5643a5cd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -51,8 +51,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.CompositeBytesReference; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; @@ -1194,34 +1192,6 @@ public static String randomDateFormatterPattern() { return randomFrom(FormatNames.values()).getName(); } - /** - * Generate a random valid point constrained to geographic ranges (lat, lon ranges). - */ - public static SpatialPoint randomGeoPoint() { - double lat = randomDoubleBetween(-90, 90, true); - double lon = randomDoubleBetween(-180, 180, true); - return new GeoPoint(lat, lon); - } - - /** - * Generate a random valid point constrained to cartesian ranges. - */ - public static SpatialPoint randomCartesianPoint() { - double x = randomDoubleBetween(-Float.MAX_VALUE, Float.MAX_VALUE, true); - double y = randomDoubleBetween(-Float.MAX_VALUE, Float.MAX_VALUE, true); - return new SpatialPoint() { - @Override - public double getX() { - return x; - } - - @Override - public double getY() { - return y; - } - }; - } - /** * helper to randomly perform on consumer with value */ diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java index c75bf583dbf36..7681b147824a5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java @@ -9,7 +9,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.breaker.CircuitBreaker; -import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; @@ -22,6 +21,9 @@ import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.geo.ShapeTestUtils; +import org.elasticsearch.geometry.Point; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.junit.After; @@ -445,11 +447,11 @@ public void testBytesRefBlock() { } public void testBytesRefBlockOnGeoPoints() { - testBytesRefBlock(() -> GEO.pointAsWKB(randomGeoPoint()), false, GEO::wkbAsString); + testBytesRefBlock(() -> GEO.pointAsWKB(GeometryTestUtils.randomPoint()), false, GEO::wkbAsString); } public void testBytesRefBlockOnCartesianPoints() { - testBytesRefBlock(() -> CARTESIAN.pointAsWKB(randomCartesianPoint()), false, CARTESIAN::wkbAsString); + testBytesRefBlock(() -> CARTESIAN.pointAsWKB(ShapeTestUtils.randomPoint()), false, CARTESIAN::wkbAsString); } public void testBytesRefBlockBuilderWithNulls() { @@ -895,7 +897,7 @@ public static RandomBlock randomBlock( 
List> values = new ArrayList<>(); try (var builder = elementType.newBlockBuilder(positionCount, blockFactory)) { boolean bytesRefFromPoints = randomBoolean(); - Supplier pointSupplier = randomBoolean() ? ESTestCase::randomGeoPoint : ESTestCase::randomCartesianPoint; + Supplier pointSupplier = randomBoolean() ? GeometryTestUtils::randomPoint : ShapeTestUtils::randomPoint; for (int p = 0; p < positionCount; p++) { int valueCount = between(minValuesPerPosition, maxValuesPerPosition); if (valueCount == 0 || nullAllowed && randomBoolean()) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNEncoderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNEncoderTests.java index 5c6c96585f12b..6cd65a8c3c4a0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNEncoderTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNEncoderTests.java @@ -11,8 +11,9 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.WellKnownBinary; import org.elasticsearch.test.ESTestCase; @@ -139,8 +140,8 @@ static Version randomVersion() { } static BytesRef randomPointAsWKB() { - SpatialPoint point = randomBoolean() ? randomGeoPoint() : randomCartesianPoint(); - byte[] wkb = WellKnownBinary.toWKB(new Point(point.getX(), point.getY()), ByteOrder.LITTLE_ENDIAN); + Point point = randomBoolean() ? GeometryTestUtils.randomPoint() : ShapeTestUtils.randomPoint(); + byte[] wkb = WellKnownBinary.toWKB(point, ByteOrder.LITTLE_ENDIAN); return new BytesRef(wkb); } } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 5311cbe78e6b0..fd686ec48bb79 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -12,7 +12,6 @@ import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; -import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -162,31 +161,7 @@ protected void assertResults( Logger logger ) { assertMetadata(expected, actualColumns, logger); - assertData(expected, actualValues, testCase.ignoreOrder, logger, EsqlSpecTestCase::valueToString); - } - - /** - * Unfortunately the GeoPoint.toString method returns the old format, but cannot be changed due to BWC. 
- * So we need to custom format GeoPoint as well as wrap Lists to ensure this custom conversion applies to multi-value fields - */ - private static String valueToString(Object value) { - if (value == null) { - return "null"; - } else if (value instanceof List list) { - StringBuilder sb = new StringBuilder("["); - for (Object field : list) { - if (sb.length() > 1) { - sb.append(", "); - } - sb.append(valueToString(field)); - } - return sb.append("]").toString(); - } else if (value instanceof SpatialPoint point) { - // Alternatively we could just change GeoPoint.toString() to use WKT, but that has other side-effects - return point.toWKT(); - } else { - return value.toString(); - } + assertData(expected, actualValues, testCase.ignoreOrder, logger, value -> value == null ? "null" : value.toString()); } private Throwable reworkException(Throwable th) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index ec21386612be3..fa5334fb33ef7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -33,6 +33,8 @@ import org.elasticsearch.compute.operator.DriverStatus; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; import org.elasticsearch.xcontent.InstantiatingObjectParser; import org.elasticsearch.xcontent.ObjectParser; @@ -148,8 +150,10 @@ private Page randomPage(List columns) { new BytesRef(UnsupportedValueSource.UNSUPPORTED_OUTPUT) ); case "version" -> ((BytesRefBlock.Builder) builder).appendBytesRef(new Version(randomIdentifier()).toBytesRef()); - case "geo_point" -> ((BytesRefBlock.Builder) builder).appendBytesRef(GEO.pointAsWKB(randomGeoPoint())); - case "cartesian_point" -> ((BytesRefBlock.Builder) builder).appendBytesRef(CARTESIAN.pointAsWKB(randomCartesianPoint())); + case "geo_point" -> ((BytesRefBlock.Builder) builder).appendBytesRef(GEO.pointAsWKB(GeometryTestUtils.randomPoint())); + case "cartesian_point" -> ((BytesRefBlock.Builder) builder).appendBytesRef( + CARTESIAN.pointAsWKB(ShapeTestUtils.randomPoint()) + ); case "null" -> builder.appendNull(); case "_source" -> { try { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 5f8daa0b40e09..de9f5e1aedabf 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -28,6 +28,8 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.PathUtils; import org.elasticsearch.core.Releasables; +import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.logging.LogManager; import org.elasticsearch.test.ESTestCase; @@ -125,8 +127,8 @@ public static Literal randomLiteral(DataType type) { case 
"time_duration" -> Duration.ofMillis(randomLongBetween(-604800000L, 604800000L)); // plus/minus 7 days case "text" -> new BytesRef(randomAlphaOfLength(50)); case "version" -> randomVersion().toBytesRef(); - case "geo_point" -> GEO.pointAsWKB(randomGeoPoint()); - case "cartesian_point" -> CARTESIAN.pointAsWKB(randomCartesianPoint()); + case "geo_point" -> GEO.pointAsWKB(GeometryTestUtils.randomPoint()); + case "cartesian_point" -> CARTESIAN.pointAsWKB(ShapeTestUtils.randomPoint()); case "null" -> null; case "_source" -> { try { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 762d35ce80370..15684044a7881 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -10,6 +10,8 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.test.ESTestCase; @@ -41,8 +43,6 @@ import java.util.function.UnaryOperator; import java.util.stream.Collectors; -import static org.elasticsearch.test.ESTestCase.randomCartesianPoint; -import static org.elasticsearch.test.ESTestCase.randomGeoPoint; import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; import static org.hamcrest.Matchers.equalTo; @@ -913,12 +913,18 @@ public static List timeDurationCases() { } private static List geoPointCases() { - return List.of(new TypedDataSupplier("", () -> GEO.pointAsWKB(randomGeoPoint()), EsqlDataTypes.GEO_POINT)); + return List.of( + new TypedDataSupplier("", () -> GEO.pointAsWKB(GeometryTestUtils.randomPoint()), EsqlDataTypes.GEO_POINT) + ); } private static List cartesianPointCases() { return List.of( - new TypedDataSupplier("", () -> CARTESIAN.pointAsWKB(randomCartesianPoint()), EsqlDataTypes.CARTESIAN_POINT) + new TypedDataSupplier( + "", + () -> CARTESIAN.pointAsWKB(ShapeTestUtils.randomPoint()), + EsqlDataTypes.CARTESIAN_POINT + ) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java index 6f45acbdd3b24..399ce11ab3d4c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java @@ -11,6 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -76,7 +77,7 @@ public static Iterable parameters() { List.of( new TestCaseSupplier.TypedDataSupplier( "", - () 
-> new BytesRef(CARTESIAN.pointAsString(randomCartesianPoint())), + () -> new BytesRef(CARTESIAN.pointAsString(ShapeTestUtils.randomPoint())), DataTypes.KEYWORD ) ), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java index 7eef2b7c15a28..51b58f4467fc3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java @@ -11,6 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -68,7 +69,7 @@ public static Iterable parameters() { List.of( new TestCaseSupplier.TypedDataSupplier( "", - () -> new BytesRef(GEO.pointAsString(randomGeoPoint())), + () -> new BytesRef(GEO.pointAsString(GeometryTestUtils.randomPoint())), DataTypes.KEYWORD ) ), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java index d064ac1931c33..6f0a2edafaf04 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java @@ -8,8 +8,10 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.geo.ShapeTestUtils; +import org.elasticsearch.geometry.Point; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; @@ -413,7 +415,7 @@ protected static void geoPoints( DataType expectedDataType, BiFunction, Matcher> matcher ) { - points(cases, name, evaluatorName, EsqlDataTypes.GEO_POINT, expectedDataType, GEO, ESTestCase::randomGeoPoint, matcher); + points(cases, name, evaluatorName, EsqlDataTypes.GEO_POINT, expectedDataType, GEO, GeometryTestUtils::randomPoint, matcher); } /** @@ -448,7 +450,7 @@ protected static void cartesianPoints( EsqlDataTypes.CARTESIAN_POINT, expectedDataType, CARTESIAN, - ESTestCase::randomCartesianPoint, + ShapeTestUtils::randomPoint, matcher ); } @@ -463,12 +465,11 @@ protected static void points( DataType dataType, DataType expectedDataType, SpatialCoordinateTypes spatial, - Supplier randomPoint, + Supplier randomPoint, BiFunction, Matcher> matcher ) { cases.add(new TestCaseSupplier(name + "(" + dataType.typeName() + ")", List.of(dataType), () -> { - SpatialPoint point = randomPoint.get(); - BytesRef wkb = spatial.pointAsWKB(point); + BytesRef wkb = 
spatial.pointAsWKB(randomPoint.get()); return new TestCaseSupplier.TestCase( List.of(new TestCaseSupplier.TypedData(List.of(wkb), dataType, "field")), evaluatorName + "[field=Attribute[channel=0]]", diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java index e1fdd6f364258..38e01c9f857cf 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java @@ -10,8 +10,6 @@ import org.apache.lucene.geo.GeoEncodingUtils; import org.apache.lucene.geo.XYEncodingUtils; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.utils.GeometryValidator; @@ -25,8 +23,8 @@ public enum SpatialCoordinateTypes { GEO { - public SpatialPoint longAsPoint(long encoded) { - return new GeoPoint(GeoEncodingUtils.decodeLatitude((int) (encoded >>> 32)), GeoEncodingUtils.decodeLongitude((int) encoded)); + public Point longAsPoint(long encoded) { + return new Point(GeoEncodingUtils.decodeLongitude((int) encoded), GeoEncodingUtils.decodeLatitude((int) (encoded >>> 32))); } public long pointAsLong(double x, double y) { @@ -36,14 +34,8 @@ public long pointAsLong(double x, double y) { } }, CARTESIAN { - public SpatialPoint longAsPoint(long encoded) { - try { - final double x = XYEncodingUtils.decode((int) (encoded >>> 32)); - final double y = XYEncodingUtils.decode((int) (encoded & 0xFFFFFFFF)); - return makePoint(x, y); - } catch (Error e) { - throw new IllegalArgumentException("Failed to convert invalid encoded value to cartesian point"); - } + public Point longAsPoint(long encoded) { + return new Point(XYEncodingUtils.decode((int) (encoded >>> 32)), XYEncodingUtils.decode((int) (encoded & 0xFFFFFFFF))); } public long pointAsLong(double x, double y) { @@ -51,70 +43,14 @@ public long pointAsLong(double x, double y) { final long yi = XYEncodingUtils.encode((float) y); return (yi & 0xFFFFFFFFL) | xi << 32; } - - private SpatialPoint makePoint(double x, double y) { - return new SpatialPoint() { - @Override - public double getX() { - return x; - } - - @Override - public double getY() { - return y; - } - - @Override - public int hashCode() { - return 31 * Double.hashCode(x) + Double.hashCode(y); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (obj instanceof SpatialPoint other) { - return x == other.getX() && y == other.getY(); - } - return false; - } - - @Override - public String toString() { - return toWKT(); - } - }; - } }; - public abstract SpatialPoint longAsPoint(long encoded); - - public long pointAsLong(SpatialPoint point) { - return pointAsLong(point.getX(), point.getY()); - } + public abstract Point longAsPoint(long encoded); public abstract long pointAsLong(double x, double y); - public String pointAsString(SpatialPoint point) { - return point.toWKT(); - } - - public Point stringAsPoint(String string) { - try { - Geometry geometry = WellKnownText.fromWKT(GeometryValidator.NOOP, false, string); - if (geometry instanceof Point point) { - return point; - } else { - throw new IllegalArgumentException("Unsupported geometry type " + geometry.type()); - } - } catch (Exception e) { - throw new 
IllegalArgumentException("Failed to parse WKT: " + e.getMessage(), e); - } - } - - public BytesRef pointAsWKB(SpatialPoint point) { - return pointAsWKB(new Point(point.getX(), point.getY())); + public String pointAsString(Point point) { + return WellKnownText.toWKT(point); } public BytesRef pointAsWKB(Point point) { diff --git a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java index b52688877b1d9..ca650bf29662f 100644 --- a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java +++ b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java @@ -7,7 +7,8 @@ package org.elasticsearch.xpack.ql.util; -import org.elasticsearch.common.geo.SpatialPoint; +import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Point; import org.elasticsearch.test.ESTestCase; @@ -22,10 +23,10 @@ public class SpatialCoordinateTypesTests extends ESTestCase { private static final Map types = new LinkedHashMap<>(); static { - types.put(SpatialCoordinateTypes.GEO, new TestTypeFunctions(ESTestCase::randomGeoPoint, v -> 1e-5)); + types.put(SpatialCoordinateTypes.GEO, new TestTypeFunctions(GeometryTestUtils::randomPoint, v -> 1e-5)); types.put( SpatialCoordinateTypes.CARTESIAN, - new TestTypeFunctions(ESTestCase::randomCartesianPoint, SpatialCoordinateTypesTests::cartesianError) + new TestTypeFunctions(ShapeTestUtils::randomPoint, SpatialCoordinateTypesTests::cartesianError) ); } @@ -34,15 +35,15 @@ private static double cartesianError(double v) { return (abs < 1) ? 1e-5 : abs / 1e7; } - record TestTypeFunctions(Supplier randomPoint, Function error) {} + record TestTypeFunctions(Supplier randomPoint, Function error) {} public void testEncoding() { for (var type : types.entrySet()) { for (int i = 0; i < 10; i++) { SpatialCoordinateTypes coordType = type.getKey(); - SpatialPoint original = type.getValue().randomPoint().get(); + Point original = type.getValue().randomPoint().get(); var error = type.getValue().error; - SpatialPoint point = coordType.longAsPoint(coordType.pointAsLong(original)); + Point point = coordType.longAsPoint(coordType.pointAsLong(original.getX(), original.getY())); assertThat(coordType + ": Y[" + i + "]", point.getY(), closeTo(original.getY(), error.apply(original.getY()))); assertThat(coordType + ": X[" + i + "]", point.getX(), closeTo(original.getX(), error.apply(original.getX()))); } @@ -53,10 +54,8 @@ public void testParsing() { for (var type : types.entrySet()) { for (int i = 0; i < 10; i++) { SpatialCoordinateTypes coordType = type.getKey(); - SpatialPoint geoPoint = type.getValue().randomPoint.get(); - Point point = coordType.stringAsPoint(coordType.pointAsString(geoPoint)); - assertThat(coordType + ": Y[" + i + "]", point.getY(), closeTo(geoPoint.getY(), 1e-5)); - assertThat(coordType + ": X[" + i + "]", point.getX(), closeTo(geoPoint.getX(), 1e-5)); + Point point = type.getValue().randomPoint.get(); + assertEquals(coordType.wkbAsString(coordType.pointAsWKB(point)), coordType.pointAsString(point)); } } } From 770fc19b1406255f2505390b9d1a8b0c72569d42 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Tue, 9 Jan 2024 13:03:58 +0100 Subject: [PATCH 38/47] ESQL: add date_diff function (#104118) Same as https://github.com/elastic/elasticsearch/pull/103208 Fixes #101942 We had to revert it after a 
Checkstyle failure (strange it didn't pop up in the CI before merging) --- docs/changelog/104118.yaml | 6 + .../esql/functions/date_diff.asciidoc | 37 +++ .../esql/functions/signature/date_diff.svg | 1 + .../esql/functions/types/date_diff.asciidoc | 6 + .../src/main/resources/date.csv-spec | 61 +++++ .../src/main/resources/show.csv-spec | 6 +- .../date/DateDiffConstantEvaluator.java | 154 ++++++++++++ .../scalar/date/DateDiffEvaluator.java | 176 ++++++++++++++ .../function/EsqlFunctionRegistry.java | 2 + .../function/scalar/date/DateDiff.java | 220 ++++++++++++++++++ .../function/scalar/date/DateTimeField.java | 50 ++++ .../xpack/esql/io/stream/PlanNamedTypes.java | 15 ++ .../function/scalar/date/DateDiffTests.java | 192 +++++++++++++++ 13 files changed, 924 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/104118.yaml create mode 100644 docs/reference/esql/functions/date_diff.asciidoc create mode 100644 docs/reference/esql/functions/signature/date_diff.svg create mode 100644 docs/reference/esql/functions/types/date_diff.asciidoc create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTimeField.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java diff --git a/docs/changelog/104118.yaml b/docs/changelog/104118.yaml new file mode 100644 index 0000000000000..f5afb199bc5eb --- /dev/null +++ b/docs/changelog/104118.yaml @@ -0,0 +1,6 @@ +pr: 104118 +summary: "ESQL: add `date_diff` function" +area: ES|QL +type: enhancement +issues: + - 101942 diff --git a/docs/reference/esql/functions/date_diff.asciidoc b/docs/reference/esql/functions/date_diff.asciidoc new file mode 100644 index 0000000000000..6127290466b10 --- /dev/null +++ b/docs/reference/esql/functions/date_diff.asciidoc @@ -0,0 +1,37 @@ +[discrete] +[[esql-date_diff]] +=== `DATE_DIFF` +Subtract the second argument from the third argument and return their difference in multiples of the unit specified in the first argument. +If the second argument (start) is greater than the third argument (end), then negative values are returned. 
+ +[cols="^,^"] +|=== +2+h|Datetime difference units + +s|unit +s|abbreviations + +| year | years, yy, yyyy +| quarter | quarters, qq, q +| month | months, mm, m +| dayofyear | dy, y +| day | days, dd, d +| week | weeks, wk, ww +| weekday | weekdays, dw +| hour | hours, hh +| minute | minutes, mi, n +| second | seconds, ss, s +| millisecond | milliseconds, ms +| microsecond | microseconds, mcs +| nanosecond | nanoseconds, ns +|=== + +[source.merge.styled,esql] +---- +include::{esql-specs}/docs.csv-spec[tag=dateDiff] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/docs.csv-spec[tag=dateDiff-result] +|=== + diff --git a/docs/reference/esql/functions/signature/date_diff.svg b/docs/reference/esql/functions/signature/date_diff.svg new file mode 100644 index 0000000000000..6563ec6576927 --- /dev/null +++ b/docs/reference/esql/functions/signature/date_diff.svg @@ -0,0 +1 @@ +DATE_DIFF(unit,startTimestamp,endTimestamp) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/date_diff.asciidoc b/docs/reference/esql/functions/types/date_diff.asciidoc new file mode 100644 index 0000000000000..b4e5c6ad5e0b5 --- /dev/null +++ b/docs/reference/esql/functions/types/date_diff.asciidoc @@ -0,0 +1,6 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +unit | startTimestamp | endTimestamp | result +keyword | datetime | datetime | integer +text | datetime | datetime | integer +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 509257c4c8b4f..8dd9704fd2d4b 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -337,6 +337,67 @@ AVG(salary):double | bucket:date // end::auto_bucket_in_agg-result[] ; +evalDateDiffInNanoAndMicroAndMilliSeconds#[skip:-8.12.99, reason:date_diff added in 8.13] +ROW date1=to_datetime("2023-12-02T11:00:00.000Z"), date2=to_datetime("2023-12-02T11:00:00.001Z") +| EVAL dd_ns1=date_diff("nanoseconds", date1, date2), dd_ns2=date_diff("ns", date1, date2) +| EVAL dd_mcs1=date_diff("microseconds", date1, date2), dd_mcs2=date_diff("mcs", date1, date2) +| EVAL dd_ms1=date_diff("milliseconds", date1, date2), dd_ms2=date_diff("ms", date1, date2) +| keep dd_ns1, dd_ns2, dd_mcs1, dd_mcs2, dd_ms1, dd_ms2 +; + +dd_ns1:integer | dd_ns2:integer | dd_mcs1:integer | dd_mcs2:integer | dd_ms1:integer | dd_ms2:integer +1000000 | 1000000 | 1000 | 1000 | 1 | 1 +; + +evalDateDiffInSecondsAndMinutesAndHours#[skip:-8.12.99, reason:date_diff added in 8.13] +ROW date1=to_datetime("2023-12-02T11:00:00.000Z"), date2=to_datetime("2023-12-02T12:00:00.000Z") +| EVAL dd_s1=date_diff("seconds", date1, date2), dd_s2=date_diff("ss", date1, date2), dd_s3=date_diff("s", date1, date2) +| EVAL dd_m1=date_diff("minutes", date1, date2), dd_m2=date_diff("mi", date1, date2), dd_m3=date_diff("n", date1, date2) +| EVAL dd_h1=date_diff("hours", date1, date2), dd_h2=date_diff("hh", date1, date2) +| keep dd_s1, dd_s2, dd_s3, dd_m1, dd_m2, dd_m3, dd_h1, dd_h2 +; + +dd_s1:integer | dd_s2:integer | dd_s3:integer | dd_m1:integer | dd_m2:integer | dd_m3:integer | dd_h1:integer | dd_h2:integer +3600 | 3600 | 3600 | 60 | 60 | 60 | 1 | 1 +; + +evalDateDiffInDaysAndWeeks#[skip:-8.12.99, reason:date_diff added in 8.13] +ROW date1=to_datetime("2023-12-02T11:00:00.000Z"), date2=to_datetime("2023-12-24T11:00:00.000Z") +| EVAL dd_wd1=date_diff("weekdays", date1, date2), 
dd_wd2=date_diff("dw", date1, date2) +| EVAL dd_w1=date_diff("weeks", date1, date2), dd_w2=date_diff("wk", date1, date2), dd_w3=date_diff("ww", date1, date2) +| EVAL dd_d1=date_diff("dy", date1, date2), dd_d2=date_diff("y", date1, date2) +| EVAL dd_dy1=date_diff("days", date1, date2), dd_dy2=date_diff("dd", date1, date2), dd_dy3=date_diff("d", date1, date2) +| keep dd_wd1, dd_wd2, dd_w1, dd_w2, dd_w3, dd_d1, dd_d2, dd_dy1, dd_dy2, dd_dy3 +; + +dd_wd1:integer | dd_wd2:integer | dd_w1:integer | dd_w2:integer | dd_w3:integer | dd_d1:integer | dd_d2:integer | dd_dy1:integer | dd_dy2:integer | dd_dy3:integer +22 | 22 | 3 | 3 | 3 | 22 | 22 | 22 | 22 | 22 +; + +evalDateDiffInMonthsAndQuartersAndYears#[skip:-8.12.99, reason:date_diff added in 8.13] +ROW date1=to_datetime("2023-12-02T11:00:00.000Z"), date2=to_datetime("2024-12-24T11:00:00.000Z") +| EVAL dd_m1=date_diff("months", date1, date2), dd_m2=date_diff("mm", date1, date2), dd_m3=date_diff("m", date1, date2) +| EVAL dd_q1=date_diff("quarters", date1, date2), dd_q2=date_diff("qq", date1, date2), dd_q3=date_diff("q", date1, date2) +| EVAL dd_y1=date_diff("years", date1, date2), dd_y2=date_diff("yyyy", date1, date2), dd_y3=date_diff("yy", date1, date2) +| keep dd_m1, dd_m2, dd_m3, dd_q1, dd_q2, dd_q3, dd_y1, dd_y2, dd_y3 +; + +dd_m1:integer | dd_m2:integer | dd_m3:integer | dd_q1:integer | dd_q2:integer | dd_q3:integer | dd_y1:integer | dd_y2:integer | dd_y3:integer +12 | 12 | 12 | 4 | 4 | 4 | 1 | 1 | 1 +; + +evalDateDiffErrorOutOfIntegerRange#[skip:-8.12.99, reason:date_diff added in 8.13] +ROW date1=to_datetime("2023-12-02T11:00:00.000Z"), date2=to_datetime("2023-12-23T11:00:00.000Z") +| EVAL dd_oo=date_diff("nanoseconds", date1, date2) +| keep dd_oo +; +warning: Line 2:14: evaluation of [date_diff(\"nanoseconds\", date1, date2)] failed, treating result as null. Only first 20 failures recorded. +warning: Line 2:14: org.elasticsearch.xpack.ql.InvalidArgumentException: [1814400000000000] out of [integer] range + +dd_oo:integer +null +; + evalDateParseWithSimpleDate row a = "2023-02-01" | eval b = date_parse("yyyy-MM-dd", a) | keep b; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec index 024ccc9883be2..ef8ff3b3e6064 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec @@ -26,6 +26,7 @@ cos |"double cos(n:integer|long|double|unsigned_long)" cosh |"double cosh(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "The number who's hyperbolic cosine is to be returned" |double | "Returns the hyperbolic cosine of a number" | false | false count |? count(arg1:?) |arg1 |? | "" |? | "" | false | false count_distinct |? count_distinct(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false +date_diff |"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"|[unit, startTimestamp, endTimestamp] |["keyword|text", "date", "date"] |["A valid date unit", "A string representing a start timestamp", "A string representing an end timestamp"] |integer | "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" | [false, false, false] | false date_extract |? date_extract(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false date_format |? date_format(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? 
| "" | [false, false] | false date_parse |"date date_parse(?datePattern:keyword, dateString:keyword|text)"|[datePattern, dateString]|["keyword", "keyword|text"]|[A valid date pattern, A string representing a date]|date |Parses a string into a date value | [true, false] | false @@ -117,6 +118,7 @@ synopsis:keyword "double cosh(n:integer|long|double|unsigned_long)" ? count(arg1:?) ? count_distinct(arg1:?, arg2:?) +"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)" ? date_extract(arg1:?, arg2:?) ? date_format(arg1:?, arg2:?) "date date_parse(?datePattern:keyword, dateString:keyword|text)" @@ -205,9 +207,9 @@ is_nan |boolean is_nan(n:double) // see https://github.com/elastic/elasticsearch/issues/102120 -countFunctions#[skip:-8.11.99] +countFunctions#[skip:-8.12.99] show functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -84 | 84 | 84 +85 | 85 | 85 ; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java new file mode 100644 index 0000000000000..3cb41d0028d54 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantEvaluator.java @@ -0,0 +1,154 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.InvalidArgumentException; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. 
+ */ +public final class DateDiffConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator startTimestamp; + + private final EvalOperator.ExpressionEvaluator endTimestamp; + + private final DriverContext driverContext; + + public DateDiffConstantEvaluator(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator startTimestamp, + EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { + this.warnings = new Warnings(source); + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock startTimestampBlock = (LongBlock) startTimestamp.eval(page)) { + try (LongBlock endTimestampBlock = (LongBlock) endTimestamp.eval(page)) { + LongVector startTimestampVector = startTimestampBlock.asVector(); + if (startTimestampVector == null) { + return eval(page.getPositionCount(), startTimestampBlock, endTimestampBlock); + } + LongVector endTimestampVector = endTimestampBlock.asVector(); + if (endTimestampVector == null) { + return eval(page.getPositionCount(), startTimestampBlock, endTimestampBlock); + } + return eval(page.getPositionCount(), startTimestampVector, endTimestampVector); + } + } + } + + public IntBlock eval(int positionCount, LongBlock startTimestampBlock, + LongBlock endTimestampBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (startTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startTimestampBlock.getValueCount(p) != 1) { + if (startTimestampBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampBlock.getValueCount(p) != 1) { + if (endTimestampBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.process(datePartFieldUnit, startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, LongVector startTimestampVector, + LongVector endTimestampVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(DateDiff.process(datePartFieldUnit, startTimestampVector.getLong(p), endTimestampVector.getLong(p))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffConstantEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + + 
@Override + public void close() { + Releasables.closeExpectNoException(startTimestamp, endTimestamp); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestamp; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestamp; + + public Factory(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator.Factory startTimestamp, + EvalOperator.ExpressionEvaluator.Factory endTimestamp) { + this.source = source; + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + } + + @Override + public DateDiffConstantEvaluator get(DriverContext context) { + return new DateDiffConstantEvaluator(source, datePartFieldUnit, startTimestamp.get(context), endTimestamp.get(context), context); + } + + @Override + public String toString() { + return "DateDiffConstantEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java new file mode 100644 index 0000000000000..952a819a014a9 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffEvaluator.java @@ -0,0 +1,176 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.InvalidArgumentException; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. 
+ */ +public final class DateDiffEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator unit; + + private final EvalOperator.ExpressionEvaluator startTimestamp; + + private final EvalOperator.ExpressionEvaluator endTimestamp; + + private final DriverContext driverContext; + + public DateDiffEvaluator(Source source, EvalOperator.ExpressionEvaluator unit, + EvalOperator.ExpressionEvaluator startTimestamp, + EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { + this.warnings = new Warnings(source); + this.unit = unit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock unitBlock = (BytesRefBlock) unit.eval(page)) { + try (LongBlock startTimestampBlock = (LongBlock) startTimestamp.eval(page)) { + try (LongBlock endTimestampBlock = (LongBlock) endTimestamp.eval(page)) { + BytesRefVector unitVector = unitBlock.asVector(); + if (unitVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); + } + LongVector startTimestampVector = startTimestampBlock.asVector(); + if (startTimestampVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); + } + LongVector endTimestampVector = endTimestampBlock.asVector(); + if (endTimestampVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); + } + return eval(page.getPositionCount(), unitVector, startTimestampVector, endTimestampVector); + } + } + } + } + + public IntBlock eval(int positionCount, BytesRefBlock unitBlock, LongBlock startTimestampBlock, + LongBlock endTimestampBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (unitBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (unitBlock.getValueCount(p) != 1) { + if (unitBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (startTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startTimestampBlock.getValueCount(p) != 1) { + if (startTimestampBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampBlock.getValueCount(p) != 1) { + if (endTimestampBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.process(unitBlock.getBytesRef(unitBlock.getFirstValueIndex(p), unitScratch), startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, BytesRefVector unitVector, + LongVector 
startTimestampVector, LongVector endTimestampVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(DateDiff.process(unitVector.getBytesRef(p, unitScratch), startTimestampVector.getLong(p), endTimestampVector.getLong(p))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffEvaluator[" + "unit=" + unit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(unit, startTimestamp, endTimestamp); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory unit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestamp; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestamp; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory unit, + EvalOperator.ExpressionEvaluator.Factory startTimestamp, + EvalOperator.ExpressionEvaluator.Factory endTimestamp) { + this.source = source; + this.unit = unit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + } + + @Override + public DateDiffEvaluator get(DriverContext context) { + return new DateDiffEvaluator(source, unit.get(context), startTimestamp.get(context), endTimestamp.get(context), context); + } + + @Override + public String toString() { + return "DateDiffEvaluator[" + "unit=" + unit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index b0cdad5095bbe..3b76141fa541e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -32,6 +32,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToUnsignedLong; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToVersion; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateDiff; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateExtract; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateFormat; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; @@ -160,6 +161,7 @@ private FunctionDefinition[][] functions() { def(EndsWith.class, EndsWith::new, "ends_with") }, // date new FunctionDefinition[] { + def(DateDiff.class, DateDiff::new, "date_diff"), def(DateExtract.class, DateExtract::new, "date_extract"), def(DateFormat.class, DateFormat::new, "date_format"), def(DateParse.class, DateParse::new, "date_parse"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java new file mode 100644 index 0000000000000..63184774540b0 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java @@ -0,0 +1,220 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.ql.InvalidArgumentException; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; +import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; +import org.elasticsearch.xpack.ql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.time.Instant; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.time.temporal.ChronoUnit; +import java.time.temporal.IsoFields; +import java.util.Arrays; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.function.BiFunction; +import java.util.function.Function; + +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.THIRD; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isDate; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; +import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToInt; + +/** + * Subtract the second argument from the third argument and return their difference + * in multiples of the unit specified in the first argument. + * If the second argument (start) is greater than the third argument (end), then negative values are returned. 
+ */ +public class DateDiff extends ScalarFunction implements OptionalArgument, EvaluatorMapper { + + public static final ZoneId UTC = ZoneId.of("Z"); + + private final Expression unit; + private final Expression startTimestamp; + private final Expression endTimestamp; + + /** + * Represents units that can be used for DATE_DIFF function and how the difference + * between 2 dates is calculated + */ + public enum Part implements DateTimeField { + + YEAR((start, end) -> end.getYear() - start.getYear(), "years", "yyyy", "yy"), + QUARTER((start, end) -> safeToInt(IsoFields.QUARTER_YEARS.between(start, end)), "quarters", "qq", "q"), + MONTH((start, end) -> safeToInt(ChronoUnit.MONTHS.between(start, end)), "months", "mm", "m"), + DAYOFYEAR((start, end) -> safeToInt(ChronoUnit.DAYS.between(start, end)), "dy", "y"), + DAY(DAYOFYEAR::diff, "days", "dd", "d"), + WEEK((start, end) -> safeToInt(ChronoUnit.WEEKS.between(start, end)), "weeks", "wk", "ww"), + WEEKDAY(DAYOFYEAR::diff, "weekdays", "dw"), + HOUR((start, end) -> safeToInt(ChronoUnit.HOURS.between(start, end)), "hours", "hh"), + MINUTE((start, end) -> safeToInt(ChronoUnit.MINUTES.between(start, end)), "minutes", "mi", "n"), + SECOND((start, end) -> safeToInt(ChronoUnit.SECONDS.between(start, end)), "seconds", "ss", "s"), + MILLISECOND((start, end) -> safeToInt(ChronoUnit.MILLIS.between(start, end)), "milliseconds", "ms"), + MICROSECOND((start, end) -> safeToInt(ChronoUnit.MICROS.between(start, end)), "microseconds", "mcs"), + NANOSECOND((start, end) -> safeToInt(ChronoUnit.NANOS.between(start, end)), "nanoseconds", "ns"); + + private static final Map NAME_TO_PART = DateTimeField.initializeResolutionMap(values()); + + private final BiFunction diffFunction; + private final Set aliases; + + Part(BiFunction diffFunction, String... 
aliases) { + this.diffFunction = diffFunction; + this.aliases = Set.of(aliases); + } + + public Integer diff(ZonedDateTime startTimestamp, ZonedDateTime endTimestamp) { + return diffFunction.apply(startTimestamp, endTimestamp); + } + + @Override + public Iterable aliases() { + return aliases; + } + + public static Part resolve(String dateTimeUnit) { + Part datePartField = DateTimeField.resolveMatch(NAME_TO_PART, dateTimeUnit); + if (datePartField == null) { + List similar = DateTimeField.findSimilar(NAME_TO_PART.keySet(), dateTimeUnit); + String errorMessage; + if (similar.isEmpty() == false) { + errorMessage = String.format( + Locale.ROOT, + "Received value [%s] is not valid date part to add; did you mean %s?", + dateTimeUnit, + similar + ); + } else { + errorMessage = String.format( + Locale.ROOT, + "A value of %s or their aliases is required; received [%s]", + Arrays.asList(Part.values()), + dateTimeUnit + ); + } + throw new IllegalArgumentException(errorMessage); + } + + return datePartField; + } + } + + @FunctionInfo( + returnType = "integer", + description = "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" + ) + public DateDiff( + Source source, + @Param(name = "unit", type = { "keyword", "text" }, description = "A valid date unit") Expression unit, + @Param( + name = "startTimestamp", + type = { "date" }, + description = "A string representing a start timestamp" + ) Expression startTimestamp, + @Param(name = "endTimestamp", type = { "date" }, description = "A string representing an end timestamp") Expression endTimestamp + ) { + super(source, List.of(unit, startTimestamp, endTimestamp)); + this.unit = unit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + } + + @Evaluator(extraName = "Constant", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static int process(@Fixed Part datePartFieldUnit, long startTimestamp, long endTimestamp) throws IllegalArgumentException { + ZonedDateTime zdtStart = ZonedDateTime.ofInstant(Instant.ofEpochMilli(startTimestamp), UTC); + ZonedDateTime zdtEnd = ZonedDateTime.ofInstant(Instant.ofEpochMilli(endTimestamp), UTC); + return datePartFieldUnit.diff(zdtStart, zdtEnd); + } + + @Evaluator(warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static int process(BytesRef unit, long startTimestamp, long endTimestamp) throws IllegalArgumentException { + return process(Part.resolve(unit.utf8ToString()), startTimestamp, endTimestamp); + } + + @Override + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + ExpressionEvaluator.Factory startTimestampEvaluator = toEvaluator.apply(startTimestamp); + ExpressionEvaluator.Factory endTimestampEvaluator = toEvaluator.apply(endTimestamp); + + if (unit.foldable()) { + try { + Part datePartField = Part.resolve(((BytesRef) unit.fold()).utf8ToString()); + return new DateDiffConstantEvaluator.Factory(source(), datePartField, startTimestampEvaluator, endTimestampEvaluator); + } catch (IllegalArgumentException e) { + throw new InvalidArgumentException("invalid unit format for [{}]: {}", sourceText(), e.getMessage()); + } + } + ExpressionEvaluator.Factory unitEvaluator = toEvaluator.apply(unit); + return new DateDiffEvaluator.Factory(source(), unitEvaluator, startTimestampEvaluator, endTimestampEvaluator); + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + + 
TypeResolution resolution = isString(unit, sourceText(), FIRST).and(isDate(startTimestamp, sourceText(), SECOND)) + .and(isDate(endTimestamp, sourceText(), THIRD)); + + if (resolution.unresolved()) { + return resolution; + } + + return TypeResolution.TYPE_RESOLVED; + } + + @Override + public Object fold() { + return EvaluatorMapper.super.fold(); + } + + @Override + public boolean foldable() { + return unit.foldable() && startTimestamp.foldable() && endTimestamp.foldable(); + } + + @Override + public DataType dataType() { + return DataTypes.INTEGER; + } + + @Override + public ScriptTemplate asScript() { + throw new UnsupportedOperationException("functions do not support scripting"); + } + + @Override + public Expression replaceChildren(List<Expression> newChildren) { + return new DateDiff(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); + } + + @Override + protected NodeInfo<DateDiff> info() { + return NodeInfo.create(this, DateDiff::new, children().get(0), children().get(1), children().get(2)); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTimeField.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTimeField.java new file mode 100644 index 0000000000000..85651af67e8e3 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTimeField.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import org.elasticsearch.xpack.ql.util.StringUtils; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.stream.Collectors; + +public interface DateTimeField { + + static <D extends DateTimeField> Map<String, D> initializeResolutionMap(D[] values) { + Map<String, D> nameToPart = new HashMap<>(); + + for (D datePart : values) { + String lowerCaseName = datePart.name().toLowerCase(Locale.ROOT); + + nameToPart.put(lowerCaseName, datePart); + for (String alias : datePart.aliases()) { + nameToPart.put(alias, datePart); + } + } + return Collections.unmodifiableMap(nameToPart); + } + + static <D extends DateTimeField> List<String> initializeValidValues(D[] values) { + return Arrays.stream(values).map(D::name).collect(Collectors.toList()); + } + + static <D extends DateTimeField> D resolveMatch(Map<String, D> resolutionMap, String possibleMatch) { + return resolutionMap.get(possibleMatch.toLowerCase(Locale.ROOT)); + } + + static List<String> findSimilar(Iterable<String> similars, String match) { + return StringUtils.findSimilar(match, similars); + } + + String name(); + + Iterable<String> aliases(); +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 8feb8fb18f443..4f03f7a7d72ff 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -51,6 +51,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToUnsignedLong; import 
org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToVersion; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateDiff; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateExtract; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateFormat; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; @@ -350,6 +351,7 @@ public static List<PlanNameRegistry.Entry> namedTypeEntries() { of(ScalarFunction.class, CIDRMatch.class, PlanNamedTypes::writeCIDRMatch, PlanNamedTypes::readCIDRMatch), of(ScalarFunction.class, Coalesce.class, PlanNamedTypes::writeVararg, PlanNamedTypes::readVarag), of(ScalarFunction.class, Concat.class, PlanNamedTypes::writeVararg, PlanNamedTypes::readVarag), + of(ScalarFunction.class, DateDiff.class, PlanNamedTypes::writeDateDiff, PlanNamedTypes::readDateDiff), of(ScalarFunction.class, DateExtract.class, PlanNamedTypes::writeDateExtract, PlanNamedTypes::readDateExtract), of(ScalarFunction.class, DateFormat.class, PlanNamedTypes::writeDateFormat, PlanNamedTypes::readDateFormat), of(ScalarFunction.class, DateParse.class, PlanNamedTypes::writeDateTimeParse, PlanNamedTypes::readDateTimeParse), @@ -1294,6 +1296,19 @@ static void writeCountDistinct(PlanStreamOutput out, CountDistinct countDistinct out.writeOptionalWriteable(fields.size() == 2 ? o -> out.writeExpression(fields.get(1)) : null); } + static DateDiff readDateDiff(PlanStreamInput in) throws IOException { + return new DateDiff(in.readSource(), in.readExpression(), in.readExpression(), in.readExpression()); + } + + static void writeDateDiff(PlanStreamOutput out, DateDiff function) throws IOException { + out.writeNoSource(); + List<Expression> fields = function.children(); + assert fields.size() == 3; + out.writeExpression(fields.get(0)); + out.writeExpression(fields.get(1)); + out.writeExpression(fields.get(2)); + } + + static DateExtract readDateExtract(PlanStreamInput in) throws IOException { return new DateExtract(in.readSource(), in.readExpression(), in.readExpression(), in.configuration()); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java new file mode 100644 index 0000000000000..15d0cca454407 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java @@ -0,0 +1,192 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.InvalidArgumentException; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.time.ZonedDateTime; +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class DateDiffTests extends AbstractFunctionTestCase { + public DateDiffTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable<Object[]> parameters() { + ZonedDateTime zdtStart = ZonedDateTime.parse("2023-12-04T10:15:30Z"); + ZonedDateTime zdtEnd = ZonedDateTime.parse("2023-12-05T10:45:00Z"); + + return parameterSuppliersFromTypedData( + List.of( + new TestCaseSupplier( + "Date Diff In Seconds - OK", + List.of(DataTypes.KEYWORD, DataTypes.DATETIME, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("seconds"), DataTypes.KEYWORD, "unit"), + new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.DATETIME, "endTimestamp") + ), + "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + + "endTimestamp=Attribute[channel=2]]", + DataTypes.INTEGER, + equalTo(88170) + ) + ), + new TestCaseSupplier( + "Date Diff In Seconds with text - OK", + List.of(DataTypes.TEXT, DataTypes.DATETIME, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("seconds"), DataTypes.TEXT, "unit"), + new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.DATETIME, "endTimestamp") + ), + "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + + "endTimestamp=Attribute[channel=2]]", + DataTypes.INTEGER, + equalTo(88170) + ) + ), + new TestCaseSupplier( + "Date Diff Error Type unit", + List.of(DataTypes.INTEGER, DataTypes.DATETIME, DataTypes.DATETIME), + () -> TestCaseSupplier.TestCase.typeError( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("seconds"), DataTypes.INTEGER, "unit"), + new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.DATETIME, "endTimestamp") + ), + "first argument of [] must be [string], found value [unit] type [integer]" + ) + ), + new TestCaseSupplier( + "Date Diff Error Type startTimestamp", + List.of(DataTypes.TEXT, DataTypes.INTEGER, DataTypes.DATETIME), + () -> TestCaseSupplier.TestCase.typeError( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("minutes"), DataTypes.TEXT, "unit"), + new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.INTEGER, "startTimestamp"), + new 
TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.DATETIME, "endTimestamp") + ), + "second argument of [] must be [datetime], found value [startTimestamp] type [integer]" + ) + ), + new TestCaseSupplier( + "Date Diff Error Type endTimestamp", + List.of(DataTypes.TEXT, DataTypes.DATETIME, DataTypes.INTEGER), + () -> TestCaseSupplier.TestCase.typeError( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("minutes"), DataTypes.TEXT, "unit"), + new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataTypes.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataTypes.INTEGER, "endTimestamp") + ), + "third argument of [] must be [datetime], found value [endTimestamp] type [integer]" + ) + ) + ) + ); + } + + public void testDateDiffFunction() { + ZonedDateTime zdtStart = ZonedDateTime.parse("2023-12-04T10:15:00Z"); + ZonedDateTime zdtEnd = ZonedDateTime.parse("2023-12-04T10:15:01Z"); + long startTimestamp = zdtStart.toInstant().toEpochMilli(); + long endTimestamp = zdtEnd.toInstant().toEpochMilli(); + + assertEquals(1000000000, DateDiff.process(new BytesRef("nanoseconds"), startTimestamp, endTimestamp)); + assertEquals(1000000000, DateDiff.process(new BytesRef("ns"), startTimestamp, endTimestamp)); + assertEquals(1000000, DateDiff.process(new BytesRef("microseconds"), startTimestamp, endTimestamp)); + assertEquals(1000000, DateDiff.process(new BytesRef("mcs"), startTimestamp, endTimestamp)); + assertEquals(1000, DateDiff.process(new BytesRef("milliseconds"), startTimestamp, endTimestamp)); + assertEquals(1000, DateDiff.process(new BytesRef("ms"), startTimestamp, endTimestamp)); + assertEquals(1, DateDiff.process(new BytesRef("seconds"), startTimestamp, endTimestamp)); + assertEquals(1, DateDiff.process(new BytesRef("ss"), startTimestamp, endTimestamp)); + assertEquals(1, DateDiff.process(new BytesRef("s"), startTimestamp, endTimestamp)); + + zdtEnd = zdtEnd.plusYears(1); + endTimestamp = zdtEnd.toInstant().toEpochMilli(); + + assertEquals(527040, DateDiff.process(new BytesRef("minutes"), startTimestamp, endTimestamp)); + assertEquals(527040, DateDiff.process(new BytesRef("mi"), startTimestamp, endTimestamp)); + assertEquals(527040, DateDiff.process(new BytesRef("n"), startTimestamp, endTimestamp)); + assertEquals(8784, DateDiff.process(new BytesRef("hours"), startTimestamp, endTimestamp)); + assertEquals(8784, DateDiff.process(new BytesRef("hh"), startTimestamp, endTimestamp)); + assertEquals(366, DateDiff.process(new BytesRef("weekdays"), startTimestamp, endTimestamp)); + assertEquals(366, DateDiff.process(new BytesRef("dw"), startTimestamp, endTimestamp)); + assertEquals(52, DateDiff.process(new BytesRef("weeks"), startTimestamp, endTimestamp)); + assertEquals(52, DateDiff.process(new BytesRef("wk"), startTimestamp, endTimestamp)); + assertEquals(52, DateDiff.process(new BytesRef("ww"), startTimestamp, endTimestamp)); + assertEquals(366, DateDiff.process(new BytesRef("days"), startTimestamp, endTimestamp)); + assertEquals(366, DateDiff.process(new BytesRef("dd"), startTimestamp, endTimestamp)); + assertEquals(366, DateDiff.process(new BytesRef("d"), startTimestamp, endTimestamp)); + assertEquals(366, DateDiff.process(new BytesRef("dy"), startTimestamp, endTimestamp)); + assertEquals(366, DateDiff.process(new BytesRef("y"), startTimestamp, endTimestamp)); + assertEquals(12, DateDiff.process(new BytesRef("months"), startTimestamp, endTimestamp)); + assertEquals(12, DateDiff.process(new BytesRef("mm"), 
startTimestamp, endTimestamp)); + assertEquals(12, DateDiff.process(new BytesRef("m"), startTimestamp, endTimestamp)); + assertEquals(4, DateDiff.process(new BytesRef("quarters"), startTimestamp, endTimestamp)); + assertEquals(4, DateDiff.process(new BytesRef("qq"), startTimestamp, endTimestamp)); + assertEquals(4, DateDiff.process(new BytesRef("q"), startTimestamp, endTimestamp)); + assertEquals(1, DateDiff.process(new BytesRef("years"), startTimestamp, endTimestamp)); + assertEquals(1, DateDiff.process(new BytesRef("yyyy"), startTimestamp, endTimestamp)); + assertEquals(1, DateDiff.process(new BytesRef("yy"), startTimestamp, endTimestamp)); + } + + public void testDateDiffFunctionErrorTooLarge() { + ZonedDateTime zdtStart = ZonedDateTime.parse("2023-12-04T10:15:00Z"); + ZonedDateTime zdtEnd = ZonedDateTime.parse("2023-12-04T10:20:00Z"); + long startTimestamp = zdtStart.toInstant().toEpochMilli(); + long endTimestamp = zdtEnd.toInstant().toEpochMilli(); + + InvalidArgumentException e = expectThrows( + InvalidArgumentException.class, + () -> DateDiff.process(new BytesRef("nanoseconds"), startTimestamp, endTimestamp) + ); + assertThat(e.getMessage(), containsString("[300000000000] out of [integer] range")); + } + + public void testDateDiffFunctionErrorUnitNotValid() { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> DateDiff.process(new BytesRef("sseconds"), 0, 0)); + assertThat( + e.getMessage(), + containsString( + "Received value [sseconds] is not a valid date part to add; " + + "did you mean [seconds, second, nanoseconds, milliseconds, microseconds, nanosecond]?" + ) + ); + + e = expectThrows(IllegalArgumentException.class, () -> DateDiff.process(new BytesRef("not-valid-unit"), 0, 0)); + assertThat( + e.getMessage(), + containsString( + "A value of [YEAR, QUARTER, MONTH, DAYOFYEAR, DAY, WEEK, WEEKDAY, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, " + + "NANOSECOND] or their aliases is required; received [not-valid-unit]" + ) + ); + } + + @Override + protected Expression build(Source source, List<Expression> args) { + return new DateDiff(source, args.get(0), args.get(1), args.get(2)); + } +} From 469b5b7d054e403df9548ec867481bdb817bc8d6 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 9 Jan 2024 12:29:03 +0000 Subject: [PATCH 39/47] Log rejection in SLMGetExpiredSnapshotsAction (#104114) We log failures encountered while computing the expired snapshots, but in principle we could fail to even execute that computation. This commit moves the exception handling to the listener wrapped by the `ActionRunnable` so that we get a log message on all failure paths. 
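Illustrative sketch (not part of the diff below): the logging listener is now the listener that the `ActionRunnable` itself completes, so it also fires when `retentionExecutor` rejects the task, not just when the per-repository computation fails. Here `computeExpiredSnapshots` is a hypothetical stand-in for the real `SubscribableListener` chain:

    retentionExecutor.execute(ActionRunnable.wrap(ActionListener.releaseAfter(new ActionListener<Void>() {
        @Override
        public void onResponse(Void unused) {}

        @Override
        public void onFailure(Exception e) {
            // reached on executor rejection as well as on computation failure
            logger.debug(Strings.format("[%s]: could not compute expired snapshots", repositoryName), e);
        }
    }, refs.acquire()), perRepositoryListener -> computeExpiredSnapshots(repositoryName, perRepositoryListener)));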
--- .../slm/SLMGetExpiredSnapshotsAction.java | 64 +++++++++---------- 1 file changed, 31 insertions(+), 33 deletions(-) diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsAction.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsAction.java index 40a93dfa11d80..c2f42c4cbfab4 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsAction.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Tuple; import org.elasticsearch.repositories.GetSnapshotInfoContext; import org.elasticsearch.repositories.RepositoriesService; @@ -69,7 +68,6 @@ private SLMGetExpiredSnapshotsAction() { public static class LocalAction extends TransportAction<Request, Response> { private final RepositoriesService repositoriesService; private final Executor retentionExecutor; - private final ThreadContext threadContext; private static final Logger logger = SLMGetExpiredSnapshotsAction.logger; @@ -77,9 +75,7 @@ public LocalAction(TransportService transportService, RepositoriesService repositoriesService, ActionFilters actionFilters) { super(INSTANCE.name(), actionFilters, transportService.getTaskManager()); this.repositoriesService = repositoriesService; - final var threadPool = transportService.getThreadPool(); - this.retentionExecutor = threadPool.executor(ThreadPool.Names.MANAGEMENT); - this.threadContext = threadPool.getThreadContext(); + this.retentionExecutor = transportService.getThreadPool().executor(ThreadPool.Names.MANAGEMENT); } private static class ResultsBuilder { @@ -120,34 +116,36 @@ protected void doExecute(Task task, Request request, ActionListener<Response> li continue; } - retentionExecutor.execute( - ActionRunnable.wrap( - refs.acquireListener(), - perRepositoryListener -> SubscribableListener - - // Get repository data - .<RepositoryData>newForked(l -> repository.getRepositoryData(retentionExecutor, l)) - - // Collect snapshot details by policy, and get any missing details by reading SnapshotInfo - .andThen( - (l, repositoryData) -> getSnapshotDetailsByPolicy(retentionExecutor, repository, repositoryData, l) - ) - - // Compute snapshots to delete for each (relevant) policy - .andThenAccept(snapshotDetailsByPolicy -> { - resultsBuilder.addResult( - repositoryName, - getSnapshotsToDelete(repositoryName, request.policies(), snapshotDetailsByPolicy) - ); - }) - - // And notify this repository's listener on completion - .addListener(perRepositoryListener.delegateResponse((l, e) -> { - logger.debug(Strings.format("[%s]: could not compute expired snapshots", repositoryName), e); - l.onResponse(null); - })) - ) - ); + retentionExecutor.execute(ActionRunnable.wrap(ActionListener.releaseAfter(new ActionListener<Void>() { + @Override + public void onResponse(Void unused) {} + + @Override + public void onFailure(Exception e) { + logger.debug(Strings.format("[%s]: could not compute expired snapshots", repositoryName), e); + } + }, refs.acquire()), + perRepositoryListener -> SubscribableListener + + // Get repository data + .<RepositoryData>newForked(l -> repository.getRepositoryData(retentionExecutor, l)) + + // Collect snapshot details by policy, and get any missing 
details by reading SnapshotInfo + .andThen( + (l, repositoryData) -> getSnapshotDetailsByPolicy(retentionExecutor, repository, repositoryData, l) + ) + + // Compute snapshots to delete for each (relevant) policy + .andThenAccept(snapshotDetailsByPolicy -> { + resultsBuilder.addResult( + repositoryName, + getSnapshotsToDelete(repositoryName, request.policies(), snapshotDetailsByPolicy) + ); + }) + + // And notify this repository's listener on completion + .addListener(perRepositoryListener) + )); } } } From dcb019dc15165d73c9145d632ef1fb23231b09dc Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Tue, 9 Jan 2024 13:30:48 +0100 Subject: [PATCH 40/47] remove accidentally added file (#104126) --- x | 320 -------------------------------------------------------------- 1 file changed, 320 deletions(-) delete mode 100644 x diff --git a/x b/x deleted file mode 100644 index 444b7eef651c6..0000000000000 --- a/x +++ /dev/null @@ -1,320 +0,0 @@ -➜ elasticsearch git:(metric_name_validation) ./gradlew run -Dtests.es.logger.org.elasticsearch.telemetry.apm=debug - -> Configure project :x-pack:plugin:searchable-snapshots:qa:hdfs -hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\bin in PATH -======================================= -Elasticsearch Build Hamster says Hello! - Gradle Version : 8.5 - OS Info : Mac OS X 14.1.2 (aarch64) - JDK Version : 17.0.2+8-LTS-86 (Oracle) - JAVA_HOME : /Library/Java/JavaVirtualMachines/jdk-17.0.2.jdk/Contents/Home - Random Testing Seed : B705DEF03AA4BF36 - In FIPS 140 mode : false -======================================= - -> Task :run -[2023-12-18T13:31:10.882688Z] [BUILD] Copying additional config files from distro [/Users/przemyslawgomulka/workspace/pgomulka/elasticsearch/distribution/archives/darwin-aarch64-tar/build/install/elasticsearch-8.13.0-SNAPSHOT/config/jvm.options.d, /Users/przemyslawgomulka/workspace/pgomulka/elasticsearch/distribution/archives/darwin-aarch64-tar/build/install/elasticsearch-8.13.0-SNAPSHOT/config/users_roles, /Users/przemyslawgomulka/workspace/pgomulka/elasticsearch/distribution/archives/darwin-aarch64-tar/build/install/elasticsearch-8.13.0-SNAPSHOT/config/role_mapping.yml, /Users/przemyslawgomulka/workspace/pgomulka/elasticsearch/distribution/archives/darwin-aarch64-tar/build/install/elasticsearch-8.13.0-SNAPSHOT/config/users, /Users/przemyslawgomulka/workspace/pgomulka/elasticsearch/distribution/archives/darwin-aarch64-tar/build/install/elasticsearch-8.13.0-SNAPSHOT/config/elasticsearch.yml, /Users/przemyslawgomulka/workspace/pgomulka/elasticsearch/distribution/archives/darwin-aarch64-tar/build/install/elasticsearch-8.13.0-SNAPSHOT/config/roles.yml, /Users/przemyslawgomulka/workspace/pgomulka/elasticsearch/distribution/archives/darwin-aarch64-tar/build/install/elasticsearch-8.13.0-SNAPSHOT/config/log4j2.properties, /Users/przemyslawgomulka/workspace/pgomulka/elasticsearch/distribution/archives/darwin-aarch64-tar/build/install/elasticsearch-8.13.0-SNAPSHOT/config/elasticsearch-plugins.example.yml, /Users/przemyslawgomulka/workspace/pgomulka/elasticsearch/distribution/archives/darwin-aarch64-tar/build/install/elasticsearch-8.13.0-SNAPSHOT/config/jvm.options] -[2023-12-18T13:31:10.885813Z] [BUILD] Creating elasticsearch keystore with password set to [] -[2023-12-18T13:31:11.630988Z] [BUILD] Adding 1 keystore settings and 0 keystore files -[2023-12-18T13:31:12.293546Z] [BUILD] Installing 0 modules -[2023-12-18T13:31:12.293769Z] [BUILD] Setting up 1 users -[2023-12-18T13:31:13.096132Z] [BUILD] Setting up roles.yml 
-[2023-12-18T13:31:13.096494Z] [BUILD] Starting Elasticsearch process -CompileCommand: exclude org/apache/lucene/util/MSBRadixSorter.computeCommonPrefixLengthAndBuildHistogram bool exclude = true -CompileCommand: exclude org/apache/lucene/util/RadixSelector.computeCommonPrefixLengthAndBuildHistogram bool exclude = true -Dec 18, 2023 2:31:14 PM sun.util.locale.provider.LocaleProviderAdapter -WARNING: COMPAT locale provider will be removed in a future release -[2023-12-18T14:31:15,025][INFO ][o.a.l.i.v.PanamaVectorizationProvider] [runTask-0] Java vector incubator API enabled; uses preferredBitSize=128 -[2023-12-18T14:31:15,437][INFO ][o.e.n.Node ] [runTask-0] version[8.13.0-SNAPSHOT], pid[43801], build[tar/25d9bbbb5327023f7f1896fb3044fb4c9d231342/2023-12-18T13:27:39.871866Z], OS[Mac OS X/14.1.2/aarch64], JVM[Oracle Corporation/OpenJDK 64-Bit Server VM/21/21+35-2513] -[2023-12-18T14:31:15,438][INFO ][o.e.n.Node ] [runTask-0] JVM home [/Users/przemyslawgomulka/.gradle/jdks/oracle_corporation-21-aarch64-os_x/jdk-21.jdk/Contents/Home], using bundled JDK [false] -[2023-12-18T14:31:15,438][INFO ][o.e.n.Node ] [runTask-0] JVM arguments [-Des.networkaddress.cache.ttl=60, -Des.networkaddress.cache.negative.ttl=10, -Djava.security.manager=allow, -XX:+AlwaysPreTouch, -Xss1m, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djna.nosys=true, -XX:-OmitStackTraceInFastThrow, -Dio.netty.noUnsafe=true, -Dio.netty.noKeySetOptimization=true, -Dio.netty.recycler.maxCapacityPerThread=0, -Dlog4j.shutdownHookEnabled=false, -Dlog4j2.disable.jmx=true, -Dlog4j2.formatMsgNoLookups=true, -Djava.locale.providers=SPI,COMPAT, --add-opens=java.base/java.io=org.elasticsearch.preallocate, -Des.distribution.type=tar, -XX:+UseG1GC, -Djava.io.tmpdir=/Users/przemyslawgomulka/workspace/pgomulka/elasticsearch/build/testclusters/runTask-0/tmp, --add-modules=jdk.incubator.vector, -XX:CompileCommand=exclude,org.apache.lucene.util.MSBRadixSorter::computeCommonPrefixLengthAndBuildHistogram, -XX:CompileCommand=exclude,org.apache.lucene.util.RadixSelector::computeCommonPrefixLengthAndBuildHistogram, -XX:+HeapDumpOnOutOfMemoryError, -XX:+ExitOnOutOfMemoryError, -XX:HeapDumpPath=/Users/przemyslawgomulka/workspace/pgomulka/elasticsearch/build/testclusters/runTask-0/logs, -XX:ErrorFile=/Users/przemyslawgomulka/workspace/pgomulka/elasticsearch/build/testclusters/runTask-0/logs/hs_err_pid%p.log, -Xlog:gc*,gc+age=trace,safepoint:file=/Users/przemyslawgomulka/workspace/pgomulka/elasticsearch/build/testclusters/runTask-0/logs/gc.log:utctime,level,pid,tags:filecount=32,filesize=64m, -Xms512m, -Xmx512m, -ea, -esa, -Dingest.geoip.downloader.enabled.default=true, -Dio.netty.leakDetection.level=paranoid, -XX:MaxDirectMemorySize=268435456, -XX:G1HeapRegionSize=4m, -XX:InitiatingHeapOccupancyPercent=30, -XX:G1ReservePercent=15, --module-path=/Users/przemyslawgomulka/workspace/pgomulka/elasticsearch/distribution/archives/darwin-aarch64-tar/build/install/elasticsearch-8.13.0-SNAPSHOT/lib, --add-modules=jdk.net, --add-modules=ALL-MODULE-PATH, -Djdk.module.main=org.elasticsearch.server] -[2023-12-18T14:31:15,438][WARN ][o.e.n.Node ] [runTask-0] version [8.13.0-SNAPSHOT] is a pre-release version of Elasticsearch and is not suitable for production -[2023-12-18T14:31:17,172][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [repository-url] -[2023-12-18T14:31:17,172][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [rest-root] -[2023-12-18T14:31:17,172][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-core] 
-[2023-12-18T14:31:17,172][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-redact] -[2023-12-18T14:31:17,173][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [ingest-user-agent] -[2023-12-18T14:31:17,173][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-async-search] -[2023-12-18T14:31:17,173][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [test-error-query] -[2023-12-18T14:31:17,173][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-monitoring] -[2023-12-18T14:31:17,173][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [repository-s3] -[2023-12-18T14:31:17,173][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-analytics] -[2023-12-18T14:31:17,173][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-ent-search] -[2023-12-18T14:31:17,173][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-autoscaling] -[2023-12-18T14:31:17,173][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [lang-painless] -[2023-12-18T14:31:17,173][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-ml] -[2023-12-18T14:31:17,174][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [test-die-with-dignity] -[2023-12-18T14:31:17,174][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [legacy-geo] -[2023-12-18T14:31:17,174][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [lang-mustache] -[2023-12-18T14:31:17,174][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-ql] -[2023-12-18T14:31:17,174][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [rank-rrf] -[2023-12-18T14:31:17,174][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [analysis-common] -[2023-12-18T14:31:17,174][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [test-seek-tracking-directory] -[2023-12-18T14:31:17,174][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [health-shards-availability] -[2023-12-18T14:31:17,174][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [transport-netty4] -[2023-12-18T14:31:17,174][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [aggregations] -[2023-12-18T14:31:17,174][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [ingest-common] -[2023-12-18T14:31:17,174][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-identity-provider] -[2023-12-18T14:31:17,175][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [frozen-indices] -[2023-12-18T14:31:17,175][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-text-structure] -[2023-12-18T14:31:17,175][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-shutdown] -[2023-12-18T14:31:17,175][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [snapshot-repo-test-kit] -[2023-12-18T14:31:17,175][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [ml-package-loader] -[2023-12-18T14:31:17,175][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [test-delayed-aggs] -[2023-12-18T14:31:17,175][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [kibana] -[2023-12-18T14:31:17,175][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [constant-keyword] -[2023-12-18T14:31:17,175][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-logstash] -[2023-12-18T14:31:17,175][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-graph] -[2023-12-18T14:31:17,175][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-ccr] -[2023-12-18T14:31:17,175][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-esql] 
-[2023-12-18T14:31:17,176][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [parent-join] -[2023-12-18T14:31:17,176][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [counted-keyword] -[2023-12-18T14:31:17,176][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-enrich] -[2023-12-18T14:31:17,176][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [repositories-metering-api] -[2023-12-18T14:31:17,176][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [transform] -[2023-12-18T14:31:17,176][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [repository-azure] -[2023-12-18T14:31:17,176][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [repository-gcs] -[2023-12-18T14:31:17,176][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [spatial] -[2023-12-18T14:31:17,176][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [apm] -[2023-12-18T14:31:17,176][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [mapper-extras] -[2023-12-18T14:31:17,176][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [mapper-version] -[2023-12-18T14:31:17,176][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-rollup] -[2023-12-18T14:31:17,176][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [percolator] -[2023-12-18T14:31:17,176][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [data-streams] -[2023-12-18T14:31:17,176][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-stack] -[2023-12-18T14:31:17,176][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [rank-eval] -[2023-12-18T14:31:17,177][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [reindex] -[2023-12-18T14:31:17,177][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [test-apm-integration] -[2023-12-18T14:31:17,177][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-security] -[2023-12-18T14:31:17,177][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [blob-cache] -[2023-12-18T14:31:17,177][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [searchable-snapshots] -[2023-12-18T14:31:17,177][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-slm] -[2023-12-18T14:31:17,177][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [test-latency-simulating-directory] -[2023-12-18T14:31:17,177][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [snapshot-based-recoveries] -[2023-12-18T14:31:17,177][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-watcher] -[2023-12-18T14:31:17,177][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [old-lucene-versions] -[2023-12-18T14:31:17,177][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-ilm] -[2023-12-18T14:31:17,177][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-inference] -[2023-12-18T14:31:17,177][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-voting-only-node] -[2023-12-18T14:31:17,177][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-deprecation] -[2023-12-18T14:31:17,177][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-fleet] -[2023-12-18T14:31:17,178][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-profiling] -[2023-12-18T14:31:17,178][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-aggregate-metric] -[2023-12-18T14:31:17,178][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-downsample] -[2023-12-18T14:31:17,178][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [ingest-geoip] 
-[2023-12-18T14:31:17,178][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-write-load-forecaster] -[2023-12-18T14:31:17,178][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [search-business-rules] -[2023-12-18T14:31:17,178][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [wildcard] -[2023-12-18T14:31:17,178][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [ingest-attachment] -[2023-12-18T14:31:17,178][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-apm-data] -[2023-12-18T14:31:17,178][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [unsigned-long] -[2023-12-18T14:31:17,178][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-sql] -[2023-12-18T14:31:17,179][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [runtime-fields-common] -[2023-12-18T14:31:17,179][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-async] -[2023-12-18T14:31:17,179][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [vector-tile] -[2023-12-18T14:31:17,179][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [lang-expression] -[2023-12-18T14:31:17,179][INFO ][o.e.p.PluginsService ] [runTask-0] loaded module [x-pack-eql] -[2023-12-18T14:31:17,224][INFO ][o.e.c.u.FeatureFlag ] [runTask-0] The current build is a snapshot, feature flag [failure_store] is enabled -[2023-12-18T14:31:17,520][INFO ][o.e.e.NodeEnvironment ] [runTask-0] using [1] data paths, mounts [[/System/Volumes/Data (/dev/disk3s5)]], net usable_space [636.9gb], net total_space [926.3gb], types [apfs] -[2023-12-18T14:31:17,520][INFO ][o.e.e.NodeEnvironment ] [runTask-0] heap size [512mb], compressed ordinary object pointers [true] -[2023-12-18T14:31:17,543][INFO ][o.e.n.Node ] [runTask-0] node name [runTask-0], node ID [RpeX_621SdCZSBh8-RSQvg], cluster name [runTask], roles [ingest, data_frozen, ml, data_hot, transform, data_content, data_warm, master, remote_cluster_client, data, data_cold] -[2023-12-18T14:31:19,294][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.allocator.desired_balance.shards.unassigned.count -[2023-12-18T14:31:19,294][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.allocator.desired_balance.shards.count -[2023-12-18T14:31:19,295][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.allocator.desired_balance.allocations.undesired.count -[2023-12-18T14:31:19,295][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.allocator.desired_balance.allocations.undesired.ratio -[2023-12-18T14:31:19,309][INFO ][o.e.c.u.FeatureFlag ] [runTask-0] The current build is a snapshot, feature flag [semantic_text] is enabled -[2023-12-18T14:31:19,326][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.breaker.parent.trip.total -[2023-12-18T14:31:19,326][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.breaker.field_data.trip.total -[2023-12-18T14:31:19,326][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.breaker.request.trip.total -[2023-12-18T14:31:19,326][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.breaker.in_flight_requests.trip.total -[2023-12-18T14:31:19,329][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.breaker.model_inference.trip.total -[2023-12-18T14:31:19,329][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] 
Registering an instrument with name: es.breaker.eql_sequence.trip.total -[2023-12-18T14:31:19,350][INFO ][o.e.f.FeatureService ] [runTask-0] Registered local node features [features_supported, health.dsl.info, usage.data_tiers.precalculate_stats] -[2023-12-18T14:31:19,497][INFO ][o.e.x.m.p.l.CppLogMessageHandler] [runTask-0] [controller/43802] [Main.cc@123] controller (64 bit): Version 8.13.0-SNAPSHOT (Build c9c232240dd04f) Copyright (c) 2023 Elasticsearch BV -[2023-12-18T14:31:19,659][INFO ][o.e.t.a.APM ] [runTask-0] Sending apm metrics is disabled -[2023-12-18T14:31:19,660][INFO ][o.e.t.a.APM ] [runTask-0] Sending apm tracing is disabled -[2023-12-18T14:31:19,670][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.test.long_counter.count -[2023-12-18T14:31:19,671][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.test.double_counter.count -[2023-12-18T14:31:19,671][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.test.double_histogram.histogram -[2023-12-18T14:31:19,671][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.test.long_histogram.histogram -[2023-12-18T14:31:19,672][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.test.double_gauge.total -[2023-12-18T14:31:19,672][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.test.long_gauge.total -[2023-12-18T14:31:19,672][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.test.async_long_counter.count -[2023-12-18T14:31:19,672][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.test.async_double_counter.count -[2023-12-18T14:31:19,672][INFO ][o.e.x.s.Security ] [runTask-0] Security is enabled -[2023-12-18T14:31:19,850][INFO ][o.e.x.s.a.s.FileRolesStore] [runTask-0] parsed [1] roles from file [/Users/przemyslawgomulka/workspace/pgomulka/elasticsearch/build/testclusters/runTask-0/config/roles.yml] -[2023-12-18T14:31:20,137][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.blob_cache.miss_that_triggered_read.count -[2023-12-18T14:31:20,138][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.blob_cache.evicted_used_regions.count -[2023-12-18T14:31:20,138][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.blob_cache.cache_miss_load.time -[2023-12-18T14:31:20,231][INFO ][o.e.x.w.Watcher ] [runTask-0] Watcher initialized components at 2023-12-18T13:31:20.231Z -[2023-12-18T14:31:20,256][INFO ][o.e.x.p.ProfilingPlugin ] [runTask-0] Profiling is enabled -[2023-12-18T14:31:20,267][INFO ][o.e.x.p.ProfilingPlugin ] [runTask-0] profiling index templates will not be installed or reinstalled -[2023-12-18T14:31:20,287][INFO ][o.e.x.a.APMPlugin ] [runTask-0] APM ingest plugin is disabled -[2023-12-18T14:31:20,408][INFO ][o.e.c.u.FeatureFlag ] [runTask-0] The current build is a snapshot, feature flag [connector_api] is enabled -[2023-12-18T14:31:20,553][INFO ][o.e.t.n.NettyAllocator ] [runTask-0] creating NettyAllocator with the following configs: [name=unpooled, suggested_max_allocation_size=1mb, factors={es.unsafe.use_unpooled_allocator=null, g1gc_enabled=true, g1gc_region_size=4mb, heap_size=512mb}] -[2023-12-18T14:31:20,566][INFO ][o.e.i.r.RecoverySettings ] [runTask-0] using rate limit [40mb] with [default=40mb, read=0b, write=0b, max=0b] 
-[2023-12-18T14:31:20,567][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.repositories.requests.count -[2023-12-18T14:31:20,567][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.repositories.exceptions.count -[2023-12-18T14:31:20,567][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.repositories.throttles.count -[2023-12-18T14:31:20,567][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.repositories.operations.count -[2023-12-18T14:31:20,568][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.repositories.operations.unsuccessful.count -[2023-12-18T14:31:20,568][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.repositories.exceptions.histogram -[2023-12-18T14:31:20,568][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.repositories.throttles.histogram -[2023-12-18T14:31:20,568][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.repositories.requests.http_request_time.histogram -[2023-12-18T14:31:20,591][INFO ][o.e.d.DiscoveryModule ] [runTask-0] using discovery type [multi-node] and seed hosts providers [settings, file] -[2023-12-18T14:31:20,619][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.health.overall.red.status -[2023-12-18T14:31:21,154][INFO ][o.e.n.Node ] [runTask-0] initialized -[2023-12-18T14:31:21,155][INFO ][o.e.n.Node ] [runTask-0] starting ... -[2023-12-18T14:31:21,182][INFO ][o.e.x.s.c.f.PersistentCache] [runTask-0] persistent cache index loaded -[2023-12-18T14:31:21,183][INFO ][o.e.x.d.l.DeprecationIndexingComponent] [runTask-0] deprecation component started -[2023-12-18T14:31:21,232][INFO ][o.e.t.TransportService ] [runTask-0] publish_address {127.0.0.1:9300}, bound_addresses {[::1]:9300}, {127.0.0.1:9300} -[2023-12-18T14:31:21,322][WARN ][o.e.b.BootstrapChecks ] [runTask-0] Transport SSL must be enabled if security is enabled. 
Please set [xpack.security.transport.ssl.enabled] to [true] or disable security by setting [xpack.security.enabled] to [false]; for more information see [https://www.elastic.co/guide/en/elasticsearch/reference/master/bootstrap-checks-xpack.html#bootstrap-checks-tls] -[2023-12-18T14:31:21,323][INFO ][o.e.c.c.ClusterBootstrapService] [runTask-0] this node has not joined a bootstrapped cluster yet; [cluster.initial_master_nodes] is set to [runTask-0] -[2023-12-18T14:31:21,325][WARN ][o.e.d.FileBasedSeedHostsProvider] [runTask-0] expected, but did not find, a dynamic hosts list at [/Users/przemyslawgomulka/workspace/pgomulka/elasticsearch/build/testclusters/runTask-0/config/unicast_hosts.txt] -[2023-12-18T14:31:21,325][INFO ][o.e.c.c.Coordinator ] [runTask-0] setting initial configuration to VotingConfiguration{RpeX_621SdCZSBh8-RSQvg} -[2023-12-18T14:31:21,367][INFO ][o.e.h.AbstractHttpServerTransport] [runTask-0] publish_address {127.0.0.1:9200}, bound_addresses {[::1]:9200}, {127.0.0.1:9200} -[2023-12-18T14:31:21,372][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.get.total -[2023-12-18T14:31:21,372][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.get.time -[2023-12-18T14:31:21,372][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.search.fetch.total -[2023-12-18T14:31:21,373][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.search.fetch.time -[2023-12-18T14:31:21,373][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.merge.total -[2023-12-18T14:31:21,373][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.merge.time -[2023-12-18T14:31:21,373][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.translog.operations.count -[2023-12-18T14:31:21,373][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.translog.size -[2023-12-18T14:31:21,373][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.translog.uncommitted_operations.count -[2023-12-18T14:31:21,373][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.translog.uncommitted.size -[2023-12-18T14:31:21,373][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.translog.earliest_last_modified_age.time -[2023-12-18T14:31:21,373][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.transport.rx.size -[2023-12-18T14:31:21,373][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.transport.tx.size -[2023-12-18T14:31:21,374][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.jvm.mem.pools.young.size -[2023-12-18T14:31:21,374][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.jvm.mem.pools.survivor.size -[2023-12-18T14:31:21,374][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.jvm.mem.pools.old.size -[2023-12-18T14:31:21,374][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: 
es.node.stats.fs.io_stats.io_time.total -[2023-12-18T14:31:21,374][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.indexing.docs.total -[2023-12-18T14:31:21,374][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.indexing.docs.total -[2023-12-18T14:31:21,374][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.indexing.failed.total -[2023-12-18T14:31:21,374][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.deletion.docs.total -[2023-12-18T14:31:21,374][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.deletion.docs.total -[2023-12-18T14:31:21,375][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.indexing.time -[2023-12-18T14:31:21,375][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.deletion.time -[2023-12-18T14:31:21,375][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.throttle.time -[2023-12-18T14:31:21,375][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.noop.total -[2023-12-18T14:31:21,375][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.indexing.coordinating_operations.memory.size.total -[2023-12-18T14:31:21,375][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.indexing.coordinating_operations.count.total -[2023-12-18T14:31:21,375][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.indexing.coordinating_operations.memory.size.total -[2023-12-18T14:31:21,375][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.indexing.coordinating_operations.count -[2023-12-18T14:31:21,376][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.indexing.coordinating_operations.rejections.total -[2023-12-18T14:31:21,376][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.indexing.primary_operations.memory.size.total -[2023-12-18T14:31:21,376][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.indexing.primary_operations.count.total -[2023-12-18T14:31:21,376][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.indexing.primary_operations.memory.size.total -[2023-12-18T14:31:21,376][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.indexing.primary_operations.count.total -[2023-12-18T14:31:21,376][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.indexing.primary_operations.rejections.total -[2023-12-18T14:31:21,376][DEBUG][o.e.t.a.APMMeterRegistry ] [runTask-0] Registering an instrument with name: es.node.stats.indices.indexing.memory.limit.total -[2023-12-18T14:31:21,376][INFO ][o.e.n.Node ] [runTask-0] started {runTask-0}{RpeX_621SdCZSBh8-RSQvg}{2w_LQi0HTTetBZfSdpLG6Q}{runTask-0}{127.0.0.1}{127.0.0.1:9300}{cdfhilmrstw}{8.13.0}{7000099-8500007}{ml.machine_memory=68719476736, 
From 0d53be3bed94daf761cf2b3c6e5d6d41cf93f4a4 Mon Sep 17 00:00:00 2001
From: Simon Cooper
Date: Tue, 9 Jan 2024 12:55:29 +0000
Subject: [PATCH 41/47] Expand uses of matchers for Optionals (#104123)

Also add variants to assert for specific values
---
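Notes (placed after the --- so they stay out of the commit message; both
snippets below are taken from hunks in this patch): the new isPresentWith
variant folds the usual isPresent()/get() pair into a single assertion whose
failure message reports the actual contents. In EmbeddedModulePathTests,

    assertThat(over, isPresent());
    assertThat(over.get(), is(Version.parse("1.2")));

becomes

    assertThat(over, isPresentWith(Version.parse("1.2")));

and because an overload accepts a Matcher rather than a bare value, it
composes with other matchers, as in RequestValidatorsTests:

    assertThat(e, isPresentWith(transformedMatch(Exception::getSuppressed, arrayWithSize(numberOfFailures - 1))));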
 .../provider/EmbeddedModulePathTests.java     | 11 ++--
 .../provider/InMemoryModuleFinderTests.java   | 19 +++---
 .../azure/AzureBlobContainerRetriesTests.java |  4 +-
 ...CloudStorageBlobContainerRetriesTests.java |  3 +-
 .../systemd/SystemdPluginTests.java           | 18 +++--
 .../action/RequestValidatorsTests.java        | 24 +++----
 .../common/network/NetworkUtilsTests.java     |  6 +-
 .../index/engine/InternalEngineTests.java     |  3 +-
 .../plugins/ExtensionLoaderTests.java         |  4 +-
 .../plugins/PluginDescriptorTests.java        |  5 +-
 .../RemoteConnectionManagerTests.java         |  5 +-
 .../test/hamcrest/OptionalMatchers.java       | 65 +++++++++++--------
 .../test/hamcrest/OptionalMatchersTests.java  | 55 ++++++++++++++++
 .../AutoscalingNodesInfoServiceTests.java     | 24 +++----
 .../TrainedModelAssignmentTests.java          | 18 ++---
 .../xpack/core/ml/job/config/JobTests.java    |  3 +-
 .../MlDeprecationCheckerTests.java            |  3 +-
 .../xpack/enrich/EnrichRestartIT.java         |  7 +-
 .../xpack/ml/integration/RegressionIT.java    | 39 +++++------
 .../MlMemoryAutoscalingDeciderTests.java      | 11 ++--
 .../DataFrameDataExtractorTests.java          | 13 ++--
 .../planning/AssignmentPlannerTests.java      |  7 +-
 .../planning/PreserveAllAllocationsTests.java | 24 +++----
 .../planning/PreserveOneAllocationTests.java  | 13 ++--
 24 files changed, 231 insertions(+), 153 deletions(-)
 create mode 100644 test/framework/src/test/java/org/elasticsearch/test/hamcrest/OptionalMatchersTests.java

diff --git a/libs/core/src/test/java/org/elasticsearch/core/internal/provider/EmbeddedModulePathTests.java b/libs/core/src/test/java/org/elasticsearch/core/internal/provider/EmbeddedModulePathTests.java
index f4c83f1a77902..4571591bd6649 100644
--- a/libs/core/src/test/java/org/elasticsearch/core/internal/provider/EmbeddedModulePathTests.java
+++ b/libs/core/src/test/java/org/elasticsearch/core/internal/provider/EmbeddedModulePathTests.java
@@ -31,7 +31,7 @@
 import static org.elasticsearch.test.hamcrest.ModuleDescriptorMatchers.exportsOf;
 import static org.elasticsearch.test.hamcrest.ModuleDescriptorMatchers.opensOf;
 import static org.elasticsearch.test.hamcrest.OptionalMatchers.isEmpty;
-import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresent;
+import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith;
 import static org.hamcrest.Matchers.aMapWithSize;
 import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.containsInAnyOrder;
@@ -51,16 +51,13 @@ public void testVersion() {
         assertThat(over, isEmpty());
 
         over = EmbeddedModulePath.version("foo-1.2.jar");
-        assertThat(over, isPresent());
-        assertThat(over.get(), is(Version.parse("1.2")));
+        assertThat(over, isPresentWith(Version.parse("1.2")));
 
         over = EmbeddedModulePath.version("foo-bar-1.2.3-SNAPSHOT.jar");
-        assertThat(over, isPresent());
-        assertThat(over.get(), is(Version.parse("1.2.3-SNAPSHOT")));
+        assertThat(over, isPresentWith(Version.parse("1.2.3-SNAPSHOT")));
 
         over = EmbeddedModulePath.version("elasticsearch-8.3.0-SNAPSHOT.jar");
-        assertThat(over, isPresent());
-        assertThat(over.get(), is(Version.parse("8.3.0-SNAPSHOT")));
+        assertThat(over, isPresentWith(Version.parse("8.3.0-SNAPSHOT")));
 
         expectThrows(IAE, () -> EmbeddedModulePath.version(""));
         expectThrows(IAE, () -> EmbeddedModulePath.version("foo"));
diff --git a/libs/core/src/test/java/org/elasticsearch/core/internal/provider/InMemoryModuleFinderTests.java b/libs/core/src/test/java/org/elasticsearch/core/internal/provider/InMemoryModuleFinderTests.java
index 5f7cc6374339f..361b9ea7fae0c 100644
--- a/libs/core/src/test/java/org/elasticsearch/core/internal/provider/InMemoryModuleFinderTests.java
+++ b/libs/core/src/test/java/org/elasticsearch/core/internal/provider/InMemoryModuleFinderTests.java
@@ -29,12 +29,14 @@
 import static org.elasticsearch.test.hamcrest.ModuleDescriptorMatchers.providesOf;
 import static org.elasticsearch.test.hamcrest.ModuleDescriptorMatchers.requiresOf;
 import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresent;
+import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith;
 import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasItem;
+import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.iterableWithSize;
 import static org.hamcrest.Matchers.not;
 
 public class InMemoryModuleFinderTests extends ESTestCase {
@@ -45,7 +47,7 @@ public void testOfModuleDescriptor() {
         ModuleDescriptor fooMd = ModuleDescriptor.newModule("foo").build();
         ModuleDescriptor barMd = ModuleDescriptor.newModule("bar").build();
         var finder = InMemoryModuleFinder.of(fooMd, barMd);
-        assertThat(finder.findAll().size(), is(2));
+        assertThat(finder.findAll(), hasSize(2));
         var fooMod = finder.find("foo");
         var barMod = finder.find("bar");
         assertThat(fooMod, isPresent());
@@ -79,7 +81,7 @@ public void testAutoModuleEmbeddedJar() throws Exception {
 
         // automatic module, and no filtering
         var finder = InMemoryModuleFinder.of(Set.of(), fooRoot);
-        assertThat(finder.findAll().size(), is(1));
+        assertThat(finder.findAll(), hasSize(1));
         var mod = finder.find("foo");
         assertThat(mod, isPresent());
         assertThat(mod.get().descriptor().isAutomatic(), is(true));
@@ -135,7 +137,7 @@ private void testExplicitModuleEmbeddedJarVersionSpecific(int version) throws Ex
         try (FileSystem fileSystem = FileSystems.newFileSystem(outerJar, Map.of(), InMemoryModuleFinderTests.class.getClassLoader())) {
             Path mRoot =
fileSystem.getPath("/a/b/m.jar"); var finder = InMemoryModuleFinder.of(Set.of(), mRoot); - assertThat(finder.findAll().size(), is(1)); + assertThat(finder.findAll(), hasSize(1)); var mref = finder.find("m"); assertThat(mref, isPresent()); assertThat(mref.get().descriptor().isAutomatic(), is(false)); @@ -161,7 +163,7 @@ public void testAutoModuleExplodedPath() throws Exception { // automatic module, and no filtering var finder = InMemoryModuleFinder.of(Set.of(), fooRoot); - assertThat(finder.findAll().size(), is(1)); + assertThat(finder.findAll(), hasSize(1)); var mod = finder.find("foo"); assertThat(mod, isPresent()); assertThat(mod.get().descriptor().isAutomatic(), is(true)); @@ -218,8 +220,7 @@ public void testFilterRequiresBasic() { { // filter the bar module var md = InMemoryModuleFinder.filterRequires(initialMd, Set.of("bar")); assertThat(md.name(), is("foo")); - assertThat(md.version(), isPresent()); - assertThat(md.version().get(), is(Version.parse("1.0"))); + assertThat(md.version(), isPresentWith(Version.parse("1.0"))); assertThat(md.requires(), hasItem(requiresOf("baz"))); assertThat(md.requires(), not(hasItem(requiresOf("bar")))); assertThat(md.exports(), containsInAnyOrder(exportsOf("p"), exportsOf("q", Set.of("baz")))); @@ -240,8 +241,8 @@ public void testFilterRequiresOpenModule() { assertThat(md.isOpen(), is(true)); assertThat(md.name(), equalTo("openMod")); assertThat(md.requires(), not(hasItem(requiresOf("bar")))); - assertThat(md.exports(), iterableWithSize(0)); - assertThat(md.opens(), iterableWithSize(0)); + assertThat(md.exports(), empty()); + assertThat(md.opens(), empty()); } public void testFilterRequiresAutoModule() { diff --git a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java index 3cc56c949e852..9fed6708de53c 100644 --- a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java +++ b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java @@ -44,6 +44,7 @@ import static java.nio.charset.StandardCharsets.UTF_8; import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase.randomBytes; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -133,8 +134,7 @@ public void testReadRangeBlobWithRetries() throws Exception { final int rangeStart = getRangeStart(exchange); assertThat(rangeStart, lessThan(bytes.length)); final Optional rangeEnd = getRangeEnd(exchange); - assertThat(rangeEnd.isPresent(), is(true)); - assertThat(rangeEnd.get(), greaterThanOrEqualTo(rangeStart)); + assertThat(rangeEnd, isPresentWith(greaterThanOrEqualTo(rangeStart))); final int length = (rangeEnd.get() - rangeStart) + 1; assertThat(length, lessThanOrEqualTo(bytes.length - rangeStart)); exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); diff --git a/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java b/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java 
index 673499e4b2461..7cfe95bc69f23 100644 --- a/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java +++ b/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java @@ -70,6 +70,7 @@ import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.ENDPOINT_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.TOKEN_URI_SETTING; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresent; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -203,7 +204,7 @@ public void testWriteBlobWithRetries() throws Exception { assertThat(exchange.getRequestURI().getQuery(), containsString("uploadType=multipart")); if (countDown.countDown()) { Optional> content = parseMultipartRequestBody(exchange.getRequestBody()); - assertThat(content.isPresent(), is(true)); + assertThat(content, isPresent()); assertThat(content.get().v1(), equalTo(blobContainer.path().buildAsString() + "write_blob_max_retries")); if (Objects.deepEquals(bytes, BytesReference.toBytes(content.get().v2()))) { byte[] response = Strings.format(""" diff --git a/modules/systemd/src/test/java/org/elasticsearch/systemd/SystemdPluginTests.java b/modules/systemd/src/test/java/org/elasticsearch/systemd/SystemdPluginTests.java index ed5fdb79c4ba6..c2d0983e4f825 100644 --- a/modules/systemd/src/test/java/org/elasticsearch/systemd/SystemdPluginTests.java +++ b/modules/systemd/src/test/java/org/elasticsearch/systemd/SystemdPluginTests.java @@ -25,6 +25,8 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasToString; @@ -110,12 +112,16 @@ public void testOnNodeStartedSuccess() { public void testOnNodeStartedFailure() { final int rc = randomIntBetween(Integer.MIN_VALUE, -1); - runTestOnNodeStarted(Boolean.TRUE.toString(), rc, (maybe, plugin) -> { - assertThat(maybe, OptionalMatchers.isPresent()); - // noinspection OptionalGetWithoutIsPresent - assertThat(maybe.get(), instanceOf(RuntimeException.class)); - assertThat(maybe.get(), hasToString(containsString("sd_notify returned error [" + rc + "]"))); - }); + runTestOnNodeStarted( + Boolean.TRUE.toString(), + rc, + (maybe, plugin) -> assertThat( + maybe, + isPresentWith( + allOf(instanceOf(RuntimeException.class), hasToString(containsString("sd_notify returned error [" + rc + "]"))) + ) + ) + ); } public void testOnNodeStartedNotEnabled() { diff --git a/server/src/test/java/org/elasticsearch/action/RequestValidatorsTests.java b/server/src/test/java/org/elasticsearch/action/RequestValidatorsTests.java index 8db6f8b1186bb..45843b553069f 100644 --- a/server/src/test/java/org/elasticsearch/action/RequestValidatorsTests.java +++ b/server/src/test/java/org/elasticsearch/action/RequestValidatorsTests.java @@ -11,13 +11,17 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.Randomness; import org.elasticsearch.test.ESTestCase; -import 
org.elasticsearch.test.hamcrest.OptionalMatchers; -import org.hamcrest.Matchers; import java.util.ArrayList; import java.util.List; import java.util.Optional; +import static org.elasticsearch.test.LambdaMatchers.transformedMatch; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isEmpty; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresent; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith; +import static org.hamcrest.Matchers.arrayWithSize; + public class RequestValidatorsTests extends ESTestCase { private final RequestValidators.RequestValidator EMPTY = (request, state, indices) -> Optional.empty(); @@ -32,17 +36,17 @@ public void testValidates() { validators.add(EMPTY); } final RequestValidators requestValidators = new RequestValidators<>(validators); - assertThat(requestValidators.validateRequest(null, null, null), OptionalMatchers.isEmpty()); + assertThat(requestValidators.validateRequest(null, null, null), isEmpty()); } public void testFailure() { final RequestValidators validators = new RequestValidators<>(List.of(FAIL)); - assertThat(validators.validateRequest(null, null, null), OptionalMatchers.isPresent()); + assertThat(validators.validateRequest(null, null, null), isPresent()); } public void testValidatesAfterFailure() { final RequestValidators validators = new RequestValidators<>(List.of(FAIL, EMPTY)); - assertThat(validators.validateRequest(null, null, null), OptionalMatchers.isPresent()); + assertThat(validators.validateRequest(null, null, null), isPresent()); } public void testMultipleFailures() { @@ -53,9 +57,7 @@ public void testMultipleFailures() { } final RequestValidators requestValidators = new RequestValidators<>(validators); final Optional e = requestValidators.validateRequest(null, null, null); - assertThat(e, OptionalMatchers.isPresent()); - // noinspection OptionalGetWithoutIsPresent - assertThat(e.get().getSuppressed(), Matchers.arrayWithSize(numberOfFailures - 1)); + assertThat(e, isPresentWith(transformedMatch(Exception::getSuppressed, arrayWithSize(numberOfFailures - 1)))); } public void testRandom() { @@ -74,11 +76,9 @@ public void testRandom() { final RequestValidators requestValidators = new RequestValidators<>(validators); final Optional e = requestValidators.validateRequest(null, null, null); if (numberOfFailures == 0) { - assertThat(e, OptionalMatchers.isEmpty()); + assertThat(e, isEmpty()); } else { - assertThat(e, OptionalMatchers.isPresent()); - // noinspection OptionalGetWithoutIsPresent - assertThat(e.get().getSuppressed(), Matchers.arrayWithSize(numberOfFailures - 1)); + assertThat(e, isPresentWith(transformedMatch(Exception::getSuppressed, arrayWithSize(numberOfFailures - 1)))); } } diff --git a/server/src/test/java/org/elasticsearch/common/network/NetworkUtilsTests.java b/server/src/test/java/org/elasticsearch/common/network/NetworkUtilsTests.java index 2c6b4e0983b2b..30687e35a8ad9 100644 --- a/server/src/test/java/org/elasticsearch/common/network/NetworkUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/common/network/NetworkUtilsTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.common.network; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.hamcrest.OptionalMatchers; import java.net.InetAddress; import java.net.NetworkInterface; @@ -17,6 +16,8 @@ import java.util.Optional; import static org.elasticsearch.common.network.NetworkUtils.getInterfaces; +import static org.elasticsearch.test.LambdaMatchers.transformedMatch; +import static 
org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -80,8 +81,7 @@ public void testMaybeGetInterfaceByName() throws Exception { networkInterfaces, netIf.getName() ); - assertThat(maybeNetworkInterface, OptionalMatchers.isPresent()); - assertThat(maybeNetworkInterface.get().getName(), equalTo(netIf.getName())); + assertThat(maybeNetworkInterface, isPresentWith(transformedMatch(NetworkInterface::getName, equalTo(netIf.getName())))); } } diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index d4f69ef669c83..16bf27207c130 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -185,6 +185,7 @@ import static org.elasticsearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresent; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.sameInstance; import static org.hamcrest.Matchers.contains; @@ -7719,7 +7720,7 @@ private static void assertCommitGenerations(List commits, List commits, long generation) { var releasable = commits.keySet().stream().filter(c -> c.getGeneration() == generation).findFirst(); - assertThat(releasable.isPresent(), is(true)); + assertThat(releasable, isPresent()); Engine.IndexCommitRef indexCommitRef = commits.get(releasable.get()); try { indexCommitRef.close(); diff --git a/server/src/test/java/org/elasticsearch/plugins/ExtensionLoaderTests.java b/server/src/test/java/org/elasticsearch/plugins/ExtensionLoaderTests.java index f9647c27e0acb..b0415104f4422 100644 --- a/server/src/test/java/org/elasticsearch/plugins/ExtensionLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/ExtensionLoaderTests.java @@ -23,9 +23,9 @@ import java.util.Optional; import java.util.ServiceLoader; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isEmpty; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; @@ -72,7 +72,7 @@ public int getValue() { public void testNoProvider() { Optional service = ExtensionLoader.loadSingleton(ServiceLoader.load(TestService.class)); - assertThat(service.isEmpty(), is(true)); + assertThat(service, isEmpty()); } public void testOneProvider() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginDescriptorTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginDescriptorTests.java index 000dc1a33ed91..7ed4d975fe3be 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginDescriptorTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginDescriptorTests.java @@ -24,6 +24,7 @@ import java.util.Map; import java.util.function.Consumer; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isEmpty; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; @@ -183,14 +184,14 @@ public void 
testReadFromPropertiesJvmMissingClassname() throws Exception {
 
     public void testReadFromPropertiesModulenameFallback() throws Exception {
         PluginDescriptor info = mockInternalDescriptor("modulename", null);
-        assertThat(info.getModuleName().isPresent(), is(false));
+        assertThat(info.getModuleName(), isEmpty());
         assertThat(info.isModular(), is(false));
         assertThat(info.getExtendedPlugins(), empty());
     }
 
     public void testReadFromPropertiesModulenameEmpty() throws Exception {
         PluginDescriptor info = mockInternalDescriptor("modulename", " ");
-        assertThat(info.getModuleName().isPresent(), is(false));
+        assertThat(info.getModuleName(), isEmpty());
         assertThat(info.isModular(), is(false));
         assertThat(info.getExtendedPlugins(), empty());
     }
diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java
index b1ffda669e6a1..d8ddd7c356b33 100644
--- a/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java
+++ b/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java
@@ -28,10 +28,10 @@
 import java.util.Set;
 import java.util.concurrent.ExecutionException;
 
+import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith;
 import static org.elasticsearch.transport.RemoteClusterService.REMOTE_CLUSTER_HANDSHAKE_ACTION_NAME;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.is;
 import static org.hamcrest.core.IsInstanceOf.instanceOf;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.doAnswer;
@@ -166,8 +166,7 @@ public void testWrapAndResolveConnectionRoundTrip() {
         final Optional<RemoteConnectionManager.RemoteClusterAliasWithCredentials> actual = RemoteConnectionManager
             .resolveRemoteClusterAliasWithCredentials(wrappedConnection);
-        assertThat(actual.isPresent(), is(true));
-        assertThat(actual.get(), equalTo(new RemoteConnectionManager.RemoteClusterAliasWithCredentials(clusterAlias, credentials)));
+        assertThat(actual, isPresentWith(new RemoteConnectionManager.RemoteClusterAliasWithCredentials(clusterAlias, credentials)));
     }
 
     private static class TestRemoteConnection extends CloseableConnection {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/OptionalMatchers.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/OptionalMatchers.java
index 333b8dea76ce2..1cd92296a4ec7 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/OptionalMatchers.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/OptionalMatchers.java
@@ -8,66 +8,79 @@
 
 package org.elasticsearch.test.hamcrest;
 
+import org.hamcrest.BaseMatcher;
 import org.hamcrest.Description;
+import org.hamcrest.Matcher;
 import org.hamcrest.TypeSafeMatcher;
 
 import java.util.Optional;
 
+import static org.hamcrest.Matchers.anything;
+import static org.hamcrest.Matchers.equalTo;
+
 public class OptionalMatchers {
 
     private static class IsEmptyMatcher extends TypeSafeMatcher<Optional<?>> {
-
         @Override
-        protected boolean matchesSafely(final Optional<?> item) {
-            // noinspection OptionalAssignedToNull
-            return item != null && item.isEmpty();
+        protected boolean matchesSafely(Optional<?> item) {
+            return item.isEmpty();
         }
 
         @Override
-        public void describeTo(final Description description) {
-            description.appendText("expected empty optional");
+        protected void describeMismatchSafely(Optional<?> item, Description mismatchDescription) {
+            mismatchDescription.appendText("a non-empty optional ").appendValue(item.get());
         }
 
         @Override
-        protected void describeMismatchSafely(final Optional<?> item, final Description mismatchDescription) {
-            if (item == null) {
-                mismatchDescription.appendText("was null");
-            } else {
-                mismatchDescription.appendText("was ").appendText(item.toString());
-            }
+        public void describeTo(Description description) {
+            description.appendText("an empty optional");
         }
-
     }
 
-    public static IsEmptyMatcher isEmpty() {
+    public static Matcher<Optional<?>> isEmpty() {
         return new IsEmptyMatcher();
     }
 
-    private static class IsPresentMatcher extends TypeSafeMatcher<Optional<?>> {
+    private static class IsPresentMatcher<T> extends BaseMatcher<Optional<? extends T>> {
+        private final Matcher<? super T> contents;
+
+        private IsPresentMatcher(Matcher<? super T> contents) {
+            this.contents = contents;
+        }
 
         @Override
-        protected boolean matchesSafely(final Optional<?> item) {
-            return item != null && item.isPresent();
+        public boolean matches(Object actual) {
+            Optional<?> opt = (Optional<?>) actual;
+            return opt.isPresent() && contents.matches(opt.get());
         }
 
         @Override
-        public void describeTo(final Description description) {
-            description.appendText("expected non-empty optional");
+        public void describeTo(Description description) {
+            description.appendText("a non-empty optional ").appendDescriptionOf(contents);
         }
 
         @Override
-        protected void describeMismatchSafely(final Optional<?> item, final Description mismatchDescription) {
-            if (item == null) {
-                mismatchDescription.appendText("was null");
-            } else {
-                mismatchDescription.appendText("was empty");
+        public void describeMismatch(Object item, Description description) {
+            Optional<?> opt = (Optional<?>) item;
+            if (opt.isEmpty()) {
+                description.appendText("an empty optional");
+                return;
             }
+
+            description.appendText("an optional ");
+            contents.describeMismatch(opt.get(), description);
         }
+    }
 
+    public static Matcher<Optional<?>> isPresent() {
+        return new IsPresentMatcher<>(anything());
     }
 
-    public static IsPresentMatcher isPresent() {
-        return new IsPresentMatcher();
+    public static <T> Matcher<Optional<? extends T>> isPresentWith(T contents) {
+        return new IsPresentMatcher<>(equalTo(contents));
    }
+
+    public static <T> Matcher<Optional<? extends T>> isPresentWith(Matcher<? super T> contents) {
+        return new IsPresentMatcher<>(contents);
+    }
 }
diff --git a/test/framework/src/test/java/org/elasticsearch/test/hamcrest/OptionalMatchersTests.java b/test/framework/src/test/java/org/elasticsearch/test/hamcrest/OptionalMatchersTests.java
new file mode 100644
index 0000000000000..0318410bb269f
--- /dev/null
+++ b/test/framework/src/test/java/org/elasticsearch/test/hamcrest/OptionalMatchersTests.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.test.hamcrest;
+
+import org.elasticsearch.test.ESTestCase;
+import org.hamcrest.StringDescription;
+
+import java.util.Optional;
+
+import static org.elasticsearch.test.hamcrest.OptionalMatchers.isEmpty;
+import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresent;
+import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.not;
+
+public class OptionalMatchersTests extends ESTestCase {
+
+    public void testEmptyMatcher() {
+        assertThat(Optional.empty(), isEmpty());
+        assertThat(Optional.of(""), not(isEmpty()));
+
+        StringDescription desc = new StringDescription();
+        isEmpty().describeMismatch(Optional.of(""), desc);
+        assertThat(desc.toString(), equalTo("a non-empty optional \"\""));
+    }
+
+    public void testIsPresentMatcher() {
+        assertThat(Optional.of(""), isPresent());
+        assertThat(Optional.empty(), not(isPresent()));
+
+        StringDescription desc = new StringDescription();
+        isPresent().describeMismatch(Optional.empty(), desc);
+        assertThat(desc.toString(), equalTo("an empty optional"));
+    }
+
+    public void testIsPresentWithMatcher() {
+        assertThat(Optional.of(""), isPresentWith(""));
+        assertThat(Optional.of("foo"), not(isPresentWith("")));
+        assertThat(Optional.empty(), not(isPresentWith("")));
+
+        StringDescription desc = new StringDescription();
+        isPresentWith("foo").describeMismatch(Optional.empty(), desc);
+        assertThat(desc.toString(), equalTo("an empty optional"));
+
+        desc = new StringDescription();
+        isPresentWith("foo").describeMismatch(Optional.of(""), desc);
+        assertThat(desc.toString(), equalTo("an optional was \"\""));
+    }
+}
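The descriptions asserted above are exactly what surfaces when an assertion
fails; assuming hamcrest's usual assertion message layout, a failure looks
roughly like:

    // assertThat(Optional.<String>empty(), isPresentWith("foo")) fails with:
    //
    //     Expected: a non-empty optional "foo"
    //          but: an empty optional

which is the point of folding the get() into the matcher: the replaced
assertThat(opt.isPresent(), is(true)) idiom could only report that false was
not true, saying nothing about the optional's contents.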
diff --git a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/nodeinfo/AutoscalingNodesInfoServiceTests.java b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/nodeinfo/AutoscalingNodesInfoServiceTests.java
index 5c47f5a9dc6a4..3f147c94c5ec2 100644
--- a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/nodeinfo/AutoscalingNodesInfoServiceTests.java
+++ b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/nodeinfo/AutoscalingNodesInfoServiceTests.java
@@ -63,6 +63,9 @@
 import java.util.stream.IntStream;
 import java.util.stream.Stream;
 
+import static org.elasticsearch.test.hamcrest.OptionalMatchers.isEmpty;
+import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresent;
+import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith;
 import static org.elasticsearch.xpack.autoscaling.capacity.nodeinfo.AutoscalingNodeInfoService.FETCH_TIMEOUT;
 import static org.hamcrest.Matchers.anyOf;
 import static org.hamcrest.Matchers.equalTo;
@@ -148,8 +151,8 @@ public void testAddRemoveNode() {
         );
         client.respondStats(response, () -> {
             Sets.union(missingNodes, Sets.difference(previousNodes, nodes))
-                .forEach(n -> assertThat(service.snapshot().get(n).isEmpty(), is(true)));
-            Sets.intersection(previousSucceededNodes, nodes).forEach(n -> assertThat(service.snapshot().get(n).isPresent(), is(true)));
+                .forEach(n -> assertThat(service.snapshot().get(n), isEmpty()));
+            Sets.intersection(previousSucceededNodes, nodes).forEach(n -> assertThat(service.snapshot().get(n), isPresent()));
         });
 
         client.respondInfo(responseInfo, () -> {
@@ -159,7 +162,7 @@
         client.assertNoResponder();
 
         assertMatchesResponse(succeedingNodes, response, responseInfo);
-        failingNodes.forEach(n -> assertThat(service.snapshot().get(n).isEmpty(), is(true)));
+        failingNodes.forEach(n -> assertThat(service.snapshot().get(n), isEmpty()));
 
         previousNodes.clear();
         previousNodes.addAll(nodes);
@@ -177,7 +180,7 @@ public void testNotMaster() {
         // client throws if called.
         service.onClusterChanged(new ClusterChangedEvent("test", state, ClusterState.EMPTY_STATE));
 
-        nodes.forEach(n -> assertThat(service.snapshot().get(n).isEmpty(), is(true)));
+        nodes.forEach(n -> assertThat(service.snapshot().get(n), isEmpty()));
     }
 
     public void testNoLongerMaster() {
@@ -208,7 +211,7 @@ public void testNoLongerMaster() {
         // client throws if called.
         service.onClusterChanged(new ClusterChangedEvent("test", notMasterState, masterState));
 
-        nodes.forEach(n -> assertThat(service.snapshot().get(n).isEmpty(), is(true)));
+        nodes.forEach(n -> assertThat(service.snapshot().get(n), isEmpty()));
     }
 
     public void testStatsFails() {
@@ -218,7 +221,7 @@
         client.respondStats((r, listener) -> listener.onFailure(randomFrom(new IllegalStateException(), new RejectedExecutionException())));
         service.onClusterChanged(new ClusterChangedEvent("test", state, ClusterState.EMPTY_STATE));
 
-        nodes.forEach(n -> assertThat(service.snapshot().get(n).isEmpty(), is(true)));
+        nodes.forEach(n -> assertThat(service.snapshot().get(n), isEmpty()));
 
         NodesStatsResponse response = new NodesStatsResponse(
             ClusterName.DEFAULT,
@@ -249,7 +252,7 @@ public void testInfoFails() {
         client.respondStats(response, () -> {});
         client.respondInfo((r, listener) -> listener.onFailure(randomFrom(new IllegalStateException(), new RejectedExecutionException())));
         service.onClusterChanged(new ClusterChangedEvent("test", state, ClusterState.EMPTY_STATE));
-        nodes.forEach(n -> assertThat(service.snapshot().get(n).isEmpty(), is(true)));
+        nodes.forEach(n -> assertThat(service.snapshot().get(n), isEmpty()));
         NodesInfoResponse responseInfo = new NodesInfoResponse(
             ClusterName.DEFAULT,
             nodes.stream().map(n -> infoForNode(n, randomIntBetween(1, 64))).collect(Collectors.toList()),
@@ -316,7 +319,7 @@ public void testRestartNode() {
 
         assertMatchesResponse(Sets.intersection(restartedNodes, nodes), response, responseInfo);
         assertMatchesResponse(Sets.difference(restartedNodes, nodes), restartedStatsResponse, restartedInfoResponse);
-        Sets.difference(nodes, restartedNodes).forEach(n -> assertThat(service.snapshot().get(n).isEmpty(), is(true)));
+        Sets.difference(nodes, restartedNodes).forEach(n -> assertThat(service.snapshot().get(n), isEmpty()));
     }
 
     public void testConcurrentStateUpdate() throws Exception {
@@ -396,10 +399,9 @@ private Set<DiscoveryNodeRole> randomIrrelevantRoles(Set<DiscoveryNodeRole> relevantRo
 
     public void assertMatchesResponse(Set<DiscoveryNode> nodes, NodesStatsResponse response, NodesInfoResponse infoResponse) {
         nodes.forEach(n -> {
-            assertThat(service.snapshot().get(n).isPresent(), is(true));
             assertThat(
-                service.snapshot().get(n).get(),
-                equalTo(
+                service.snapshot().get(n),
+                isPresentWith(
                     new AutoscalingNodeInfo(
                         response.getNodesMap().get(n.getId()).getOs().getMem().getAdjustedTotal().getBytes(),
                         Processors.of(infoResponse.getNodesMap().get(n.getId()).getInfo(OsInfo.class).getFractionalAllocatedProcessors())
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentTests.java
index 4e6b88d2ff054..75706f3d6a9bf 100644
---
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentTests.java @@ -25,6 +25,8 @@ import java.util.List; import java.util.stream.Stream; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isEmpty; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.empty; @@ -112,8 +114,8 @@ public void testGetStartedNodes() { public void testCalculateAllocationStatus_GivenNoAllocations() { assertThat( - TrainedModelAssignment.Builder.empty(randomTaskParams(5)).build().calculateAllocationStatus().get(), - equalTo(new AllocationStatus(0, 5)) + TrainedModelAssignment.Builder.empty(randomTaskParams(5)).build().calculateAllocationStatus(), + isPresentWith(new AllocationStatus(0, 5)) ); } @@ -121,7 +123,7 @@ public void testCalculateAllocationStatus_GivenStoppingAssignment() { TrainedModelAssignment.Builder builder = TrainedModelAssignment.Builder.empty(randomTaskParams(5)); builder.addRoutingEntry("node-1", new RoutingInfo(1, 2, RoutingState.STARTED, "")); builder.addRoutingEntry("node-2", new RoutingInfo(2, 1, RoutingState.STARTED, "")); - assertThat(builder.stopAssignment("test").build().calculateAllocationStatus().isEmpty(), is(true)); + assertThat(builder.stopAssignment("test").build().calculateAllocationStatus(), isEmpty()); } public void testCalculateAllocationStatus_GivenPartiallyAllocated() { @@ -129,14 +131,14 @@ public void testCalculateAllocationStatus_GivenPartiallyAllocated() { builder.addRoutingEntry("node-1", new RoutingInfo(1, 2, RoutingState.STARTED, "")); builder.addRoutingEntry("node-2", new RoutingInfo(2, 1, RoutingState.STARTED, "")); builder.addRoutingEntry("node-3", new RoutingInfo(3, 3, RoutingState.STARTING, "")); - assertThat(builder.build().calculateAllocationStatus().get(), equalTo(new AllocationStatus(3, 5))); + assertThat(builder.build().calculateAllocationStatus(), isPresentWith(new AllocationStatus(3, 5))); } public void testCalculateAllocationStatus_GivenFullyAllocated() { TrainedModelAssignment.Builder builder = TrainedModelAssignment.Builder.empty(randomTaskParams(5)); builder.addRoutingEntry("node-1", new RoutingInfo(4, 4, RoutingState.STARTED, "")); builder.addRoutingEntry("node-2", new RoutingInfo(1, 1, RoutingState.STARTED, "")); - assertThat(builder.build().calculateAllocationStatus().get(), equalTo(new AllocationStatus(5, 5))); + assertThat(builder.build().calculateAllocationStatus(), isPresentWith(new AllocationStatus(5, 5))); } public void testCalculateAssignmentState_GivenNoStartedAssignments() { @@ -179,8 +181,7 @@ public void testselectRandomStartedNodeWeighedOnAllocationsForNRequests_GivenSin var nodes = assignment.selectRandomStartedNodesWeighedOnAllocationsForNRequests(1, RoutingState.STARTED); - assertThat(nodes, hasSize(1)); - assertThat(nodes.get(0), equalTo(new Tuple<>("node-1", 1))); + assertThat(nodes, contains(new Tuple<>("node-1", 1))); } public void testselectRandomStartedNodeWeighedOnAllocationsForNRequests_GivenAShuttingDownRoute_ItReturnsNoNodes() { @@ -200,8 +201,7 @@ public void testselectRandomStartedNodeWeighedOnAllocationsForNRequests_GivenASh var nodes = assignment.selectRandomStartedNodesWeighedOnAllocationsForNRequests(1, RoutingState.STOPPING); - assertThat(nodes, hasSize(1)); - 
assertThat(nodes.get(0), equalTo(new Tuple<>("node-1", 1))); + assertThat(nodes, contains(new Tuple<>("node-1", 1))); } public void testSingleRequestWith2Nodes() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java index c8fbe00d07618..4fff2804f9350 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java @@ -48,6 +48,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isEmpty; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -114,7 +115,7 @@ public void testToXContentForInternalStorage() throws IOException { ) { Job parsedConfig = Job.LENIENT_PARSER.apply(parser, null).build(); // When we are writing for internal storage, we do not include the datafeed config - assertThat(parsedConfig.getDatafeedConfig().isPresent(), is(false)); + assertThat(parsedConfig.getDatafeedConfig(), isEmpty()); } } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/MlDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/MlDeprecationCheckerTests.java index 84d39d9a02070..c90b578db8f09 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/MlDeprecationCheckerTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/MlDeprecationCheckerTests.java @@ -17,6 +17,7 @@ import java.util.Collections; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isEmpty; import static org.hamcrest.Matchers.is; public class MlDeprecationCheckerTests extends ESTestCase { @@ -47,7 +48,7 @@ public void testCheckDataFeedQuery() { DatafeedConfig.Builder goodDatafeed = new DatafeedConfig.Builder("good-df", "job-id"); goodDatafeed.setIndices(Collections.singletonList("some-index")); goodDatafeed.setParsedQuery(QueryBuilders.termQuery("foo", "bar")); - assertThat(MlDeprecationChecker.checkDataFeedQuery(goodDatafeed.build(), xContentRegistry()).isPresent(), is(false)); + assertThat(MlDeprecationChecker.checkDataFeedQuery(goodDatafeed.build(), xContentRegistry()), isEmpty()); DatafeedConfig.Builder deprecatedDatafeed = new DatafeedConfig.Builder("df-with-deprecated-query", "job-id"); deprecatedDatafeed.setIndices(Collections.singletonList("some-index")); diff --git a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichRestartIT.java b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichRestartIT.java index 9e03f7469d71e..86d18bcbbbbc4 100644 --- a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichRestartIT.java +++ b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichRestartIT.java @@ -19,13 +19,14 @@ import java.util.List; import java.util.Optional; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresent; import static org.elasticsearch.xpack.enrich.AbstractEnrichTestCase.createSourceIndices; import static org.elasticsearch.xpack.enrich.EnrichMultiNodeIT.DECORATE_FIELDS; import static org.elasticsearch.xpack.enrich.EnrichMultiNodeIT.MATCH_FIELD; import static 
org.elasticsearch.xpack.enrich.EnrichMultiNodeIT.POLICY_NAME; import static org.elasticsearch.xpack.enrich.EnrichMultiNodeIT.SOURCE_INDEX_NAME; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.hasSize; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0) public class EnrichRestartIT extends ESIntegTestCase { @@ -72,14 +73,14 @@ public void testRestart() throws Exception { private static void verifyPolicies(int numPolicies, EnrichPolicy enrichPolicy) { GetEnrichPolicyAction.Response response = client().execute(GetEnrichPolicyAction.INSTANCE, new GetEnrichPolicyAction.Request()) .actionGet(); - assertThat(response.getPolicies().size(), equalTo(numPolicies)); + assertThat(response.getPolicies(), hasSize(numPolicies)); for (int i = 0; i < numPolicies; i++) { String policyName = POLICY_NAME + i; Optional result = response.getPolicies() .stream() .filter(namedPolicy -> namedPolicy.getName().equals(policyName)) .findFirst(); - assertThat(result.isPresent(), is(true)); + assertThat(result, isPresent()); assertThat(result.get().getPolicy(), equalTo(enrichPolicy)); } } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java index ed7cfad8bf195..a130f669583fa 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ensemble.Ensemble; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.metadata.Hyperparameters; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.metadata.TrainedModelMetadata; +import org.hamcrest.Matchers; import org.junit.After; import java.io.IOException; @@ -47,11 +48,12 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; -import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresent; +import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.emptyString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; @@ -129,9 +131,8 @@ public void testSingleNumericFeatureAndMixedTrainingAndNonTrainingRows() throws // double predictionValue = (double) resultsObject.get(predictedClassField); // assertThat(predictionValue, closeTo(10 * featureValue, 2.0)); - assertThat(resultsObject.containsKey(predictedClassField), is(true)); - assertThat(resultsObject.containsKey("is_training"), is(true)); - assertThat(resultsObject.get("is_training"), is(destDoc.containsKey(DEPENDENT_VARIABLE_FIELD))); + assertThat(resultsObject, hasKey(predictedClassField)); + assertThat(resultsObject, hasEntry("is_training", destDoc.containsKey(DEPENDENT_VARIABLE_FIELD))); @SuppressWarnings("unchecked") List> importanceArray = (List>) resultsObject.get("feature_importance"); @@ 
-144,15 +145,13 @@ public void testSingleNumericFeatureAndMixedTrainingAndNonTrainingRows() throws } } - assertThat(importanceArray, hasSize(greaterThan(0))); assertThat( - importanceArray.stream() - .filter( - m -> NUMERICAL_FEATURE_FIELD.equals(m.get("feature_name")) - || DISCRETE_NUMERICAL_FEATURE_FIELD.equals(m.get("feature_name")) + importanceArray, + hasItem( + either(Matchers.hasEntry("feature_name", NUMERICAL_FEATURE_FIELD)).or( + hasEntry("feature_name", DISCRETE_NUMERICAL_FEATURE_FIELD) ) - .findAny(), - isPresent() + ) ); } @@ -504,20 +503,18 @@ public void testWithDatastream() throws Exception { Map destDoc = getDestDoc(config, hit); Map resultsObject = getMlResultsObjectFromDestDoc(destDoc); - assertThat(resultsObject.containsKey(predictedClassField), is(true)); - assertThat(resultsObject.containsKey("is_training"), is(true)); - assertThat(resultsObject.get("is_training"), is(destDoc.containsKey(DEPENDENT_VARIABLE_FIELD))); + assertThat(resultsObject, hasKey(predictedClassField)); + assertThat(resultsObject, hasEntry("is_training", destDoc.containsKey(DEPENDENT_VARIABLE_FIELD))); @SuppressWarnings("unchecked") List> importanceArray = (List>) resultsObject.get("feature_importance"); - assertThat(importanceArray, hasSize(greaterThan(0))); + assertThat( - importanceArray.stream() - .filter( - m -> NUMERICAL_FEATURE_FIELD.equals(m.get("feature_name")) - || DISCRETE_NUMERICAL_FEATURE_FIELD.equals(m.get("feature_name")) + importanceArray, + hasItem( + either(Matchers.hasEntry("feature_name", NUMERICAL_FEATURE_FIELD)).or( + hasEntry("feature_name", DISCRETE_NUMERICAL_FEATURE_FIELD) ) - .findAny(), - isPresent() + ) ); } }); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlMemoryAutoscalingDeciderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlMemoryAutoscalingDeciderTests.java index 0b3851012d0e8..a916900b199ce 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlMemoryAutoscalingDeciderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlMemoryAutoscalingDeciderTests.java @@ -61,6 +61,8 @@ import java.util.function.LongSupplier; import static java.lang.Math.min; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isEmpty; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresent; import static org.elasticsearch.xpack.ml.MachineLearning.MACHINE_MEMORY_NODE_ATTR; import static org.elasticsearch.xpack.ml.MachineLearning.MAX_JVM_SIZE_NODE_ATTR; import static org.elasticsearch.xpack.ml.MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD; @@ -72,7 +74,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.mockito.ArgumentMatchers.any; @@ -978,7 +979,7 @@ public void testScaleDown() { ByteSizeValue.ofGb(5).getBytes() - PER_NODE_OVERHEAD ) ); - assertThat(result.isEmpty(), is(false)); + assertThat(result, isPresent()); MlMemoryAutoscalingCapacity deciderResult = result.get(); // Four times due to 25% ML memory assertThat(deciderResult.nodeSize().getBytes(), equalTo(4 * ByteSizeValue.ofGb(1).getBytes())); @@ -1013,7 +1014,7 @@ public void testScaleDown() { ByteSizeValue.ofGb(1).getBytes() - PER_NODE_OVERHEAD ) ); - assertThat(result.isEmpty(), is(false)); + 
assertThat(result, isPresent()); MlMemoryAutoscalingCapacity deciderResult = result.get(); // Four times due to 25% ML memory assertThat(deciderResult.nodeSize().getBytes(), equalTo(4 * ByteSizeValue.ofMb(100).getBytes())); @@ -1048,7 +1049,7 @@ public void testScaleDown() { ByteSizeValue.ofMb(100).getBytes() - PER_NODE_OVERHEAD ) ); - assertThat(result.isEmpty(), is(true)); + assertThat(result, isEmpty()); } } @@ -1210,7 +1211,7 @@ public void testFutureAvailableCapacity() { Collection mlNodesInCluster = clusterState.getNodes().getNodes().values(); Optional nativeMemoryCapacity = decider.calculateFutureAvailableCapacity(mlNodesInCluster, clusterState); - assertThat(nativeMemoryCapacity.isEmpty(), is(false)); + assertThat(nativeMemoryCapacity, isPresent()); assertThat(nativeMemoryCapacity.get().getNodeMlNativeMemoryRequirementExcludingOverhead(), greaterThanOrEqualTo(TEST_JOB_SIZE)); assertThat( nativeMemoryCapacity.get().getNodeMlNativeMemoryRequirementExcludingOverhead(), diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java index 1d90039365c65..7bc3d507ecf22 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java @@ -49,6 +49,7 @@ import java.util.function.Function; import java.util.stream.Collectors; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isEmpty; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -134,7 +135,7 @@ public void testTwoPageExtraction() throws IOException { // Third batch should return empty rows = dataExtractor.next(); - assertThat(rows.isEmpty(), is(true)); + assertThat(rows, isEmpty()); assertThat(dataExtractor.hasNext(), is(false)); // Now let's assert we're sending the expected search requests @@ -223,7 +224,7 @@ public void testRecoveryFromErrorOnSearch() throws IOException { // Next batch should return empty rows = dataExtractor.next(); - assertThat(rows.isEmpty(), is(true)); + assertThat(rows, isEmpty()); assertThat(dataExtractor.hasNext(), is(false)); // Notice we've done 4 searches @@ -267,7 +268,7 @@ public void testIncludeSourceIsFalseAndNoSourceFields() throws IOException { assertThat(rows.get().get(0).getValues(), equalTo(new String[] { "11", "21" })); assertThat(dataExtractor.hasNext(), is(true)); - assertThat(dataExtractor.next().isEmpty(), is(true)); + assertThat(dataExtractor.next(), isEmpty()); assertThat(dataExtractor.hasNext(), is(false)); assertThat(dataExtractor.capturedSearchRequests.size(), equalTo(2)); @@ -302,7 +303,7 @@ public void testIncludeSourceIsFalseAndAtLeastOneSourceField() throws IOExceptio assertThat(rows.get().get(0).getValues(), equalTo(new String[] { "11", "21" })); assertThat(dataExtractor.hasNext(), is(true)); - assertThat(dataExtractor.next().isEmpty(), is(true)); + assertThat(dataExtractor.next(), isEmpty()); assertThat(dataExtractor.hasNext(), is(false)); assertThat(dataExtractor.capturedSearchRequests.size(), equalTo(2)); @@ -380,7 +381,7 @@ public void testMissingValues_GivenSupported() throws IOException { // Third batch should return empty rows = dataExtractor.next(); - assertThat(rows.isEmpty(), is(true)); + assertThat(rows, 
isEmpty()); assertThat(dataExtractor.hasNext(), is(false)); } @@ -414,7 +415,7 @@ public void testMissingValues_GivenNotSupported() throws IOException { // Third batch should return empty rows = dataExtractor.next(); - assertThat(rows.isEmpty(), is(true)); + assertThat(rows, isEmpty()); assertThat(dataExtractor.hasNext(), is(false)); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlannerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlannerTests.java index 6a72ccf4c4445..bc94144bce1c5 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlannerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/AssignmentPlannerTests.java @@ -23,6 +23,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isEmpty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -43,7 +44,7 @@ public void testModelThatDoesNotFitInMemory() { List<Node> nodes = List.of(new Node("n_1", scaleNodeSize(50), 4)); Deployment deployment = new AssignmentPlan.Deployment("m_1", ByteSizeValue.ofMb(51).getBytes(), 4, 1, Map.of(), 0, 0, 0); AssignmentPlan plan = new AssignmentPlanner(nodes, List.of(deployment)).computePlan(); - assertThat(plan.assignments(deployment).isEmpty(), is(true)); + assertThat(plan.assignments(deployment), isEmpty()); } { // With perDeploymentMemory and perAllocationMemory specified List<Node> nodes = List.of(new Node("n_1", scaleNodeSize(55), 4)); Deployment deployment = new AssignmentPlan.Deployment( "m_1", ByteSizeValue.ofMb(50).getBytes(), 4, 1, Map.of(), 0, ByteSizeValue.ofMb(250).getBytes(), ByteSizeValue.ofMb(51).getBytes() ); AssignmentPlan plan = new AssignmentPlanner(nodes, List.of(deployment)).computePlan(); - assertThat(plan.assignments(deployment).isEmpty(), is(true)); + assertThat(plan.assignments(deployment), isEmpty()); } } @@ -66,7 +67,7 @@ public void testModelWithThreadsPerAllocationNotFittingOnAnyNode() { List<Node> nodes = List.of(new Node("n_1", scaleNodeSize(100), 4), new Node("n_2", scaleNodeSize(100), 5)); Deployment deployment = new AssignmentPlan.Deployment("m_1", ByteSizeValue.ofMb(1).getBytes(), 1, 6, Map.of(), 0, 0, 0); AssignmentPlan plan = new AssignmentPlanner(nodes, List.of(deployment)).computePlan(); - assertThat(plan.assignments(deployment).isEmpty(), is(true)); + assertThat(plan.assignments(deployment), isEmpty()); } public void testSingleModelThatFitsFullyOnSingleNode() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveAllAllocationsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveAllAllocationsTests.java index c45ce36394109..7f83df5835494 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveAllAllocationsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveAllAllocationsTests.java @@ -15,9 +15,10 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isEmpty; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.is;
public class PreserveAllAllocationsTests extends ESTestCase { @@ -89,12 +90,12 @@ public void testGivenPreviousAssignments() { AssignmentPlan plan = AssignmentPlan.builder(List.of(node1, node2), List.of(deployment1, deployment2)) .assignModelToNode(deployment1, node1, 2) .build(); - assertThat(plan.assignments(deployment1).get(), equalTo(Map.of(node1, 2))); - assertThat(plan.assignments(deployment2).isEmpty(), is(true)); + assertThat(plan.assignments(deployment1), isPresentWith(Map.of(node1, 2))); + assertThat(plan.assignments(deployment2), isEmpty()); plan = preserveAllAllocations.mergePreservedAllocations(plan); - assertThat(plan.assignments(deployment1).get(), equalTo(Map.of(node1, 3))); - assertThat(plan.assignments(deployment2).get(), equalTo(Map.of(node1, 1, node2, 2))); + assertThat(plan.assignments(deployment1), isPresentWith(Map.of(node1, 3))); + assertThat(plan.assignments(deployment2), isPresentWith(Map.of(node1, 1, node2, 2))); // Node 1 already had deployments 1 and 2 assigned to it so adding more allocation doesn't change memory usage. assertThat(plan.getRemainingNodeMemory("n_1"), equalTo(0L)); @@ -174,12 +175,12 @@ public void testGivenPreviousAssignments() { AssignmentPlan plan = AssignmentPlan.builder(List.of(node1, node2), List.of(deployment1, deployment2)) .assignModelToNode(deployment1, node1, 2) .build(); - assertThat(plan.assignments(deployment1).get(), equalTo(Map.of(node1, 2))); - assertThat(plan.assignments(deployment2).isEmpty(), is(true)); + assertThat(plan.assignments(deployment1), isPresentWith(Map.of(node1, 2))); + assertThat(plan.assignments(deployment2), isEmpty()); plan = preserveAllAllocations.mergePreservedAllocations(plan); - assertThat(plan.assignments(deployment1).get(), equalTo(Map.of(node1, 3))); - assertThat(plan.assignments(deployment2).get(), equalTo(Map.of(node1, 1, node2, 2))); + assertThat(plan.assignments(deployment1), isPresentWith(Map.of(node1, 3))); + assertThat(plan.assignments(deployment2), isPresentWith(Map.of(node1, 1, node2, 2))); // 1000 - ((30 + 300 + 3*10) + (50 + 300 + 10)) = 280 : deployments use 720 MB on the node 1 assertThat(plan.getRemainingNodeMemory("n_1"), equalTo(ByteSizeValue.ofMb(280).getBytes())); @@ -198,11 +199,10 @@ public void testGivenModelWithPreviousAssignments_AndPlanToMergeHasNoAssignments PreserveAllAllocations preserveAllAllocations = new PreserveAllAllocations(List.of(node), List.of(deployment)); AssignmentPlan plan = AssignmentPlan.builder(List.of(node), List.of(deployment)).build(); - assertThat(plan.assignments(deployment).isEmpty(), is(true)); + assertThat(plan.assignments(deployment), isEmpty()); plan = preserveAllAllocations.mergePreservedAllocations(plan); - assertThat(plan.assignments(deployment).isPresent(), is(true)); - assertThat(plan.assignments(deployment).get(), equalTo(Map.of(node, 2))); + assertThat(plan.assignments(deployment), isPresentWith(Map.of(node, 2))); assertThat(plan.getRemainingNodeMemory("n_1"), equalTo(ByteSizeValue.ofMb(100).getBytes())); assertThat(plan.getRemainingNodeCores("n_1"), equalTo(0)); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveOneAllocationTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveOneAllocationTests.java index f646bf5cb2e9d..d2907eb31160b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveOneAllocationTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/planning/PreserveOneAllocationTests.java @@ -15,10 +15,11 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isEmpty; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.is; public class PreserveOneAllocationTests extends ESTestCase { @@ -202,11 +203,10 @@ public void testGivenModelWithPreviousAssignments_AndPlanToMergeHasNoAssignments PreserveOneAllocation preserveOneAllocation = new PreserveOneAllocation(List.of(node), List.of(deployment)); AssignmentPlan plan = AssignmentPlan.builder(List.of(node), List.of(deployment)).build(); - assertThat(plan.assignments(deployment).isEmpty(), is(true)); + assertThat(plan.assignments(deployment), isEmpty()); plan = preserveOneAllocation.mergePreservedAllocations(plan); - assertThat(plan.assignments(deployment).isPresent(), is(true)); - assertThat(plan.assignments(deployment).get(), equalTo(Map.of(node, 1))); + assertThat(plan.assignments(deployment), isPresentWith(Map.of(node, 1))); // 400 - (30*2 + 240) = 100 : deployments use 300MB on the node assertThat(plan.getRemainingNodeMemory("n_1"), equalTo(ByteSizeValue.ofMb(100).getBytes())); assertThat(plan.getRemainingNodeCores("n_1"), equalTo(2)); @@ -227,11 +227,10 @@ public void testGivenModelWithPreviousAssignments_AndPlanToMergeHasNoAssignments PreserveOneAllocation preserveOneAllocation = new PreserveOneAllocation(List.of(node), List.of(deployment)); AssignmentPlan plan = AssignmentPlan.builder(List.of(node), List.of(deployment)).build(); - assertThat(plan.assignments(deployment).isEmpty(), is(true)); + assertThat(plan.assignments(deployment), isEmpty()); plan = preserveOneAllocation.mergePreservedAllocations(plan); - assertThat(plan.assignments(deployment).isPresent(), is(true)); - assertThat(plan.assignments(deployment).get(), equalTo(Map.of(node, 1))); + assertThat(plan.assignments(deployment), isPresentWith(Map.of(node, 1))); // 400 - (30 + 300 + 10) = 60 : deployments use 340MB on the node assertThat(plan.getRemainingNodeMemory("n_1"), equalTo(ByteSizeValue.ofMb(60).getBytes())); assertThat(plan.getRemainingNodeCores("n_1"), equalTo(2)); From 8773a1208ba0890e03bb49ff178e170849d056d3 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Tue, 9 Jan 2024 13:25:51 +0000 Subject: [PATCH 42/47] [ML] Rename ML memory metrics (#104121) The way the ML memory metrics were renamed in #103388 does not make sense. This PR adjusts the names to better options that still pass the naming validation rules. 
--- .../main/java/org/elasticsearch/xpack/ml/MlMetrics.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlMetrics.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlMetrics.java index 35f73551c2c3a..4fd1af3cfa2bc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlMetrics.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlMetrics.java @@ -103,7 +103,7 @@ public MlMetrics( private void registerMlNodeMetrics(MeterRegistry meterRegistry) { metrics.add( meterRegistry.registerLongGauge( - "es.ml.native_memory.usage", + "es.ml.native_memory.limit.size", "ML native memory limit on this node.", "bytes", () -> new LongWithAttributes(nativeMemLimit, Map.of()) @@ -111,7 +111,7 @@ private void registerMlNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.native_memory.usage.anomaly_detectors.usage", + "es.ml.native_memory.anomaly_detectors.usage", "ML native memory used by anomaly detection jobs on this node.", "bytes", () -> new LongWithAttributes(nativeMemAdUsage, Map.of()) @@ -119,7 +119,7 @@ private void registerMlNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.native_memory.usage.data_frame_analytics.usage", + "es.ml.native_memory.data_frame_analytics.usage", "ML native memory used by data frame analytics jobs on this node.", "bytes", () -> new LongWithAttributes(nativeMemDfaUsage, Map.of()) @@ -127,7 +127,7 @@ private void registerMlNodeMetrics(MeterRegistry meterRegistry) { ); metrics.add( meterRegistry.registerLongGauge( - "es.ml.native_memory.usage.trained_models.usage", + "es.ml.native_memory.trained_models.usage", "ML native memory used by trained models on this node.", "bytes", () -> new LongWithAttributes(nativeMemTrainedModelUsage, Map.of()) From 5ef5dca3349a18cb79e45720cd0c2a0185dc9511 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 9 Jan 2024 08:46:34 -0500 Subject: [PATCH 43/47] ESQL: `MV_FIRST` and `MV_LAST` (#103928) This creates the `MV_FIRST` and `MV_LAST` functions that return the first and last values from a multivalue field. They are no-ops on a single valued field. They are quite similar to `MV_MIN` and `MV_MAX` except they work on positional data rather than relative size. That sounds like a large distinction, but in practice our multivalued fields are often sorted. And when they operate on sorted arrays `MV_MIN` does *the same* thing as `MV_FIRST`. But there are some cases where it really does matter - say you are `SPLIT`ing something - so `MV_FIRST(SPLIT("foo;bar;baz", ";"))` gets you `foo` like you'd expect. No sorting needed.
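Conceptually each function just indexes one end of a row's range of values. A minimal sketch of the long variant (class and method names here are illustrative; the generated evaluators further down do call `MvFirst.process`/`MvLast.process` with this same block/start/end shape):

    import org.elasticsearch.compute.data.LongBlock;

    class MvFirstLastSketch {
        // MV_FIRST: the value at the first position of the row's range [start, end)
        static long first(LongBlock block, int start, int end) {
            return block.getLong(start);
        }

        // MV_LAST: the value at the last position of the range
        static long last(LongBlock block, int start, int end) {
            return block.getLong(end - 1);
        }
    }

Either way the work per row is constant regardless of how many values the field holds, which is consistent with there being no performance benefit to `MV_FIRST` over `MV_MIN` on sorted values.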
Relates to #103879 --- docs/changelog/103928.yaml | 5 + docs/reference/esql/esql-functions.asciidoc | 144 ------------------ .../esql/functions/mv-functions.asciidoc | 4 + .../esql/functions/mv_first.asciidoc | 27 ++++ .../reference/esql/functions/mv_last.asciidoc | 27 ++++ .../esql/functions/signature/mv_first.svg | 1 + .../esql/functions/signature/mv_last.svg | 1 + .../esql/functions/types/mv_first.asciidoc | 16 ++ .../esql/functions/types/mv_last.asciidoc | 16 ++ .../compute/ann/MvEvaluator.java | 31 ++-- .../compute/gen/MvEvaluatorImplementer.java | 79 ++++++---- .../src/main/resources/show.csv-spec | 6 +- .../src/main/resources/string.csv-spec | 26 ++++ .../multivalue/MvFirstBooleanEvaluator.java | 89 +++++++++++ .../multivalue/MvFirstBytesRefEvaluator.java | 92 +++++++++++ .../multivalue/MvFirstDoubleEvaluator.java | 89 +++++++++++ .../multivalue/MvFirstIntEvaluator.java | 88 +++++++++++ .../multivalue/MvFirstLongEvaluator.java | 88 +++++++++++ .../multivalue/MvLastBooleanEvaluator.java | 89 +++++++++++ .../multivalue/MvLastBytesRefEvaluator.java | 92 +++++++++++ .../multivalue/MvLastDoubleEvaluator.java | 89 +++++++++++ .../scalar/multivalue/MvLastIntEvaluator.java | 88 +++++++++++ .../multivalue/MvLastLongEvaluator.java | 88 +++++++++++ .../function/EsqlFunctionRegistry.java | 4 + .../function/scalar/multivalue/MvFirst.java | 111 ++++++++++++++ .../function/scalar/multivalue/MvLast.java | 111 ++++++++++++++ .../function/scalar/package-info.java | 2 +- .../xpack/esql/io/stream/PlanNamedTypes.java | 6 + .../scalar/multivalue/MvFirstTests.java | 61 ++++++++ .../scalar/multivalue/MvLastTests.java | 67 ++++++++ 30 files changed, 1452 insertions(+), 185 deletions(-) create mode 100644 docs/changelog/103928.yaml delete mode 100644 docs/reference/esql/esql-functions.asciidoc create mode 100644 docs/reference/esql/functions/mv_first.asciidoc create mode 100644 docs/reference/esql/functions/mv_last.asciidoc create mode 100644 docs/reference/esql/functions/signature/mv_first.svg create mode 100644 docs/reference/esql/functions/signature/mv_last.svg create mode 100644 docs/reference/esql/functions/types/mv_first.asciidoc create mode 100644 docs/reference/esql/functions/types/mv_last.asciidoc create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBooleanEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBytesRefEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstDoubleEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstIntEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstLongEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBooleanEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBytesRefEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastDoubleEvaluator.java create mode 100644 
x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastIntEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastLongEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java diff --git a/docs/changelog/103928.yaml b/docs/changelog/103928.yaml new file mode 100644 index 0000000000000..a9e60ba33a686 --- /dev/null +++ b/docs/changelog/103928.yaml @@ -0,0 +1,5 @@ +pr: 103928 +summary: "ESQL: `MV_FIRST` and `MV_LAST`" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/reference/esql/esql-functions.asciidoc b/docs/reference/esql/esql-functions.asciidoc deleted file mode 100644 index c463c13a34e74..0000000000000 --- a/docs/reference/esql/esql-functions.asciidoc +++ /dev/null @@ -1,144 +0,0 @@ -[[esql-functions]] -== {esql} functions - -++++ -Functions -++++ - -<>, <> and <> support -these functions: - -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> - -include::functions/abs.asciidoc[] -include::functions/acos.asciidoc[] -include::functions/asin.asciidoc[] -include::functions/atan.asciidoc[] -include::functions/atan2.asciidoc[] -include::functions/auto_bucket.asciidoc[] -include::functions/case.asciidoc[] -include::functions/ceil.asciidoc[] -include::functions/cidr_match.asciidoc[] -include::functions/coalesce.asciidoc[] -include::functions/concat.asciidoc[] -include::functions/cos.asciidoc[] -include::functions/cosh.asciidoc[] -include::functions/date_extract.asciidoc[] -include::functions/date_format.asciidoc[] -include::functions/date_parse.asciidoc[] -include::functions/date_trunc.asciidoc[] -include::functions/e.asciidoc[] -include::functions/ends_with.asciidoc[] -include::functions/floor.asciidoc[] -include::functions/greatest.asciidoc[] -include::functions/is_finite.asciidoc[] -include::functions/is_infinite.asciidoc[] -include::functions/is_nan.asciidoc[] -include::functions/least.asciidoc[] -include::functions/left.asciidoc[] -include::functions/length.asciidoc[] -include::functions/log10.asciidoc[] -include::functions/ltrim.asciidoc[] -include::functions/mv_avg.asciidoc[] -include::functions/mv_concat.asciidoc[] -include::functions/mv_count.asciidoc[] -include::functions/mv_dedupe.asciidoc[] -include::functions/mv_max.asciidoc[] -include::functions/mv_median.asciidoc[] -include::functions/mv_min.asciidoc[] -include::functions/mv_sum.asciidoc[] -include::functions/now.asciidoc[] -include::functions/pi.asciidoc[] -include::functions/pow.asciidoc[] -include::functions/replace.asciidoc[] -include::functions/right.asciidoc[] -include::functions/round.asciidoc[] -include::functions/rtrim.asciidoc[] 
-include::functions/sin.asciidoc[] -include::functions/sinh.asciidoc[] -include::functions/split.asciidoc[] -include::functions/sqrt.asciidoc[] -include::functions/starts_with.asciidoc[] -include::functions/substring.asciidoc[] -include::functions/tan.asciidoc[] -include::functions/tanh.asciidoc[] -include::functions/tau.asciidoc[] -include::functions/to_boolean.asciidoc[] -include::functions/to_cartesianpoint.asciidoc[] -include::functions/to_datetime.asciidoc[] -include::functions/to_degrees.asciidoc[] -include::functions/to_double.asciidoc[] -include::functions/to_geopoint.asciidoc[] -include::functions/to_integer.asciidoc[] -include::functions/to_ip.asciidoc[] -include::functions/to_long.asciidoc[] -include::functions/to_radians.asciidoc[] -include::functions/to_string.asciidoc[] -include::functions/to_unsigned_long.asciidoc[] -include::functions/to_version.asciidoc[] -include::functions/trim.asciidoc[] diff --git a/docs/reference/esql/functions/mv-functions.asciidoc b/docs/reference/esql/functions/mv-functions.asciidoc index 83dbaaadc5c06..a95a3d36a9963 100644 --- a/docs/reference/esql/functions/mv-functions.asciidoc +++ b/docs/reference/esql/functions/mv-functions.asciidoc @@ -12,6 +12,8 @@ * <<esql-mv_concat>> * <<esql-mv_count>> * <<esql-mv_dedupe>> +* <<esql-mv_first>> +* <<esql-mv_last>> * <<esql-mv_max>> * <<esql-mv_median>> * <<esql-mv_min>> @@ -22,6 +24,8 @@ include::mv_avg.asciidoc[] include::mv_concat.asciidoc[] include::mv_count.asciidoc[] include::mv_dedupe.asciidoc[] +include::mv_first.asciidoc[] +include::mv_last.asciidoc[] include::mv_max.asciidoc[] include::mv_median.asciidoc[] include::mv_min.asciidoc[] diff --git a/docs/reference/esql/functions/mv_first.asciidoc b/docs/reference/esql/functions/mv_first.asciidoc new file mode 100644 index 0000000000000..42ac8930136cc --- /dev/null +++ b/docs/reference/esql/functions/mv_first.asciidoc @@ -0,0 +1,27 @@ +[discrete] +[[esql-mv_first]] +=== `MV_FIRST` +[.text-center] +image::esql/functions/signature/mv_first.svg[Embedded,opts=inline] + +Converts a multivalued field into a single valued field containing the first value. This is most +useful when reading from a function that emits multivalued fields in a known order like <<esql-split>>: + +[source.merge.styled,esql] +---- +include::{esql-specs}/string.csv-spec[tag=mv_first] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/string.csv-spec[tag=mv_first-result] +|=== + +The order that <<esql-multivalued-fields,multivalued fields>> are read from underlying storage is not +guaranteed. It is *frequently* ascending, but don't rely on that. If you need the minimum field value +use <<esql-mv_min>> instead of `MV_FIRST`. `MV_MIN` has optimizations for sorted values so there isn't +a performance benefit to `MV_FIRST`. `MV_FIRST` is mostly useful with functions that create multivalued +fields like `SPLIT`. + +Supported types: + +include::types/mv_first.asciidoc[] diff --git a/docs/reference/esql/functions/mv_last.asciidoc b/docs/reference/esql/functions/mv_last.asciidoc new file mode 100644 index 0000000000000..aa6fc40d0af07 --- /dev/null +++ b/docs/reference/esql/functions/mv_last.asciidoc @@ -0,0 +1,27 @@ +[discrete] +[[esql-mv_last]] +=== `MV_LAST` +[.text-center] +image::esql/functions/signature/mv_last.svg[Embedded,opts=inline] + +Converts a multivalued field into a single valued field containing the last value.
This is most +useful when reading from a function that emits multivalued fields in a known order like <<esql-split>>: + +[source.merge.styled,esql] +---- +include::{esql-specs}/string.csv-spec[tag=mv_last] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/string.csv-spec[tag=mv_last-result] +|=== + +The order that <<esql-multivalued-fields,multivalued fields>> are read from underlying storage is not +guaranteed. It is *frequently* ascending, but don't rely on that. If you need the maximum field value +use <<esql-mv_max>> instead of `MV_LAST`. `MV_MAX` has optimizations for sorted values so there isn't +a performance benefit to `MV_LAST`. `MV_LAST` is mostly useful with functions that create multivalued +fields like `SPLIT`. + +Supported types: + +include::types/mv_last.asciidoc[] diff --git a/docs/reference/esql/functions/signature/mv_first.svg b/docs/reference/esql/functions/signature/mv_first.svg new file mode 100644 index 0000000000000..20d201eab0add --- /dev/null +++ b/docs/reference/esql/functions/signature/mv_first.svg @@ -0,0 +1 @@ +MV_FIRST(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_last.svg b/docs/reference/esql/functions/signature/mv_last.svg new file mode 100644 index 0000000000000..eb32bb49f8ccc --- /dev/null +++ b/docs/reference/esql/functions/signature/mv_last.svg @@ -0,0 +1 @@ +MV_LAST(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/mv_first.asciidoc b/docs/reference/esql/functions/types/mv_first.asciidoc new file mode 100644 index 0000000000000..e6c67a454b96b --- /dev/null +++ b/docs/reference/esql/functions/types/mv_first.asciidoc @@ -0,0 +1,16 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +boolean | boolean +cartesian_point | cartesian_point +datetime | datetime +double | double +geo_point | geo_point +integer | integer +ip | ip +keyword | keyword +long | long +text | text +unsigned_long | unsigned_long +version | version +|=== diff --git a/docs/reference/esql/functions/types/mv_last.asciidoc b/docs/reference/esql/functions/types/mv_last.asciidoc new file mode 100644 index 0000000000000..e6c67a454b96b --- /dev/null +++ b/docs/reference/esql/functions/types/mv_last.asciidoc @@ -0,0 +1,16 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +boolean | boolean +cartesian_point | cartesian_point +datetime | datetime +double | double +geo_point | geo_point +integer | integer +ip | ip +keyword | keyword +long | long +text | text +unsigned_long | unsigned_long +version | version +|=== diff --git a/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/MvEvaluator.java b/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/MvEvaluator.java index 97f165e67cb44..bb2cb3bf9e5fa 100644 --- a/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/MvEvaluator.java +++ b/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/MvEvaluator.java @@ -16,17 +16,30 @@ * Implement an evaluator for a function reducing multivalued fields into a * single valued field from a static {@code process} method. * <p>
- * Annotated methods can have two "shapes": pairwise processing and
- * accumulator processing. Pairwise is generally
- * simpler and looks like {@code int process(int current, int next)}.
- * Use it when the result is a primitive. Accumulator processing is
- * a bit more complex and looks like {@code void process(State state, int v)}
- * and it useful when you need to accumulate more data than fits
- * in a primitive result. Think Kahan summation.
+ * Annotated methods can have three "shapes":
  * </p>
+ * <ul>
+ *     <li>pairwise processing</li>
+ *     <li>accumulator processing</li>
+ *     <li>position at a time processing</li>
+ * </ul>
  * <p>
- * Both method shapes support at {@code finish = "finish_method"} parameter
- * on the annotation which is used to, well, "finish" processing after
+ * Pairwise processing is generally simpler and looks
+ * like {@code int process(int current, int next)}. Use it when the result
+ * is a primitive.
+ * </p>
+ * <p>
+ * Accumulator processing is a bit more complex and looks like
+ * {@code void process(State state, int v)} and it is useful when you need to
+ * accumulate more data than fits in a primitive result. Think Kahan summation.
+ * </p>
+ * <p>
+ * Position at a time processing just hands the block, start index, and end index
+ * to the processor and is useful when none of the others fit. It looks like
+ * {@code long process(LongBlock block, int start, int end)}.
+ * </p>
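+ * <p>
+ * For example, sketches of the three shapes (illustrative only; {@code Sum}
+ * stands in for a hypothetical accumulator state class):
+ * <pre>{@code
+ * static int process(int current, int next) { return Math.max(current, next); } // pairwise
+ * static void process(Sum state, double v) { state.sum += v; }                  // accumulator
+ * static double finish(Sum state) { return state.sum; }                         // accumulator finish
+ * static long process(LongBlock block, int start, int end) {                    // position at a time
+ *     return block.getLong(start);                                              // e.g. take the first value
+ * }
+ * }</pre>
+ * </p>
+ * <p>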
+ * Pairwise and accumulator processing support a {@code finish = "finish_method"} + * parameter on the annotation which is used to, well, "finish" processing after * all values have been received. Again, think reading the sum from the * Kahan summation. Or doing the division for an "average" operation. * This method is required for accumulator processing. diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java index 032eb0cee223c..52b1c2b09b629 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java @@ -92,13 +92,20 @@ public MvEvaluatorImplementer( ) { this.declarationType = (TypeElement) processFunction.getEnclosingElement(); this.processFunction = processFunction; - if (processFunction.getParameters().size() != 2) { - throw new IllegalArgumentException("process should have exactly two parameters"); + if (processFunction.getParameters().size() == 2) { + this.workType = TypeName.get(processFunction.getParameters().get(0).asType()); + this.fieldType = TypeName.get(processFunction.getParameters().get(1).asType()); + this.finishFunction = FinishFunction.from(declarationType, finishMethodName, workType, fieldType); + this.resultType = this.finishFunction == null ? this.workType : this.finishFunction.resultType; + } else { + if (finishMethodName.equals("") == false) { + throw new IllegalArgumentException("finish function is only supported for pairwise processing"); + } + this.workType = null; + this.fieldType = Types.elementType(TypeName.get(processFunction.getParameters().get(0).asType())); + this.finishFunction = null; + this.resultType = TypeName.get(processFunction.getReturnType()); } - this.workType = TypeName.get(processFunction.getParameters().get(0).asType()); - this.fieldType = TypeName.get(processFunction.getParameters().get(1).asType()); - this.finishFunction = FinishFunction.from(declarationType, finishMethodName, workType, fieldType); - this.resultType = this.finishFunction == null ? 
this.workType : this.finishFunction.resultType; this.singleValueFunction = SingleValueFunction.from(declarationType, singleValueMethodName, resultType, fieldType); this.ascendingFunction = AscendingFunction.from(this, declarationType, ascendingMethodName); this.warnExceptions = warnExceptions; @@ -208,11 +215,11 @@ private MethodSpec evalShell( Methods.buildFromFactory(builderType) ); - if (false == workType.equals(fieldType) && workType.isPrimitive() == false) { + if (workType != null && false == workType.equals(fieldType) && workType.isPrimitive() == false) { builder.addStatement("$T work = new $T()", workType, workType); } if (fieldType.equals(BYTES_REF)) { - if (workType.equals(fieldType)) { + if (fieldType.equals(workType)) { builder.addStatement("$T firstScratch = new $T()", BYTES_REF, BYTES_REF); builder.addStatement("$T nextScratch = new $T()", BYTES_REF, BYTES_REF); } else { @@ -270,33 +277,45 @@ private MethodSpec eval(String name, boolean nullable) { } builder.addStatement("int end = first + valueCount"); - if (workType.equals(fieldType) || workType.isPrimitive()) { + if (processFunction.getParameters().size() == 2) { // process function evaluates pairwise - fetch(builder, "value", workType, "first", "firstScratch"); - builder.beginControlFlow("for (int i = first + 1; i < end; i++)"); - { - if (fieldType.equals(BYTES_REF)) { - fetch(builder, "next", workType, "i", "nextScratch"); - builder.addStatement("$T.$L(value, next)", declarationType, processFunction.getSimpleName()); + if (workType.equals(fieldType) || workType.isPrimitive()) { + fetch(builder, "value", workType, "first", "firstScratch"); + builder.beginControlFlow("for (int i = first + 1; i < end; i++)"); + { + if (fieldType.equals(BYTES_REF)) { + fetch(builder, "next", workType, "i", "nextScratch"); + builder.addStatement("$T.$L(value, next)", declarationType, processFunction.getSimpleName()); + } else { + fetch(builder, "next", fieldType, "i", "nextScratch"); + builder.addStatement("value = $T.$L(value, next)", declarationType, processFunction.getSimpleName()); + } + } + builder.endControlFlow(); + if (finishFunction == null) { + builder.addStatement("$T result = value", resultType); } else { - fetch(builder, "next", fieldType, "i", "nextScratch"); - builder.addStatement("value = $T.$L(value, next)", declarationType, processFunction.getSimpleName()); + finishFunction.call(builder, "value"); } - } - builder.endControlFlow(); - if (finishFunction == null) { - builder.addStatement("$T result = value", resultType); } else { - finishFunction.call(builder, "value"); + builder.beginControlFlow("for (int i = first; i < end; i++)"); + { + fetch(builder, "value", fieldType, "i", "valueScratch"); + builder.addStatement("$T.$L(work, value)", declarationType, processFunction.getSimpleName()); + } + builder.endControlFlow(); + finishFunction.call(builder, "work"); } } else { - builder.beginControlFlow("for (int i = first; i < end; i++)"); - { - fetch(builder, "value", fieldType, "i", "valueScratch"); - builder.addStatement("$T.$L(work, value)", declarationType, processFunction.getSimpleName()); - } - builder.endControlFlow(); - finishFunction.call(builder, "work"); + // process function evaluates position at a time + String scratch = fieldType.equals(BYTES_REF) ? 
", valueScratch" : ""; + builder.addStatement( + "$T result = $T.$L(v, first, end$L)", + resultType, + declarationType, + processFunction.getSimpleName(), + scratch + ); } writeResult(builder); }); @@ -399,7 +418,7 @@ private MethodSpec factoryToString() { private static class FinishFunction { static FinishFunction from(TypeElement declarationType, String name, TypeName workType, TypeName fieldType) { if (name.equals("")) { - if (false == workType.equals(fieldType)) { + if (workType != null && false == workType.equals(fieldType)) { throw new IllegalArgumentException( "the [finish] enum value is required because the first and second arguments differ in type" ); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec index ef8ff3b3e6064..21d9c21191f77 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec @@ -51,6 +51,8 @@ mv_avg |? mv_avg(arg1:?) mv_concat |"keyword mv_concat(v:text|keyword, delim:text|keyword)" |[v, delim] |["text|keyword", "text|keyword"] |["values to join", "delimiter"] |keyword | "Reduce a multivalued string field to a single valued field by concatenating all values." | [false, false] | false mv_count |"integer mv_count(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | "" | integer | "Reduce a multivalued field to a single valued field containing the count of values." | false | false mv_dedupe |"? mv_dedupe(v:boolean|date|double|ip|text|integer|keyword|version|long)" |v | "boolean|date|double|ip|text|integer|keyword|version|long" | "" |? | "Remove duplicate values from a multivalued field." | false | false +mv_first |"? mv_first(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | "" |? | "Reduce a multivalued field to a single valued field containing the first value." | false | false +mv_last |"? mv_last(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | "" |? | "Reduce a multivalued field to a single valued field containing the last value." | false | false mv_max |"? mv_max(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long" | "" |? | "Reduce a multivalued field to a single valued field containing the maximum value." | false | false mv_median |? mv_median(arg1:?) |arg1 |? | "" |? | "" | false | false mv_min |"? mv_min(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long" | "" |? | "Reduce a multivalued field to a single valued field containing the minimum value." | false | false @@ -143,6 +145,8 @@ boolean is_nan(n:double) "keyword mv_concat(v:text|keyword, delim:text|keyword)" "integer mv_count(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" "? mv_dedupe(v:boolean|date|double|ip|text|integer|keyword|version|long)" +"? 
mv_first(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" +"? mv_last(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" "? mv_max(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" ? mv_median(arg1:?) "? mv_min(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" @@ -211,5 +215,5 @@ countFunctions#[skip:-8.12.99] show functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -85 | 85 | 85 +87 | 87 | 87 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 1f78a63c8c4d8..b8b80df389f9c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -415,6 +415,32 @@ ROW a=["foo", "zoo", "bar"] // end::mv_concat-result[] ; +mvFirst#[skip:-8.12.99, reason:Added in 8.13.0] +// tag::mv_first[] +ROW a="foo;bar;baz" +| EVAL first_a = MV_FIRST(SPLIT(a, ";")) +// end::mv_first[] +; + +// tag::mv_first-result[] + a:keyword | first_a:keyword +foo;bar;baz | "foo" +// end::mv_first-result[] +; + +mvLast#[skip:-8.12.99, reason:Added in 8.13.0] +// tag::mv_last[] +ROW a="foo;bar;baz" +| EVAL last_a = MV_LAST(SPLIT(a, ";")) +// end::mv_last[] +; + +// tag::mv_last-result[] + a:keyword | last_a:keyword +foo;bar;baz | "baz" +// end::mv_last-result[] +; + mvMax // tag::mv_max[] ROW a=["foo", "zoo", "bar"] diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBooleanEvaluator.java new file mode 100644 index 0000000000000..bf946aab347d2 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBooleanEvaluator.java @@ -0,0 +1,89 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvFirst}. + * This class is generated. Do not edit it. + */ +public final class MvFirstBooleanEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { + public MvFirstBooleanEvaluator(EvalOperator.ExpressionEvaluator field, + DriverContext driverContext) { + super(driverContext, field); + } + + @Override + public String name() { + return "MvFirst"; + } + + /** + * Evaluate blocks containing at least one multivalued field. 
+ */ + @Override + public Block evalNullable(Block fieldVal) { + BooleanBlock v = (BooleanBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (BooleanBlock.Builder builder = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + boolean result = MvFirst.process(v, first, end); + builder.appendBoolean(result); + } + return builder.build(); + } + } + + /** + * Evaluate blocks containing at least one multivalued field. + */ + @Override + public Block evalNotNullable(Block fieldVal) { + BooleanBlock v = (BooleanBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (BooleanVector.FixedBuilder builder = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + boolean result = MvFirst.process(v, first, end); + builder.appendBoolean(result); + } + return builder.build().asBlock(); + } + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvFirstBooleanEvaluator get(DriverContext context) { + return new MvFirstBooleanEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvFirst[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBytesRefEvaluator.java new file mode 100644 index 0000000000000..e0cb6ca4c289b --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstBytesRefEvaluator.java @@ -0,0 +1,92 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvFirst}. + * This class is generated. Do not edit it. + */ +public final class MvFirstBytesRefEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { + public MvFirstBytesRefEvaluator(EvalOperator.ExpressionEvaluator field, + DriverContext driverContext) { + super(driverContext, field); + } + + @Override + public String name() { + return "MvFirst"; + } + + /** + * Evaluate blocks containing at least one multivalued field. 
+ */ + @Override + public Block evalNullable(Block fieldVal) { + BytesRefBlock v = (BytesRefBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + BytesRef valueScratch = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + BytesRef result = MvFirst.process(v, first, end, valueScratch); + builder.appendBytesRef(result); + } + return builder.build(); + } + } + + /** + * Evaluate blocks containing at least one multivalued field. + */ + @Override + public Block evalNotNullable(Block fieldVal) { + BytesRefBlock v = (BytesRefBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (BytesRefVector.Builder builder = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { + BytesRef valueScratch = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + BytesRef result = MvFirst.process(v, first, end, valueScratch); + builder.appendBytesRef(result); + } + return builder.build().asBlock(); + } + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvFirstBytesRefEvaluator get(DriverContext context) { + return new MvFirstBytesRefEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvFirst[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstDoubleEvaluator.java new file mode 100644 index 0000000000000..584319cc1ab82 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstDoubleEvaluator.java @@ -0,0 +1,89 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvFirst}. + * This class is generated. Do not edit it. + */ +public final class MvFirstDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { + public MvFirstDoubleEvaluator(EvalOperator.ExpressionEvaluator field, + DriverContext driverContext) { + super(driverContext, field); + } + + @Override + public String name() { + return "MvFirst"; + } + + /** + * Evaluate blocks containing at least one multivalued field. 
+ */ + @Override + public Block evalNullable(Block fieldVal) { + DoubleBlock v = (DoubleBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (DoubleBlock.Builder builder = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + double result = MvFirst.process(v, first, end); + builder.appendDouble(result); + } + return builder.build(); + } + } + + /** + * Evaluate blocks containing at least one multivalued field. + */ + @Override + public Block evalNotNullable(Block fieldVal) { + DoubleBlock v = (DoubleBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (DoubleVector.FixedBuilder builder = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + double result = MvFirst.process(v, first, end); + builder.appendDouble(result); + } + return builder.build().asBlock(); + } + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvFirstDoubleEvaluator get(DriverContext context) { + return new MvFirstDoubleEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvFirst[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstIntEvaluator.java new file mode 100644 index 0000000000000..1e9c50d135559 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstIntEvaluator.java @@ -0,0 +1,88 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvFirst}. + * This class is generated. Do not edit it. + */ +public final class MvFirstIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { + public MvFirstIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { + super(driverContext, field); + } + + @Override + public String name() { + return "MvFirst"; + } + + /** + * Evaluate blocks containing at least one multivalued field. 
+ */ + @Override + public Block evalNullable(Block fieldVal) { + IntBlock v = (IntBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (IntBlock.Builder builder = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + int result = MvFirst.process(v, first, end); + builder.appendInt(result); + } + return builder.build(); + } + } + + /** + * Evaluate blocks containing at least one multivalued field. + */ + @Override + public Block evalNotNullable(Block fieldVal) { + IntBlock v = (IntBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (IntVector.FixedBuilder builder = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + int result = MvFirst.process(v, first, end); + builder.appendInt(result); + } + return builder.build().asBlock(); + } + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvFirstIntEvaluator get(DriverContext context) { + return new MvFirstIntEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvFirst[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstLongEvaluator.java new file mode 100644 index 0000000000000..7e118a1eb9eb8 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstLongEvaluator.java @@ -0,0 +1,88 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvFirst}. + * This class is generated. Do not edit it. + */ +public final class MvFirstLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { + public MvFirstLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { + super(driverContext, field); + } + + @Override + public String name() { + return "MvFirst"; + } + + /** + * Evaluate blocks containing at least one multivalued field. 
+ */ + @Override + public Block evalNullable(Block fieldVal) { + LongBlock v = (LongBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + long result = MvFirst.process(v, first, end); + builder.appendLong(result); + } + return builder.build(); + } + } + + /** + * Evaluate blocks containing at least one multivalued field. + */ + @Override + public Block evalNotNullable(Block fieldVal) { + LongBlock v = (LongBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (LongVector.FixedBuilder builder = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + long result = MvFirst.process(v, first, end); + builder.appendLong(result); + } + return builder.build().asBlock(); + } + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvFirstLongEvaluator get(DriverContext context) { + return new MvFirstLongEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvFirst[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBooleanEvaluator.java new file mode 100644 index 0000000000000..75b49f2c3e8ee --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBooleanEvaluator.java @@ -0,0 +1,89 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvLast}. + * This class is generated. Do not edit it. + */ +public final class MvLastBooleanEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { + public MvLastBooleanEvaluator(EvalOperator.ExpressionEvaluator field, + DriverContext driverContext) { + super(driverContext, field); + } + + @Override + public String name() { + return "MvLast"; + } + + /** + * Evaluate blocks containing at least one multivalued field. 
+ */ + @Override + public Block evalNullable(Block fieldVal) { + BooleanBlock v = (BooleanBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (BooleanBlock.Builder builder = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + boolean result = MvLast.process(v, first, end); + builder.appendBoolean(result); + } + return builder.build(); + } + } + + /** + * Evaluate blocks containing at least one multivalued field. + */ + @Override + public Block evalNotNullable(Block fieldVal) { + BooleanBlock v = (BooleanBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (BooleanVector.FixedBuilder builder = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + boolean result = MvLast.process(v, first, end); + builder.appendBoolean(result); + } + return builder.build().asBlock(); + } + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvLastBooleanEvaluator get(DriverContext context) { + return new MvLastBooleanEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvLast[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBytesRefEvaluator.java new file mode 100644 index 0000000000000..fcca356b38576 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastBytesRefEvaluator.java @@ -0,0 +1,92 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvLast}. + * This class is generated. Do not edit it. + */ +public final class MvLastBytesRefEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { + public MvLastBytesRefEvaluator(EvalOperator.ExpressionEvaluator field, + DriverContext driverContext) { + super(driverContext, field); + } + + @Override + public String name() { + return "MvLast"; + } + + /** + * Evaluate blocks containing at least one multivalued field. 
+ */ + @Override + public Block evalNullable(Block fieldVal) { + BytesRefBlock v = (BytesRefBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + BytesRef valueScratch = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + BytesRef result = MvLast.process(v, first, end, valueScratch); + builder.appendBytesRef(result); + } + return builder.build(); + } + } + + /** + * Evaluate blocks containing at least one multivalued field. + */ + @Override + public Block evalNotNullable(Block fieldVal) { + BytesRefBlock v = (BytesRefBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (BytesRefVector.Builder builder = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { + BytesRef valueScratch = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + BytesRef result = MvLast.process(v, first, end, valueScratch); + builder.appendBytesRef(result); + } + return builder.build().asBlock(); + } + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvLastBytesRefEvaluator get(DriverContext context) { + return new MvLastBytesRefEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvLast[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastDoubleEvaluator.java new file mode 100644 index 0000000000000..b0cf7bf59900b --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastDoubleEvaluator.java @@ -0,0 +1,89 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvLast}. + * This class is generated. Do not edit it. + */ +public final class MvLastDoubleEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { + public MvLastDoubleEvaluator(EvalOperator.ExpressionEvaluator field, + DriverContext driverContext) { + super(driverContext, field); + } + + @Override + public String name() { + return "MvLast"; + } + + /** + * Evaluate blocks containing at least one multivalued field. 
+ */ + @Override + public Block evalNullable(Block fieldVal) { + DoubleBlock v = (DoubleBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (DoubleBlock.Builder builder = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + double result = MvLast.process(v, first, end); + builder.appendDouble(result); + } + return builder.build(); + } + } + + /** + * Evaluate blocks containing at least one multivalued field. + */ + @Override + public Block evalNotNullable(Block fieldVal) { + DoubleBlock v = (DoubleBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (DoubleVector.FixedBuilder builder = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + double result = MvLast.process(v, first, end); + builder.appendDouble(result); + } + return builder.build().asBlock(); + } + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvLastDoubleEvaluator get(DriverContext context) { + return new MvLastDoubleEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvLast[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastIntEvaluator.java new file mode 100644 index 0000000000000..5c2af9218308d --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastIntEvaluator.java @@ -0,0 +1,88 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvLast}. + * This class is generated. Do not edit it. + */ +public final class MvLastIntEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { + public MvLastIntEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { + super(driverContext, field); + } + + @Override + public String name() { + return "MvLast"; + } + + /** + * Evaluate blocks containing at least one multivalued field. 
+ */ + @Override + public Block evalNullable(Block fieldVal) { + IntBlock v = (IntBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (IntBlock.Builder builder = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + int result = MvLast.process(v, first, end); + builder.appendInt(result); + } + return builder.build(); + } + } + + /** + * Evaluate blocks containing at least one multivalued field. + */ + @Override + public Block evalNotNullable(Block fieldVal) { + IntBlock v = (IntBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (IntVector.FixedBuilder builder = driverContext.blockFactory().newIntVectorFixedBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + int result = MvLast.process(v, first, end); + builder.appendInt(result); + } + return builder.build().asBlock(); + } + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvLastIntEvaluator get(DriverContext context) { + return new MvLastIntEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvLast[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastLongEvaluator.java new file mode 100644 index 0000000000000..37b95378f1f5b --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastLongEvaluator.java @@ -0,0 +1,88 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link MvLast}. + * This class is generated. Do not edit it. + */ +public final class MvLastLongEvaluator extends AbstractMultivalueFunction.AbstractEvaluator { + public MvLastLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) { + super(driverContext, field); + } + + @Override + public String name() { + return "MvLast"; + } + + /** + * Evaluate blocks containing at least one multivalued field. 
+ */ + @Override + public Block evalNullable(Block fieldVal) { + LongBlock v = (LongBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + if (valueCount == 0) { + builder.appendNull(); + continue; + } + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + long result = MvLast.process(v, first, end); + builder.appendLong(result); + } + return builder.build(); + } + } + + /** + * Evaluate blocks containing at least one multivalued field. + */ + @Override + public Block evalNotNullable(Block fieldVal) { + LongBlock v = (LongBlock) fieldVal; + int positionCount = v.getPositionCount(); + try (LongVector.FixedBuilder builder = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = v.getValueCount(p); + int first = v.getFirstValueIndex(p); + int end = first + valueCount; + long result = MvLast.process(v, first, end); + builder.appendLong(result); + } + return builder.build().asBlock(); + } + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvLastLongEvaluator get(DriverContext context) { + return new MvLastLongEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvLast[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 3b76141fa541e..5b72a601180d8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -67,6 +67,8 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvConcat; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvCount; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvDedupe; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvFirst; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvLast; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMax; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMedian; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMin; @@ -194,6 +196,8 @@ private FunctionDefinition[][] functions() { def(MvConcat.class, MvConcat::new, "mv_concat"), def(MvCount.class, MvCount::new, "mv_count"), def(MvDedupe.class, MvDedupe::new, "mv_dedupe"), + def(MvFirst.class, MvFirst::new, "mv_first"), + def(MvLast.class, MvLast::new, "mv_last"), def(MvMax.class, MvMax::new, "mv_max"), def(MvMedian.class, MvMedian::new, "mv_median"), def(MvMin.class, MvMin::new, "mv_min"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java new file mode 100644 index 
0000000000000..1acb135292995 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.MvEvaluator; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; + +import java.util.List; + +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; + +/** + * Reduce a multivalued field to a single valued field containing the first value. + */ +public class MvFirst extends AbstractMultivalueFunction { + @FunctionInfo(returnType = "?", description = "Reduce a multivalued field to a single valued field containing the first value.") + public MvFirst( + Source source, + @Param( + name = "v", + type = { + "unsigned_long", + "date", + "boolean", + "double", + "ip", + "text", + "integer", + "keyword", + "version", + "long", + "geo_point", + "cartesian_point" } + ) Expression field + ) { + super(source, field); + } + + @Override + protected TypeResolution resolveFieldType() { + return isType(field(), EsqlDataTypes::isRepresentable, sourceText(), null, "representable"); + } + + @Override + protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { + return switch (PlannerUtils.toElementType(field().dataType())) { + case BOOLEAN -> new MvFirstBooleanEvaluator.Factory(fieldEval); + case BYTES_REF -> new MvFirstBytesRefEvaluator.Factory(fieldEval); + case DOUBLE -> new MvFirstDoubleEvaluator.Factory(fieldEval); + case INT -> new MvFirstIntEvaluator.Factory(fieldEval); + case LONG -> new MvFirstLongEvaluator.Factory(fieldEval); + case NULL -> EvalOperator.CONSTANT_NULL_FACTORY; + default -> throw EsqlIllegalArgumentException.illegalDataType(field.dataType()); + }; + } + + @Override + public Expression replaceChildren(List newChildren) { + return new MvFirst(source(), newChildren.get(0)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, MvFirst::new, field()); + } + + @MvEvaluator(extraName = "Boolean") + static boolean process(BooleanBlock block, int start, int end) { + return block.getBoolean(start); + } + + @MvEvaluator(extraName = "Long") + static long process(LongBlock block, int start, int end) { + return block.getLong(start); + } + + @MvEvaluator(extraName = "Int") + static int process(IntBlock block, int start, int end) {
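+ // each position's values occupy the index range [start, end); the first value is always at `start`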
return block.getInt(start); + } + + @MvEvaluator(extraName = "Double") + static double process(DoubleBlock block, int start, int end) { + return block.getDouble(start); + } + + @MvEvaluator(extraName = "BytesRef") + static BytesRef process(BytesRefBlock block, int start, int end, BytesRef scratch) { + return block.getBytesRef(start, scratch); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java new file mode 100644 index 0000000000000..2e6066a6dc98c --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.MvEvaluator; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; + +import java.util.List; + +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; + +/** + * Reduce a multivalued field to a single valued field containing the last value.
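+ * (Each position's values occupy the index range {@code [start, end)}, so the last value is read from {@code end - 1}.)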
+ */ +public class MvLast extends AbstractMultivalueFunction { + @FunctionInfo(returnType = "?", description = "Reduce a multivalued field to a single valued field containing the last value.") + public MvLast( + Source source, + @Param( + name = "v", + type = { + "unsigned_long", + "date", + "boolean", + "double", + "ip", + "text", + "integer", + "keyword", + "version", + "long", + "geo_point", + "cartesian_point" } + ) Expression field + ) { + super(source, field); + } + + @Override + protected TypeResolution resolveFieldType() { + return isType(field(), EsqlDataTypes::isRepresentable, sourceText(), null, "representable"); + } + + @Override + protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { + return switch (PlannerUtils.toElementType(field().dataType())) { + case BOOLEAN -> new MvLastBooleanEvaluator.Factory(fieldEval); + case BYTES_REF -> new MvLastBytesRefEvaluator.Factory(fieldEval); + case DOUBLE -> new MvLastDoubleEvaluator.Factory(fieldEval); + case INT -> new MvLastIntEvaluator.Factory(fieldEval); + case LONG -> new MvLastLongEvaluator.Factory(fieldEval); + case NULL -> EvalOperator.CONSTANT_NULL_FACTORY; + default -> throw EsqlIllegalArgumentException.illegalDataType(field.dataType()); + }; + } + + @Override + public Expression replaceChildren(List newChildren) { + return new MvLast(source(), newChildren.get(0)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, MvLast::new, field()); + } + + @MvEvaluator(extraName = "Boolean") + static boolean process(BooleanBlock block, int start, int end) { + return block.getBoolean(end - 1); + } + + @MvEvaluator(extraName = "Long") + static long process(LongBlock block, int start, int end) { + return block.getLong(end - 1); + } + + @MvEvaluator(extraName = "Int") + static int process(IntBlock block, int start, int end) { + return block.getInt(end - 1); + } + + @MvEvaluator(extraName = "Double") + static double process(DoubleBlock block, int start, int end) { + return block.getDouble(end - 1); + } + + @MvEvaluator(extraName = "BytesRef") + static BytesRef process(BytesRefBlock block, int start, int end, BytesRef scratch) { + return block.getBytesRef(end - 1, scratch); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java index 1470c3ec1e5ae..c0caaf8b180ce 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java @@ -100,7 +100,7 @@ * {@code ./gradlew -p x-pack/plugin/esql/ check} * *

  • - * Now it's time to write some docs! Open {@code docs/reference/esql/esql-functions.asciidoc} + * Now it's time to write some docs! Open {@code docs/reference/esql/esql-functions-operators.asciidoc} * and add your function in alphabetical order to the list at the top and then add it to * the includes below. *
  • diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 4f03f7a7d72ff..ee37b34e58d39 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -87,6 +87,8 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvConcat; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvCount; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvDedupe; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvFirst; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvLast; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMax; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMedian; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMin; @@ -392,6 +394,8 @@ public static List namedTypeEntries() { of(ScalarFunction.class, MvCount.class, PlanNamedTypes::writeMvFunction, PlanNamedTypes::readMvFunction), of(ScalarFunction.class, MvConcat.class, PlanNamedTypes::writeMvConcat, PlanNamedTypes::readMvConcat), of(ScalarFunction.class, MvDedupe.class, PlanNamedTypes::writeMvFunction, PlanNamedTypes::readMvFunction), + of(ScalarFunction.class, MvFirst.class, PlanNamedTypes::writeMvFunction, PlanNamedTypes::readMvFunction), + of(ScalarFunction.class, MvLast.class, PlanNamedTypes::writeMvFunction, PlanNamedTypes::readMvFunction), of(ScalarFunction.class, MvMax.class, PlanNamedTypes::writeMvFunction, PlanNamedTypes::readMvFunction), of(ScalarFunction.class, MvMedian.class, PlanNamedTypes::writeMvFunction, PlanNamedTypes::readMvFunction), of(ScalarFunction.class, MvMin.class, PlanNamedTypes::writeMvFunction, PlanNamedTypes::readMvFunction), @@ -1544,6 +1548,8 @@ static void writeAggFunction(PlanStreamOutput out, AggregateFunction aggregateFu entry(name(MvAvg.class), MvAvg::new), entry(name(MvCount.class), MvCount::new), entry(name(MvDedupe.class), MvDedupe::new), + entry(name(MvFirst.class), MvFirst::new), + entry(name(MvLast.class), MvLast::new), entry(name(MvMax.class), MvMax::new), entry(name(MvMedian.class), MvMedian::new), entry(name(MvMin.class), MvMin::new), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java new file mode 100644 index 0000000000000..91c30b7c1f566 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class MvFirstTests extends AbstractMultivalueFunctionTestCase { + public MvFirstTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List cases = new ArrayList<>(); + booleans(cases, "mv_first", "MvFirst", DataTypes.BOOLEAN, (size, values) -> equalTo(values.findFirst().get())); + bytesRefs(cases, "mv_first", "MvFirst", Function.identity(), (size, values) -> equalTo(values.findFirst().get())); + doubles(cases, "mv_first", "MvFirst", DataTypes.DOUBLE, (size, values) -> equalTo(values.findFirst().getAsDouble())); + ints(cases, "mv_first", "MvFirst", DataTypes.INTEGER, (size, values) -> equalTo(values.findFirst().getAsInt())); + longs(cases, "mv_first", "MvFirst", DataTypes.LONG, (size, values) -> equalTo(values.findFirst().getAsLong())); + unsignedLongs(cases, "mv_first", "MvFirst", DataTypes.UNSIGNED_LONG, (size, values) -> equalTo(values.findFirst().get())); + dateTimes(cases, "mv_first", "MvFirst", DataTypes.DATETIME, (size, values) -> equalTo(values.findFirst().getAsLong())); + geoPoints(cases, "mv_first", "MvFirst", EsqlDataTypes.GEO_POINT, (size, values) -> equalTo(values.findFirst().get())); + cartesianPoints(cases, "mv_first", "MvFirst", EsqlDataTypes.CARTESIAN_POINT, (size, values) -> equalTo(values.findFirst().get())); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, cases))); + } + + @Override + protected Expression build(Source source, Expression field) { + return new MvFirst(source, field); + } + + @Override + protected DataType[] supportedTypes() { + return representableTypes(); + } + + @Override + protected DataType expectedType(List argTypes) { + return argTypes.get(0); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java new file mode 100644 index 0000000000000..7577cbf7dd0a8 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class MvLastTests extends AbstractMultivalueFunctionTestCase { + public MvLastTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List cases = new ArrayList<>(); + booleans(cases, "mv_last", "MvLast", DataTypes.BOOLEAN, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); + bytesRefs(cases, "mv_last", "MvLast", Function.identity(), (size, values) -> equalTo(values.reduce((f, s) -> s).get())); + doubles(cases, "mv_last", "MvLast", DataTypes.DOUBLE, (size, values) -> equalTo(values.reduce((f, s) -> s).getAsDouble())); + ints(cases, "mv_last", "MvLast", DataTypes.INTEGER, (size, values) -> equalTo(values.reduce((f, s) -> s).getAsInt())); + longs(cases, "mv_last", "MvLast", DataTypes.LONG, (size, values) -> equalTo(values.reduce((f, s) -> s).getAsLong())); + unsignedLongs(cases, "mv_last", "MvLast", DataTypes.UNSIGNED_LONG, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); + dateTimes(cases, "mv_last", "MvLast", DataTypes.DATETIME, (size, values) -> equalTo(values.reduce((f, s) -> s).getAsLong())); + geoPoints(cases, "mv_last", "MvLast", EsqlDataTypes.GEO_POINT, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); + cartesianPoints( + cases, + "mv_last", + "MvLast", + EsqlDataTypes.CARTESIAN_POINT, + (size, values) -> equalTo(values.reduce((f, s) -> s).get()) + ); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, cases))); + } + + @Override + protected Expression build(Source source, Expression field) { + return new MvLast(source, field); + } + + @Override + protected DataType[] supportedTypes() { + return representableTypes(); + } + + @Override + protected DataType expectedType(List argTypes) { + return argTypes.get(0); + } +} From 8df232e0753547f911d9dda0728d3b3edfc6bdf7 Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Tue, 9 Jan 2024 15:10:54 +0100 Subject: [PATCH 44/47] Export circuit breaker trip counts as a single counter with a type attribute (#103657) This is done to make it easier to build dashboards.
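As an illustration only (not part of the patch; the meterRegistry variable and the attribute values are assumed for the sketch), the consolidated metric is used roughly like this, based on the registerLongCounter and incrementBy calls in the diff below:

LongCounter tripCount = meterRegistry.registerLongCounter("es.breaker.trip.total", "Circuit breaker trip count", "count"); // one counter for every breaker
tripCount.incrementBy(1L, Map.of("type", "fielddata")); // each breaker records trips under its own name
tripCount.incrementBy(1L, Map.of("type", "request")); // dashboards group by the "type" attribute instead of by metric name

Summing across types is still possible, but as the updated javadoc below notes, it discards the information needed to tell the breakers apart.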
--- .../breaker/ChildMemoryCircuitBreaker.java | 5 +- .../breaker/CircuitBreakerMetrics.java | 147 +++--------------- .../HierarchyCircuitBreakerService.java | 13 +- .../elasticsearch/node/NodeConstruction.java | 6 +- .../HierarchyCircuitBreakerServiceTests.java | 4 +- ...HierarchyCircuitBreakerTelemetryTests.java | 81 ++-------- 6 files changed, 40 insertions(+), 216 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/breaker/ChildMemoryCircuitBreaker.java b/server/src/main/java/org/elasticsearch/common/breaker/ChildMemoryCircuitBreaker.java index a12071f9c27e3..20ab42eba0386 100644 --- a/server/src/main/java/org/elasticsearch/common/breaker/ChildMemoryCircuitBreaker.java +++ b/server/src/main/java/org/elasticsearch/common/breaker/ChildMemoryCircuitBreaker.java @@ -14,6 +14,7 @@ import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.telemetry.metric.LongCounter; +import java.util.Collections; import java.util.concurrent.atomic.AtomicLong; import static org.elasticsearch.core.Strings.format; @@ -32,6 +33,8 @@ public class ChildMemoryCircuitBreaker implements CircuitBreaker { private final String name; private final LongCounter trippedCountMeter; + public static final String CIRCUIT_BREAKER_TYPE_ATTRIBUTE = "type"; + /** * Create a circuit breaker that will break if the number of estimated * bytes grows above the limit. All estimations will be multiplied by @@ -68,7 +71,7 @@ public ChildMemoryCircuitBreaker( public void circuitBreak(String fieldName, long bytesNeeded) { final long memoryBytesLimit = this.limitAndOverhead.limit; this.trippedCount.incrementAndGet(); - this.trippedCountMeter.increment(); + this.trippedCountMeter.incrementBy(1L, Collections.singletonMap(CIRCUIT_BREAKER_TYPE_ATTRIBUTE, this.name)); final String message = "[" + this.name + "] Data too large, data for [" diff --git a/server/src/main/java/org/elasticsearch/indices/breaker/CircuitBreakerMetrics.java b/server/src/main/java/org/elasticsearch/indices/breaker/CircuitBreakerMetrics.java index 3e018385ccc7a..0b858368f0b0b 100644 --- a/server/src/main/java/org/elasticsearch/indices/breaker/CircuitBreakerMetrics.java +++ b/server/src/main/java/org/elasticsearch/indices/breaker/CircuitBreakerMetrics.java @@ -8,111 +8,36 @@ package org.elasticsearch.indices.breaker; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.telemetry.metric.LongCounter; -import org.elasticsearch.telemetry.metric.MeterRegistry; -import java.util.Collections; -import java.util.Map; import java.util.Objects; /** - * A class collecting trip counters for circuit breakers (parent, field data, request, in flight requests and custom child circuit + * A class collecting trip count metrics for circuit breakers (parent, field data, request, in flight requests and custom child circuit * breakers). * - * The circuit breaker name is part of the (long) counter metric name instead of being an attribute because aggregating distinct circuit - * breakers trip counter values does not make sense, as for instance, summing es.breaker.field_data.trip.total and - * es.breaker.in_flight_requests.trip.total. - * Those counters trip for different reasons even if the underlying reason is "too much memory usage". 
Aggregating them together results in - * losing the ability to understand where the underlying issue is (too much field data, too many concurrent - * requests, too large concurrent - * requests?). Aggregating each one of them separately to get, for instance, cluster level or cloud region level statistics is perfectly - * fine, instead. - * - * NOTE: here we have the ability to register custom trip counters too. This ability is something a few plugins take advantage of nowadays. - * At the time of writing this class it is just "Eql" and "MachineLearning" which track memory used to store "things" that are - * application/plugin specific such as eql sequence query objects and inference model objects. As a result, we just have a couple of this - * custom counters. This means we have 6 circuit breaker counter metrics per node (parent, field_data, request, in_flight_requests, - * eql_sequence and model_inference). We register them a bit differently to keep the ability for plugins to define their own circuit breaker - * trip counters. + * The circuit breaker name is used as an attribute so that we define a single counter metric where the name is mapped to a 'type' + * attribute. The counter trips for different reasons even if the underlying reason is "too much memory usage". Aggregating them together + * results in losing the ability to understand where the underlying issue is (too much field data, too many concurrent requests, too large + * concurrent requests?). As a result we advise in aggregation queries not to "aggregate away" the type attribute so that you treat each + * circuit breaker as a separate counter. */ public class CircuitBreakerMetrics { - public static final CircuitBreakerMetrics NOOP = new CircuitBreakerMetrics(TelemetryProvider.NOOP, Collections.emptyMap()); - public static final String ES_BREAKER_PARENT_TRIP_COUNT_TOTAL = "es.breaker.parent.trip.total"; - public static final String ES_BREAKER_FIELD_DATA_TRIP_COUNT_TOTAL = "es.breaker.field_data.trip.total"; - public static final String ES_BREAKER_REQUEST_TRIP_COUNT_TOTAL = "es.breaker.request.trip.total"; - public static final String ES_BREAKER_IN_FLIGHT_REQUESTS_TRIP_COUNT_TOTAL = "es.breaker.in_flight_requests.trip.total"; - - private static final String ES_BREAKER_CUSTOM_TRIP_COUNT_TOTAL_TEMPLATE = "es.breaker.%s.trip.total"; - private final MeterRegistry registry; - private final LongCounter parentTripCountTotal; - private final LongCounter fielddataTripCountTotal; - private final LongCounter requestTripCountTotal; - private final LongCounter inFlightRequestsCountTotal; - private final Map customTripCountsTotal; - - private CircuitBreakerMetrics( - final MeterRegistry registry, - final LongCounter parentTripCountTotal, - final LongCounter fielddataTripCountTotal, - final LongCounter requestTripCountTotal, - final LongCounter inFlightRequestsCountTotal, - final Map customTripCountsTotal - ) { - this.registry = registry; - this.parentTripCountTotal = parentTripCountTotal; - this.fielddataTripCountTotal = fielddataTripCountTotal; - this.requestTripCountTotal = requestTripCountTotal; - this.inFlightRequestsCountTotal = inFlightRequestsCountTotal; - this.customTripCountsTotal = customTripCountsTotal; - } - - public CircuitBreakerMetrics(final TelemetryProvider telemetryProvider, final Map customTripCounters) { - this( - telemetryProvider.getMeterRegistry(), - telemetryProvider.getMeterRegistry() - .registerLongCounter(ES_BREAKER_PARENT_TRIP_COUNT_TOTAL, "Parent circuit breaker trip count", "count"), -
telemetryProvider.getMeterRegistry() - .registerLongCounter(ES_BREAKER_FIELD_DATA_TRIP_COUNT_TOTAL, "Field data circuit breaker trip count", "count"), - telemetryProvider.getMeterRegistry() - .registerLongCounter(ES_BREAKER_REQUEST_TRIP_COUNT_TOTAL, "Request circuit breaker trip count", "count"), - telemetryProvider.getMeterRegistry() - .registerLongCounter( - ES_BREAKER_IN_FLIGHT_REQUESTS_TRIP_COUNT_TOTAL, - "In-flight requests circuit breaker trip count", - "count" - ), - customTripCounters - ); - } - - public LongCounter getParentTripCountTotal() { - return parentTripCountTotal; - } + public static final CircuitBreakerMetrics NOOP = new CircuitBreakerMetrics(TelemetryProvider.NOOP); + public static final String ES_BREAKER_TRIP_COUNT_TOTAL = "es.breaker.trip.total"; + private final LongCounter tripCount; - public LongCounter getFielddataTripCountTotal() { - return fielddataTripCountTotal; + private CircuitBreakerMetrics(final LongCounter tripCount) { + this.tripCount = tripCount; } - public LongCounter getRequestTripCountTotal() { - return requestTripCountTotal; + public CircuitBreakerMetrics(final TelemetryProvider telemetryProvider) { + this(telemetryProvider.getMeterRegistry().registerLongCounter(ES_BREAKER_TRIP_COUNT_TOTAL, "Circuit breaker trip count", "count")); } - public LongCounter getInFlightRequestsCountTotal() { - return inFlightRequestsCountTotal; - } - - public Map getCustomTripCountsTotal() { - return customTripCountsTotal; - } - - public LongCounter getCustomTripCount(final String name, final LongCounter theDefault) { - return this.customTripCountsTotal.getOrDefault(name, theDefault); - } - - public LongCounter getCustomTripCount(final String name) { - return this.customTripCountsTotal.getOrDefault(name, LongCounter.NOOP); + public LongCounter getTripCount() { + return tripCount; } @Override @@ -120,53 +45,17 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CircuitBreakerMetrics that = (CircuitBreakerMetrics) o; - return Objects.equals(registry, that.registry) - && Objects.equals(parentTripCountTotal, that.parentTripCountTotal) - && Objects.equals(fielddataTripCountTotal, that.fielddataTripCountTotal) - && Objects.equals(requestTripCountTotal, that.requestTripCountTotal) - && Objects.equals(inFlightRequestsCountTotal, that.inFlightRequestsCountTotal) - && Objects.equals(customTripCountsTotal, that.customTripCountsTotal); + return Objects.equals(tripCount, that.tripCount); } @Override public int hashCode() { - return Objects.hash( - registry, - parentTripCountTotal, - fielddataTripCountTotal, - requestTripCountTotal, - inFlightRequestsCountTotal, - customTripCountsTotal - ); + return Objects.hash(tripCount); } @Override public String toString() { - return "CircuitBreakerMetrics{" - + "registry=" - + registry - + ", parentTripCountTotal=" - + parentTripCountTotal - + ", fielddataTripCountTotal=" - + fielddataTripCountTotal - + ", requestTripCountTotal=" - + requestTripCountTotal - + ", inFlightRequestsCountTotal=" - + inFlightRequestsCountTotal - + ", customTripCountsTotal=" - + customTripCountsTotal - + '}'; - } - - public void addCustomCircuitBreaker(final CircuitBreaker circuitBreaker) { - if (this.customTripCountsTotal.containsKey(circuitBreaker.getName())) { - throw new IllegalArgumentException("A circuit circuitBreaker named [" + circuitBreaker.getName() + " already exists"); - } - final String canonicalName = Strings.format(ES_BREAKER_CUSTOM_TRIP_COUNT_TOTAL_TEMPLATE, 
circuitBreaker.getName()); - this.customTripCountsTotal.put( - canonicalName, - registry.registerLongCounter(canonicalName, "A custom circuit circuitBreaker [" + circuitBreaker.getName() + "]", "count") - ); + return "CircuitBreakerMetrics{" + "tripCount=" + tripCount + '}'; } } diff --git a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java index 43e6c02ebe3c6..5a33af26e4a3f 100644 --- a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +++ b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java @@ -171,7 +171,7 @@ public HierarchyCircuitBreakerService( childCircuitBreakers.put( CircuitBreaker.FIELDDATA, validateAndCreateBreaker( - metrics.getFielddataTripCountTotal(), + metrics.getTripCount(), new BreakerSettings( CircuitBreaker.FIELDDATA, FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.get(settings).getBytes(), @@ -184,7 +184,7 @@ public HierarchyCircuitBreakerService( childCircuitBreakers.put( CircuitBreaker.IN_FLIGHT_REQUESTS, validateAndCreateBreaker( - metrics.getInFlightRequestsCountTotal(), + metrics.getTripCount(), new BreakerSettings( CircuitBreaker.IN_FLIGHT_REQUESTS, IN_FLIGHT_REQUESTS_CIRCUIT_BREAKER_LIMIT_SETTING.get(settings).getBytes(), @@ -197,7 +197,7 @@ public HierarchyCircuitBreakerService( childCircuitBreakers.put( CircuitBreaker.REQUEST, validateAndCreateBreaker( - metrics.getRequestTripCountTotal(), + metrics.getTripCount(), new BreakerSettings( CircuitBreaker.REQUEST, REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.get(settings).getBytes(), @@ -215,10 +215,7 @@ public HierarchyCircuitBreakerService( + "] exists. Circuit breaker names must be unique" ); } - childCircuitBreakers.put( - breakerSettings.getName(), - validateAndCreateBreaker(metrics.getCustomTripCount(breakerSettings.getName()), breakerSettings) - ); + childCircuitBreakers.put(breakerSettings.getName(), validateAndCreateBreaker(metrics.getTripCount(), breakerSettings)); } this.breakers = Map.copyOf(childCircuitBreakers); this.parentSettings = new BreakerSettings( @@ -262,7 +259,7 @@ public HierarchyCircuitBreakerService( this.overLimitStrategyFactory = overLimitStrategyFactory; this.overLimitStrategy = overLimitStrategyFactory.apply(this.trackRealMemoryUsage); - this.parentTripCountTotalMetric = metrics.getParentTripCountTotal(); + this.parentTripCountTotalMetric = metrics.getTripCount(); } private void updateCircuitBreakerSettings(String name, ByteSizeValue newLimit, Double newOverhead) { diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 0de5657c0cb1a..018abebdb7709 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -183,7 +183,6 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.telemetry.metric.LongCounter; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; @@ -207,7 +206,6 @@ import java.util.Objects; import java.util.Optional; import java.util.Set; -import java.util.TreeMap; import java.util.concurrent.TimeUnit; import java.util.function.Function; import java.util.function.Supplier; @@ -665,9
+663,8 @@ private void construct( IndicesModule indicesModule = new IndicesModule(pluginsService.filterPlugins(MapperPlugin.class).toList()); modules.add(indicesModule); - final Map customTripCounters = new TreeMap<>(); CircuitBreakerService circuitBreakerService = createCircuitBreakerService( - new CircuitBreakerMetrics(telemetryProvider, customTripCounters), + new CircuitBreakerMetrics(telemetryProvider), settingsModule.getSettings(), settingsModule.getClusterSettings() ); @@ -1300,7 +1297,6 @@ private CircuitBreakerService createCircuitBreakerService( pluginBreakers.forEach(t -> { final CircuitBreaker circuitBreaker = circuitBreakerService.getBreaker(t.v2().getName()); t.v1().setCircuitBreaker(circuitBreaker); - metrics.addCustomCircuitBreaker(circuitBreaker); }); return circuitBreakerService; diff --git a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java index a517d09b2aefe..15f6d0ed377fa 100644 --- a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java @@ -80,7 +80,7 @@ public void checkParentLimit(long newBytesReserved, String label) throws Circuit }; final BreakerSettings settings = new BreakerSettings(CircuitBreaker.REQUEST, (BYTES_PER_THREAD * NUM_THREADS) - 1, 1.0); final ChildMemoryCircuitBreaker breaker = new ChildMemoryCircuitBreaker( - CircuitBreakerMetrics.NOOP.getParentTripCountTotal(), + CircuitBreakerMetrics.NOOP.getTripCount(), settings, logger, (HierarchyCircuitBreakerService) service, @@ -158,7 +158,7 @@ public void checkParentLimit(long newBytesReserved, String label) throws Circuit }; final BreakerSettings settings = new BreakerSettings(CircuitBreaker.REQUEST, childLimit, 1.0); final ChildMemoryCircuitBreaker breaker = new ChildMemoryCircuitBreaker( - CircuitBreakerMetrics.NOOP.getParentTripCountTotal(), + CircuitBreakerMetrics.NOOP.getTripCount(), settings, logger, (HierarchyCircuitBreakerService) service, diff --git a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerTelemetryTests.java b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerTelemetryTests.java index 961fe2dc15efe..2cbe1202520df 100644 --- a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerTelemetryTests.java +++ b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerTelemetryTests.java @@ -31,6 +31,7 @@ import java.util.function.Function; import java.util.stream.Stream; +import static org.elasticsearch.common.breaker.ChildMemoryCircuitBreaker.CIRCUIT_BREAKER_TYPE_ATTRIBUTE; import static org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING; import static org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING; import static org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService.IN_FLIGHT_REQUESTS_CIRCUIT_BREAKER_LIMIT_SETTING; @@ -49,53 +50,8 @@ protected Collection> nodePlugins() { public static class TestCircuitBreakerTelemetryPlugin extends TestTelemetryPlugin { protected final MeterRegistry meter = new RecordingMeterRegistry() { - private final LongCounter inFlightRequests = new RecordingInstruments.RecordingLongCounter( - CircuitBreakerMetrics.ES_BREAKER_PARENT_TRIP_COUNT_TOTAL, - recorder - ) 
{ - @Override - public void incrementBy(long inc) { - throw new UnsupportedOperationException(); - } - - @Override - public void incrementBy(long inc, Map attributes) { - throw new UnsupportedOperationException(); - } - }; - - private final LongCounter fielddata = new RecordingInstruments.RecordingLongCounter( - CircuitBreakerMetrics.ES_BREAKER_FIELD_DATA_TRIP_COUNT_TOTAL, - recorder - ) { - @Override - public void incrementBy(long inc) { - throw new UnsupportedOperationException(); - } - - @Override - public void incrementBy(long inc, Map attributes) { - throw new UnsupportedOperationException(); - } - }; - - private final LongCounter request = new RecordingInstruments.RecordingLongCounter( - CircuitBreakerMetrics.ES_BREAKER_REQUEST_TRIP_COUNT_TOTAL, - recorder - ) { - @Override - public void incrementBy(long inc) { - throw new UnsupportedOperationException(); - } - - @Override - public void incrementBy(long inc, Map attributes) { - throw new UnsupportedOperationException(); - } - }; - - private final LongCounter parent = new RecordingInstruments.RecordingLongCounter( - CircuitBreakerMetrics.ES_BREAKER_PARENT_TRIP_COUNT_TOTAL, + private final LongCounter tripCount = new RecordingInstruments.RecordingLongCounter( + CircuitBreakerMetrics.ES_BREAKER_TRIP_COUNT_TOTAL, recorder ) { @Override @@ -111,14 +67,8 @@ public void incrementBy(long inc, Map attributes) { @Override protected LongCounter buildLongCounter(String name, String description, String unit) { - if (name.equals(inFlightRequests.getName())) { - return inFlightRequests; - } else if (name.equals(request.getName())) { - return request; - } else if (name.equals(fielddata.getName())) { - return fielddata; - } else if (name.equals(parent.getName())) { - return parent; + if (name.equals(tripCount.getName())) { + return tripCount; } throw new IllegalArgumentException("Unknown counter metric name [" + name + "]"); } @@ -136,15 +86,7 @@ public LongCounter getLongCounter(String name) { } private void assertCircuitBreakerName(final String name) { - assertThat( - name, - Matchers.oneOf( - CircuitBreakerMetrics.ES_BREAKER_FIELD_DATA_TRIP_COUNT_TOTAL, - CircuitBreakerMetrics.ES_BREAKER_IN_FLIGHT_REQUESTS_TRIP_COUNT_TOTAL, - CircuitBreakerMetrics.ES_BREAKER_PARENT_TRIP_COUNT_TOTAL, - CircuitBreakerMetrics.ES_BREAKER_REQUEST_TRIP_COUNT_TOTAL - ) - ); + assertThat(name, Matchers.oneOf(CircuitBreakerMetrics.ES_BREAKER_TRIP_COUNT_TOTAL)); } }; } @@ -193,6 +135,7 @@ public void testCircuitBreakerTripCountMetric() { final Measurement measurement = allMeasurements.get(0); assertThat(1L, Matchers.equalTo(measurement.getLong())); assertThat(1L, Matchers.equalTo(measurement.value())); + assertThat(Map.of(CIRCUIT_BREAKER_TYPE_ATTRIBUTE, "inflight_requests"), Matchers.equalTo(measurement.attributes())); assertThat(true, Matchers.equalTo(measurement.isLong())); return; } @@ -205,13 +148,9 @@ private List getMeasurements(String dataNodeName) { .toList() .get(0); return Measurement.combine( - Stream.of( - dataNodeTelemetryPlugin.getLongCounterMeasurement(CircuitBreakerMetrics.ES_BREAKER_IN_FLIGHT_REQUESTS_TRIP_COUNT_TOTAL) - .stream(), - dataNodeTelemetryPlugin.getLongCounterMeasurement(CircuitBreakerMetrics.ES_BREAKER_FIELD_DATA_TRIP_COUNT_TOTAL).stream(), - dataNodeTelemetryPlugin.getLongCounterMeasurement(CircuitBreakerMetrics.ES_BREAKER_REQUEST_TRIP_COUNT_TOTAL).stream(), - dataNodeTelemetryPlugin.getLongCounterMeasurement(CircuitBreakerMetrics.ES_BREAKER_PARENT_TRIP_COUNT_TOTAL).stream() - ).flatMap(Function.identity()).toList() + 
Stream.of(dataNodeTelemetryPlugin.getLongCounterMeasurement(CircuitBreakerMetrics.ES_BREAKER_TRIP_COUNT_TOTAL).stream()) + .flatMap(Function.identity()) + .toList() ); } From 99e885a6c42bd718fac5930e17cd9f2b7c506575 Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Tue, 9 Jan 2024 09:24:54 -0500 Subject: [PATCH 45/47] [ML] Ensure unique IDs between inference models and trained model deployments (#103996) * Check for conflicting inference models before starting trained model deployment * ensure inference model ids and trained model deployment ids are unique * Added integration test and refactored integration tests to share superclass * Update docs/changelog/103996.yaml * Add inference plugin to ml support BaseMlIntegTestCase --- docs/changelog/103996.yaml | 5 + .../xpack/core/ml/job/messages/Messages.java | 2 + .../TransportPutInferenceModelAction.java | 11 ++ x-pack/plugin/ml/build.gradle | 1 + .../CoordinatedInferenceIngestIT.java | 106 +++--------------- .../ml/integration/InferenceBaseRestTest.java | 99 ++++++++++++++++ .../ml/integration/ModelIdUniquenessIT.java | 57 ++++++++++ ...portStartTrainedModelDeploymentAction.java | 80 ++++++++----- .../xpack/ml/support/BaseMlIntegTestCase.java | 4 +- 9 files changed, 241 insertions(+), 124 deletions(-) create mode 100644 docs/changelog/103996.yaml create mode 100644 x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceBaseRestTest.java create mode 100644 x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelIdUniquenessIT.java diff --git a/docs/changelog/103996.yaml b/docs/changelog/103996.yaml new file mode 100644 index 0000000000000..699b93fff4f03 --- /dev/null +++ b/docs/changelog/103996.yaml @@ -0,0 +1,5 @@ +pr: 103996 +summary: Ensure unique IDs between inference models and trained model deployments +area: Machine Learning +type: enhancement +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java index 36b4c0f1815ff..ad7a6b998fafd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java @@ -277,6 +277,8 @@ public final class Messages { public static final String REST_CANNOT_DELETE_FORECAST_IN_CURRENT_STATE = "Forecast(s) [{0}] for job [{1}] needs to be either FAILED or FINISHED to be deleted"; public static final String FIELD_CANNOT_BE_NULL = "Field [{0}] cannot be null"; + public static final String MODEL_ID_MATCHES_EXISTING_MODEL_IDS_BUT_MUST_NOT = + "Model IDs must be unique. Requested model ID [{0}] matches existing model IDs but must not."; private Messages() {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java index d8f5b9424b162..142e071c9a133 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java @@ -36,6 +36,9 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xpack.core.inference.action.PutInferenceModelAction; import org.elasticsearch.xpack.core.ml.MachineLearningField; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentUtils; +import org.elasticsearch.xpack.core.ml.job.messages.Messages; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.MlPlatformArchitecturesUtil; import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.registry.ModelRegistry; @@ -128,6 +131,14 @@ protected void masterOperation( return; } + var assignments = TrainedModelAssignmentUtils.modelAssignments(request.getModelId(), clusterService.state()); + if ((assignments == null || assignments.isEmpty()) == false) { + listener.onFailure( + ExceptionsHelper.badRequestException(Messages.MODEL_ID_MATCHES_EXISTING_MODEL_IDS_BUT_MUST_NOT, request.getModelId()) + ); + return; + } + if (service.get().isInClusterService()) { // Find the cluster platform as the service may need that // information when creating the model diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index bd01938a9a14a..74600a072ea0d 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -74,6 +74,7 @@ esplugin.bundleSpec.exclude 'platform/licenses/**' } dependencies { + testImplementation project(path: ':x-pack:plugin:inference') compileOnly project(':modules:lang-painless:spi') compileOnly project(path: xpackModule('core')) compileOnly project(path: xpackModule('autoscaling')) diff --git a/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CoordinatedInferenceIngestIT.java b/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CoordinatedInferenceIngestIT.java index c4c3ee016be0e..4d90d2a186858 100644 --- a/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CoordinatedInferenceIngestIT.java +++ b/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CoordinatedInferenceIngestIT.java @@ -8,48 +8,19 @@ package org.elasticsearch.xpack.ml.integration; import org.elasticsearch.client.Request; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Strings; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.local.distribution.DistributionType; -import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.core.ml.utils.MapHelper; -import
org.junit.ClassRule; import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; -public class CoordinatedInferenceIngestIT extends ESRestTestCase { - - @ClassRule - public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .distribution(DistributionType.DEFAULT) - .setting("xpack.license.self_generated.type", "trial") - .setting("xpack.security.enabled", "true") - .plugin("org.elasticsearch.xpack.inference.mock.TestInferenceServicePlugin") - .user("x_pack_rest_user", "x-pack-test-password") - .build(); - - @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } - - @Override - protected Settings restClientSettings() { - String token = basicAuthHeaderValue("x_pack_rest_user", new SecureString("x-pack-test-password".toCharArray())); - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); - } +public class CoordinatedInferenceIngestIT extends InferenceBaseRestTest { @SuppressWarnings("unchecked") public void testIngestWithMultipleModelTypes() throws IOException { @@ -60,10 +31,10 @@ public void testIngestWithMultipleModelTypes() throws IOException { putInferenceServiceModel(inferenceServiceModelId, TaskType.SPARSE_EMBEDDING); putBoostedTreeRegressionModel(boostedTreeModelId); - putPyTorchModel(pyTorchModelId); - putPyTorchModelDefinition(pyTorchModelId); - putPyTorchModelVocabulary(List.of("these", "are", "my", "words"), pyTorchModelId); - startDeployment(pyTorchModelId); + putPyTorchModelTrainedModels(pyTorchModelId); + putPyTorchModelDefinitionTrainedModels(pyTorchModelId); + putPyTorchModelVocabularyTrainedModels(List.of("these", "are", "my", "words"), pyTorchModelId); + startDeploymentTrainedModels(pyTorchModelId); String docs = """ [ @@ -139,10 +110,10 @@ public void testPipelineConfiguredWithFieldMap() throws IOException { putInferenceServiceModel(inferenceServiceModelId, TaskType.SPARSE_EMBEDDING); putBoostedTreeRegressionModel(boostedTreeModelId); - putPyTorchModel(pyTorchModelId); - putPyTorchModelDefinition(pyTorchModelId); - putPyTorchModelVocabulary(List.of("these", "are", "my", "words"), pyTorchModelId); - startDeployment(pyTorchModelId); + putPyTorchModelTrainedModels(pyTorchModelId); + putPyTorchModelDefinitionTrainedModels(pyTorchModelId); + putPyTorchModelVocabularyTrainedModels(List.of("these", "are", "my", "words"), pyTorchModelId); + startDeploymentTrainedModels(pyTorchModelId); String docs = """ [ @@ -189,9 +160,9 @@ public void testPipelineConfiguredWithFieldMap() throws IOException { public void testWithUndeployedPyTorchModel() throws IOException { var pyTorchModelId = "test-undeployed"; - putPyTorchModel(pyTorchModelId); - putPyTorchModelDefinition(pyTorchModelId); - putPyTorchModelVocabulary(List.of("these", "are", "my", "words"), pyTorchModelId); + putPyTorchModelTrainedModels(pyTorchModelId); + putPyTorchModelDefinitionTrainedModels(pyTorchModelId); + putPyTorchModelVocabularyTrainedModels(List.of("these", "are", "my", "words"), pyTorchModelId); String docs = """ [ @@ -230,36 +201,6 @@ public void testWithUndeployedPyTorchModel() throws IOException { } } - private Map<String, Object> putInferenceServiceModel(String modelId, TaskType taskType) throws IOException { - String endpoint = org.elasticsearch.common.Strings.format("_inference/%s/%s", taskType, modelId); - var request =
new Request("PUT", endpoint); - var modelConfig = ExampleModels.mockServiceModelConfig(); - request.setJsonEntity(modelConfig); - var response = client().performRequest(request); - return entityAsMap(response); - } - - private void putPyTorchModel(String modelId) throws IOException { - Request request = new Request("PUT", "_ml/trained_models/" + modelId); - var modelConfiguration = ExampleModels.pytorchPassThroughModelConfig(); - request.setJsonEntity(modelConfiguration); - client().performRequest(request); - } - - protected void putPyTorchModelVocabulary(List vocabulary, String modelId) throws IOException { - List vocabularyWithPad = new ArrayList<>(); - vocabularyWithPad.add("[PAD]"); - vocabularyWithPad.add("[UNK]"); - vocabularyWithPad.addAll(vocabulary); - String quotedWords = vocabularyWithPad.stream().map(s -> "\"" + s + "\"").collect(Collectors.joining(",")); - - Request request = new Request("PUT", "_ml/trained_models/" + modelId + "/vocabulary"); - request.setJsonEntity(Strings.format(""" - { "vocabulary": [%s] } - """, quotedWords)); - client().performRequest(request); - } - protected Map simulatePipeline(String pipelineDef, String docs) throws IOException { String simulate = Strings.format(""" { @@ -272,27 +213,6 @@ protected Map simulatePipeline(String pipelineDef, String docs) return entityAsMap(client().performRequest(request)); } - protected void putPyTorchModelDefinition(String modelId) throws IOException { - Request request = new Request("PUT", "_ml/trained_models/" + modelId + "/definition/0"); - String body = Strings.format( - """ - {"total_definition_length":%s,"definition": "%s","total_parts": 1}""", - ExampleModels.RAW_PYTORCH_MODEL_SIZE, - ExampleModels.BASE_64_ENCODED_PYTORCH_MODEL - ); - request.setJsonEntity(body); - client().performRequest(request); - } - - protected void startDeployment(String modelId) throws IOException { - String endPoint = "/_ml/trained_models/" - + modelId - + "/deployment/_start?timeout=40s&wait_for=started&threads_per_allocation=1&number_of_allocations=1"; - - Request request = new Request("POST", endPoint); - client().performRequest(request); - } - private void putBoostedTreeRegressionModel(String modelId) throws IOException { Request request = new Request("PUT", "_ml/trained_models/" + modelId); var modelConfiguration = ExampleModels.boostedTreeRegressionModel(); @@ -300,7 +220,7 @@ private void putBoostedTreeRegressionModel(String modelId) throws IOException { client().performRequest(request); } - public Map getModel(String modelId, TaskType taskType) throws IOException { + public Map getModelInference(String modelId, TaskType taskType) throws IOException { var endpoint = org.elasticsearch.common.Strings.format("_inference/%s/%s", taskType, modelId); var request = new Request("GET", endpoint); var reponse = client().performRequest(request); diff --git a/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceBaseRestTest.java b/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceBaseRestTest.java new file mode 100644 index 0000000000000..51838dba082b9 --- /dev/null +++ b/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceBaseRestTest.java @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.integration; + +import org.elasticsearch.client.Request; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Strings; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.ClassRule; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +public class InferenceBaseRestTest extends ESRestTestCase { + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "true") + .plugin("org.elasticsearch.xpack.inference.mock.TestInferenceServicePlugin") + .user("x_pack_rest_user", "x-pack-test-password") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Override + protected Settings restClientSettings() { + String token = basicAuthHeaderValue("x_pack_rest_user", new SecureString("x-pack-test-password".toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + protected Map<String, Object> putInferenceServiceModel(String modelId, TaskType taskType) throws IOException { + String endpoint = org.elasticsearch.common.Strings.format("_inference/%s/%s", taskType, modelId); + var request = new Request("PUT", endpoint); + var modelConfig = ExampleModels.mockServiceModelConfig(); + request.setJsonEntity(modelConfig); + var response = client().performRequest(request); + return entityAsMap(response); + } + + protected void putPyTorchModelTrainedModels(String modelId) throws IOException { + Request request = new Request("PUT", "_ml/trained_models/" + modelId); + var modelConfiguration = ExampleModels.pytorchPassThroughModelConfig(); + request.setJsonEntity(modelConfiguration); + client().performRequest(request); + } + + protected void putPyTorchModelVocabularyTrainedModels(List<String> vocabulary, String modelId) throws IOException { + List<String> vocabularyWithPad = new ArrayList<>(); + vocabularyWithPad.add("[PAD]"); + vocabularyWithPad.add("[UNK]"); + vocabularyWithPad.addAll(vocabulary); + String quotedWords = vocabularyWithPad.stream().map(s -> "\"" + s + "\"").collect(Collectors.joining(",")); + + Request request = new Request("PUT", "_ml/trained_models/" + modelId + "/vocabulary"); + request.setJsonEntity(Strings.format(""" + { "vocabulary": [%s] } + """, quotedWords)); + client().performRequest(request); + } + + protected void putPyTorchModelDefinitionTrainedModels(String modelId) throws IOException { + Request request = new Request("PUT", "_ml/trained_models/" + modelId + "/definition/0"); + String body = Strings.format( + """ + {"total_definition_length":%s,"definition": "%s","total_parts": 1}""", + ExampleModels.RAW_PYTORCH_MODEL_SIZE, + ExampleModels.BASE_64_ENCODED_PYTORCH_MODEL + ); + request.setJsonEntity(body); + client().performRequest(request); + } + + protected void startDeploymentTrainedModels(String modelId) throws IOException { + String endPoint
= "/_ml/trained_models/" + + modelId + + "/deployment/_start?timeout=40s&wait_for=started&threads_per_allocation=1&number_of_allocations=1"; + + Request request = new Request("POST", endPoint); + client().performRequest(request); + } +} diff --git a/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelIdUniquenessIT.java b/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelIdUniquenessIT.java new file mode 100644 index 0000000000000..9904cfb752de5 --- /dev/null +++ b/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelIdUniquenessIT.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + * + * this file was contributed to by a generative AI + */ + +package org.elasticsearch.xpack.ml.integration; + +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.inference.TaskType; +import org.hamcrest.Matchers; + +import java.util.List; + +public class ModelIdUniquenessIT extends InferenceBaseRestTest { + + public void testPutInferenceModelFailsWhenTrainedModelWithIdAlreadyExists() throws Exception { + + String modelId = "duplicate_model_id"; + putPyTorchModelTrainedModels(modelId); + putPyTorchModelDefinitionTrainedModels(modelId); + putPyTorchModelVocabularyTrainedModels(List.of("these", "are", "my", "words"), modelId); + startDeploymentTrainedModels(modelId); + + var e = expectThrows(ResponseException.class, () -> putInferenceServiceModel(modelId, TaskType.SPARSE_EMBEDDING)); + assertThat( + e.getMessage(), + Matchers.containsString( + "Model IDs must be unique. Requested model ID [" + modelId + "] matches existing model IDs but must not." + ) + + ); + } + + public void testPutTrainedModelFailsWhenInferenceModelWithIdAlreadyExists() throws Exception { + + String modelId = "duplicate_model_id"; + putPyTorchModelTrainedModels(modelId); + putPyTorchModelDefinitionTrainedModels(modelId); + putPyTorchModelVocabularyTrainedModels(List.of("these", "are", "my", "words"), modelId); + + putInferenceServiceModel(modelId, TaskType.SPARSE_EMBEDDING); + + var e = expectThrows(ResponseException.class, () -> startDeploymentTrainedModels(modelId)); + assertThat( + e.getMessage(), + Matchers.containsString( + "Model IDs must be unique. Requested model ID [" + modelId + "] matches existing model IDs but must not." 
+ ) + + ); + + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java index 28475dc70569f..0a5641836df4a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java @@ -33,6 +33,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.inference.TaskType; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; @@ -44,6 +45,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackField; +import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; @@ -220,6 +222,8 @@ protected void masterOperation( ); }, listener::onFailure); + GetTrainedModelsAction.Request getModelWithDeploymentId = new GetTrainedModelsAction.Request(request.getDeploymentId()); + ActionListener<GetTrainedModelsAction.Response> getModelListener = ActionListener.wrap(getModelResponse -> { if (getModelResponse.getResources().results().size() > 1) { listener.onFailure( @@ -252,44 +256,60 @@ protected void masterOperation( return; } + ActionListener<GetTrainedModelsAction.Response> checkDeploymentIdDoesntAlreadyExist = ActionListener.wrap( + response -> listener.onFailure( + ExceptionsHelper.badRequestException( + "Deployment id [{}] is the same as another model which is not the model being deployed. " + + "Deployment id can be the same as the model being deployed but cannot match a different model", + request.getDeploymentId(), + request.getModelId() + ) + ), + error -> { + if (ExceptionsHelper.unwrapCause(error) instanceof ResourceNotFoundException) { + // no name clash, continue with the deployment + checkFullModelDefinitionIsPresent(client, trainedModelConfig, true, request.getTimeout(), modelSizeListener); + } else { + listener.onFailure(error); + } + } + ); + // If the model id isn't the same id as the deployment id - // check there isn't another model with deployment id + // check there isn't another model with that deployment id if (request.getModelId().equals(request.getDeploymentId()) == false) { - GetTrainedModelsAction.Request getModelWithDeploymentId = new GetTrainedModelsAction.Request(request.getDeploymentId()); - client.execute( - GetTrainedModelsAction.INSTANCE, - getModelWithDeploymentId, - ActionListener.wrap( - response -> listener.onFailure( - ExceptionsHelper.badRequestException( - "Deployment id [{}] is the same as another model which is not the model being deployed.
" - + "Deployment id can be the same as the model being deployed but cannot match a different model", - request.getDeploymentId(), - request.getModelId() - ) - ), - error -> { - if (ExceptionsHelper.unwrapCause(error) instanceof ResourceNotFoundException) { - // no name clash, continue with the deployment - checkFullModelDefinitionIsPresent( - client, - trainedModelConfig, - true, - request.getTimeout(), - modelSizeListener - ); - } else { - listener.onFailure(error); - } - } - ) - ); + client.execute(GetTrainedModelsAction.INSTANCE, getModelWithDeploymentId, checkDeploymentIdDoesntAlreadyExist); } else { checkFullModelDefinitionIsPresent(client, trainedModelConfig, true, request.getTimeout(), modelSizeListener); } }, listener::onFailure); + ActionListener getInferenceModelListener = ActionListener.wrap((getInferenceModelResponse) -> { + if (getInferenceModelResponse.getModels().isEmpty() == false) { + listener.onFailure( + ExceptionsHelper.badRequestException(Messages.MODEL_ID_MATCHES_EXISTING_MODEL_IDS_BUT_MUST_NOT, request.getModelId()) + ); + } else { + getTrainedModelRequestExecution(request, getModelListener); + } + }, error -> { + if (ExceptionsHelper.unwrapCause(error) instanceof ResourceNotFoundException) { + // no name clash, continue with the deployment + getTrainedModelRequestExecution(request, getModelListener); + } else { + listener.onFailure(error); + } + }); + + GetInferenceModelAction.Request getModelRequest = new GetInferenceModelAction.Request(request.getModelId(), TaskType.ANY); + client.execute(GetInferenceModelAction.INSTANCE, getModelRequest, getInferenceModelListener); + } + + private void getTrainedModelRequestExecution( + StartTrainedModelDeploymentAction.Request request, + ActionListener getModelListener + ) { GetTrainedModelsAction.Request getModelRequest = new GetTrainedModelsAction.Request(request.getModelId()); client.execute(GetTrainedModelsAction.INSTANCE, getModelRequest, getModelListener); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java index a987110192f00..5aaaa3ff958fd 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java @@ -81,6 +81,7 @@ import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.utils.MlTaskState; import org.elasticsearch.xpack.ilm.IndexLifecycle; +import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.ml.LocalStateMachineLearning; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; @@ -159,7 +160,8 @@ protected Collection> nodePlugins() { DataStreamsPlugin.class, // To remove errors from parsing build in templates that contain scaled_float MapperExtrasPlugin.class, - Wildcard.class + Wildcard.class, + InferencePlugin.class ); } From e53a5cb2903533c096645cc7a718614cc1277f51 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 9 Jan 2024 15:35:15 +0100 Subject: [PATCH 46/47] mute ToCartesianPointTests (#104129) relates https://github.com/elastic/elasticsearch/issues/104127 --- .../function/scalar/convert/ToCartesianPointTests.java | 2 ++ 1 file changed, 2 insertions(+) diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java index 399ce11ab3d4c..274165f6bbd45 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java @@ -10,6 +10,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.BytesRef; import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; @@ -26,6 +27,7 @@ import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104127") public class ToCartesianPointTests extends AbstractFunctionTestCase { public ToCartesianPointTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); From e93892c0fdfd8e0614a67368d3f746fe033ccab1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Tue, 9 Jan 2024 15:46:46 +0100 Subject: [PATCH 47/47] [DOCS] Expands inference API docs (#104047) Co-authored-by: David Kyle --- docs/reference/inference/delete-inference.asciidoc | 5 +++++ docs/reference/inference/get-inference.asciidoc | 6 ++++++ docs/reference/inference/inference-apis.asciidoc | 5 +++++ docs/reference/inference/post-inference.asciidoc | 5 +++++ docs/reference/inference/put-inference.asciidoc | 14 ++++++++++++-- 5 files changed, 33 insertions(+), 2 deletions(-) diff --git a/docs/reference/inference/delete-inference.asciidoc b/docs/reference/inference/delete-inference.asciidoc index c9c3e16458618..692a96212f5ca 100644 --- a/docs/reference/inference/delete-inference.asciidoc +++ b/docs/reference/inference/delete-inference.asciidoc @@ -6,6 +6,11 @@ experimental[] Deletes an {infer} model deployment. +IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, +OpenAI, or Hugging Face, in your cluster. This is not the same feature that you +can use on an ML node with custom {ml} models. If you want to train and use your +own model, use the <>. + [discrete] [[delete-inference-api-request]] ==== {api-request-title} diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc index b81f2663ec9e1..45f4cb67e7674 100644 --- a/docs/reference/inference/get-inference.asciidoc +++ b/docs/reference/inference/get-inference.asciidoc @@ -6,6 +6,12 @@ experimental[] Retrieves {infer} model information. +IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, +OpenAI, or Hugging Face, in your cluster. This is not the same feature that you +can use on an ML node with custom {ml} models. If you want to train and use your +own model, use the <>.
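The separation these admonitions describe, {infer} API models on one side and custom trained model deployments on the other, is exactly what PATCH 45 above enforces at creation time in both directions. As a minimal, self-contained illustration of that two-way guard in plain Java: the in-memory maps below merely stand in for the inference model registry and the trained model assignments in cluster state, so every class and method name here is illustrative, not the actual Elasticsearch implementation.

import java.util.HashMap;
import java.util.Map;

// Toy sketch of the two-way model-ID uniqueness guard from PATCH 45 (illustrative names only).
class ModelIdUniquenessSketch {
    private final Map<String, Object> inferenceModels = new HashMap<>();         // stands in for the inference model registry
    private final Map<String, Object> trainedModelDeployments = new HashMap<>(); // stands in for trained model assignments in cluster state

    void putInferenceModel(String modelId) {
        // Mirrors the guard in TransportPutInferenceModelAction: reject if a deployment holds the ID.
        if (trainedModelDeployments.containsKey(modelId)) {
            throw new IllegalArgumentException(
                "Model IDs must be unique. Requested model ID [" + modelId + "] matches existing model IDs but must not."
            );
        }
        inferenceModels.put(modelId, new Object());
    }

    void startTrainedModelDeployment(String modelId) {
        // Mirrors the guard in TransportStartTrainedModelDeploymentAction: reject if an inference model holds the ID.
        if (inferenceModels.containsKey(modelId)) {
            throw new IllegalArgumentException(
                "Model IDs must be unique. Requested model ID [" + modelId + "] matches existing model IDs but must not."
            );
        }
        trainedModelDeployments.put(modelId, new Object());
    }
}

The two failure paths correspond to the two cases exercised by ModelIdUniquenessIT above: putting an inference model over an existing trained model deployment ID, and starting a deployment over an existing inference model ID.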
+ + [discrete] [[get-inference-api-request]] ==== {api-request-title} diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index 0476ac57287d9..cdc6bfe254ea2 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -4,6 +4,11 @@ experimental[] +IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, +OpenAI, or Hugging Face, in your cluster. This is not the same feature that you +can use on an ML node with custom {ml} models. If you want to train and use your +own model, use the <>. + You can use the following APIs to manage {infer} models and perform {infer}: * <> diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc index f8515a8b33c39..9ef633160f162 100644 --- a/docs/reference/inference/post-inference.asciidoc +++ b/docs/reference/inference/post-inference.asciidoc @@ -6,6 +6,11 @@ experimental[] Performs an inference task on an input text by using an {infer} model. +IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, +OpenAI, or Hugging Face, in your cluster. This is not the same feature that you +can use on an ML node with custom {ml} models. If you want to train and use your +own model, use the <>. + [discrete] [[post-inference-api-request]] diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 9f0539fb551cb..5d517d313b9ea 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -6,6 +6,11 @@ experimental[] Creates a model to perform an {infer} task. +IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, +OpenAI, or Hugging Face, in your cluster. This is not the same feature that you +can use on an ML node with custom {ml} models. If you want to train and use your +own model, use the <>. + [discrete] [[put-inference-api-request]] @@ -27,6 +32,10 @@ Creates a model to perform an {infer} task. The create {infer} API enables you to create and configure an {infer} model to perform a specific {infer} task. +The following services are available through the {infer} API: +* ELSER +* OpenAI +* Hugging Face [discrete] [[put-inference-api-path-params]] @@ -52,8 +61,9 @@ The type of the {infer} task that the model will perform. Available task types: (Required, string) The type of service supported for the specified task type. Available services: -* `elser`, -* `openai`. +* `elser`: specify the `sparse_embedding` task type to use the ELSER service. +* `openai`: specify the `text_embedding` task type to use the OpenAI service. +* `hugging_face`: specify the `text_embedding` task type to use the Hugging Face service. `service_settings`:: (Required, object)
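To make the service and task-type pairing above concrete, here is a short usage sketch in the same low-level REST client style as InferenceBaseRestTest. It assumes it runs inside a test class extending InferenceBaseRestTest (so that client() and Request are available); the model ID is invented, and the service_settings field names (num_allocations, num_threads) are assumptions about the ELSER service schema rather than values stated in this patch.

import java.io.IOException;
import org.elasticsearch.client.Request;

// Hedged sketch: create an ELSER sparse_embedding model through the _inference API.
// Endpoint shape follows the put-inference docs above: PUT _inference/<task_type>/<model_id>.
// The service_settings fields are assumptions; consult the service documentation for the schema.
protected void putElserSparseEmbeddingModel() throws IOException {
    Request request = new Request("PUT", "_inference/sparse_embedding/my-elser-model");
    request.setJsonEntity("""
        {
          "service": "elser",
          "service_settings": {
            "num_allocations": 1,
            "num_threads": 1
          },
          "task_settings": {}
        }
        """);
    client().performRequest(request);
}

Removing the model afterwards would go through the delete-inference API described above (DELETE _inference/sparse_embedding/my-elser-model), and, per PATCH 45, the same ID could not simultaneously name a trained model deployment.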