From 9325cbf10aabe0e0a76f800b359344c7c50379b0 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Mon, 15 Nov 2021 14:50:20 +0100 Subject: [PATCH 01/90] Base module for APM Tracing (#80705) --- .../action/search/TransportSearchAction.java | 7 ++- .../java/org/elasticsearch/node/Node.java | 9 ++++ .../elasticsearch/plugins/TracingPlugin.java | 18 +++++++ .../snapshots/SnapshotResiliencyTests.java | 4 +- x-pack/plugin/apm-integration/build.gradle | 24 +++++++++ .../org/elasticsearch/xpack/apm/ApmIT.java | 39 +++++++++++++++ .../java/org/elasticsearch/xpack/apm/APM.java | 50 +++++++++++++++++++ .../elasticsearch/xpack/apm/APMTracer.java | 34 +++++++++++++ 8 files changed, 183 insertions(+), 2 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/plugins/TracingPlugin.java create mode 100644 x-pack/plugin/apm-integration/build.gradle create mode 100644 x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java create mode 100644 x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java create mode 100644 x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index fb04afe0b268d..b73a35d3456d1 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -49,6 +49,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.ExecutorSelector; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.plugins.TracingPlugin; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.SearchShardTarget; @@ -76,6 +77,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executor; @@ -129,6 +131,7 @@ public class TransportSearchAction extends HandledTransportAction) SearchRequest::new); this.threadPool = threadPool; @@ -157,6 +161,7 @@ public TransportSearchAction( this.namedWriteableRegistry = namedWriteableRegistry; this.executorSelector = executorSelector; this.defaultPreFilterShardSize = DEFAULT_PRE_FILTER_SHARD_SIZE.get(clusterService.getSettings()); + this.tracer = Objects.requireNonNull(tracer); } private Map buildPerIndexOriginalIndices( diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 63a6fddab2109..0b62fb75ffbd6 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -147,6 +147,7 @@ import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.plugins.ShutdownAwarePlugin; import org.elasticsearch.plugins.SystemIndexPlugin; +import org.elasticsearch.plugins.TracingPlugin; import org.elasticsearch.repositories.RepositoriesModule; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; @@ -207,6 +208,7 @@ import static java.util.stream.Collectors.toList; import static org.elasticsearch.core.Types.forciblyCast; +import static org.elasticsearch.plugins.TracingPlugin.NO_TRACING; /** * A node represent a node within a cluster 
({@code cluster.name}). The {@link #client()} can be used @@ -725,6 +727,12 @@ protected Node( clusterService.addListener(new SystemIndexMetadataUpgradeService(systemIndices, clusterService)); } new TemplateUpgradeService(client, clusterService, threadPool, indexTemplateMetadataUpgraders); + + final TracingPlugin.Tracer tracer = (TracingPlugin.Tracer) pluginComponents.stream() + .filter(c -> c instanceof TracingPlugin.Tracer) + .findFirst() + .orElse(NO_TRACING); + final Transport transport = networkModule.getTransportSupplier().get(); Set taskHeaders = Stream.concat( pluginsService.filterPlugins(ActionPlugin.class).stream().flatMap(p -> p.getTaskHeaders().stream()), @@ -966,6 +974,7 @@ protected Node( b.bind(SystemIndices.class).toInstance(systemIndices); b.bind(PluginShutdownService.class).toInstance(pluginShutdownService); b.bind(ExecutorSelector.class).toInstance(executorSelector); + b.bind(TracingPlugin.Tracer.class).toInstance(tracer); }); injector = modules.createInjector(); diff --git a/server/src/main/java/org/elasticsearch/plugins/TracingPlugin.java b/server/src/main/java/org/elasticsearch/plugins/TracingPlugin.java new file mode 100644 index 0000000000000..0aff495262c56 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/plugins/TracingPlugin.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.plugins; + +public interface TracingPlugin { + + Tracer NO_TRACING = something -> {}; + + interface Tracer { + void trace(String something); + } +} diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index dd7a1498e43e7..8ae4c9dee8a9b 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -160,6 +160,7 @@ import org.elasticsearch.monitor.StatusInfo; import org.elasticsearch.node.ResponseCollectorService; import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.plugins.TracingPlugin; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.RepositoryData; @@ -1973,7 +1974,8 @@ protected void assertSnapshotOrGenericThread() { actionFilters, indexNameExpressionResolver, namedWriteableRegistry, - EmptySystemIndices.INSTANCE.getExecutorSelector() + EmptySystemIndices.INSTANCE.getExecutorSelector(), + TracingPlugin.NO_TRACING ) ); actions.put( diff --git a/x-pack/plugin/apm-integration/build.gradle b/x-pack/plugin/apm-integration/build.gradle new file mode 100644 index 0000000000000..ff31706d2cea8 --- /dev/null +++ b/x-pack/plugin/apm-integration/build.gradle @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +apply plugin: 'elasticsearch.internal-cluster-test' +apply plugin: 'elasticsearch.internal-es-plugin' +apply plugin: 'elasticsearch.internal-test-artifact' + +esplugin { + name 'apm-integration' + description 'Provides APM integration for Elasticsearch' + classname 'org.elasticsearch.xpack.apm.APM' + extendedPlugins = ['x-pack-core'] +} + +dependencies { + compileOnly project(path: xpackModule('core')) + internalClusterTestImplementation(testArtifact(project(xpackModule('core')))) +} + +// no unit-test for now +tasks.named("test").configure { enabled = false } diff --git a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java new file mode 100644 index 0000000000000..fcf00203daea9 --- /dev/null +++ b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.apm; + +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.plugins.TracingPlugin; +import org.elasticsearch.test.ESIntegTestCase; + +import java.util.Collection; +import java.util.List; + +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.notNullValue; + +public class ApmIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return CollectionUtils.appendToCopy(super.nodePlugins(), APM.class); + } + + public void testModule() { + List plugins = internalCluster().getMasterNodeInstance(PluginsService.class).filterPlugins(TracingPlugin.class); + assertThat(plugins, hasSize(1)); + + TracingPlugin.Tracer tracer = internalCluster().getInstance(TracingPlugin.Tracer.class); + assertThat(tracer, notNullValue()); + assertThat(tracer, instanceOf(APMTracer.class)); + tracer.trace("Hello World!"); + } +} diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java new file mode 100644 index 0000000000000..948665d8f1afa --- /dev/null +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.apm; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.TracingPlugin; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xcontent.NamedXContentRegistry; + +import java.util.Collection; +import java.util.List; +import java.util.function.Supplier; + +public class APM extends Plugin implements TracingPlugin { + + private final SetOnce tracer = new SetOnce<>(); + + @Override + public Collection createComponents( + Client client, + ClusterService clusterService, + ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, + ScriptService scriptService, + NamedXContentRegistry xContentRegistry, + Environment environment, + NodeEnvironment nodeEnvironment, + NamedWriteableRegistry namedWriteableRegistry, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier repositoriesServiceSupplier + ) { + tracer.set(new APMTracer()); + return List.of(tracer.get()); + } +} diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java new file mode 100644 index 0000000000000..eee3beb9b1f72 --- /dev/null +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.apm; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.plugins.TracingPlugin; + +public class APMTracer extends AbstractLifecycleComponent implements TracingPlugin.Tracer { + + private static final Logger logger = LogManager.getLogger(APMTracer.class); + + public APMTracer() {} + + @Override + protected void doStart() {} + + @Override + protected void doStop() {} + + @Override + protected void doClose() {} + + @Override + public void trace(String something) { + logger.info("tracing {}", something); + } +} From 22ecf6f68e6dd41ba1a4a124cffdfcef82011899 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Mon, 15 Nov 2021 16:36:59 +0100 Subject: [PATCH 02/90] First set of deps (#80720) --- build-tools-internal/version.properties | 3 + x-pack/plugin/apm-integration/build.gradle | 25 +++ .../licenses/opentelemetry-api-1.9.0.jar.sha1 | 1 + .../licenses/opentelemetry-api-LICENSE.txt | 201 ++++++++++++++++++ .../licenses/opentelemetry-api-NOTICE.txt | 0 ...telemetry-api-metrics-1.9.0-alpha.jar.sha1 | 1 + .../opentelemetry-api-metrics-LICENSE.txt | 201 ++++++++++++++++++ .../opentelemetry-api-metrics-NOTICE.txt | 0 .../opentelemetry-context-1.9.0.jar.sha1 | 1 + .../opentelemetry-context-LICENSE.txt | 201 ++++++++++++++++++ .../licenses/opentelemetry-context-NOTICE.txt | 0 ...ntelemetry-exporter-logging-1.9.0.jar.sha1 | 1 + ...opentelemetry-exporter-logging-LICENSE.txt | 201 ++++++++++++++++++ .../opentelemetry-exporter-logging-NOTICE.txt | 0 .../licenses/opentelemetry-sdk-1.9.0.jar.sha1 | 1 + .../licenses/opentelemetry-sdk-LICENSE.txt | 201 ++++++++++++++++++ .../licenses/opentelemetry-sdk-NOTICE.txt | 0 .../opentelemetry-sdk-common-1.9.0.jar.sha1 | 1 + .../opentelemetry-sdk-common-LICENSE.txt | 201 ++++++++++++++++++ .../opentelemetry-sdk-common-NOTICE.txt | 0 ...telemetry-sdk-metrics-1.9.0-alpha.jar.sha1 | 1 + .../opentelemetry-sdk-metrics-LICENSE.txt | 201 ++++++++++++++++++ .../opentelemetry-sdk-metrics-NOTICE.txt | 0 .../opentelemetry-sdk-trace-1.9.0.jar.sha1 | 1 + .../opentelemetry-sdk-trace-LICENSE.txt | 201 ++++++++++++++++++ .../opentelemetry-sdk-trace-NOTICE.txt | 0 ...opentelemetry-semconv-1.9.0-alpha.jar.sha1 | 1 + .../opentelemetry-semconv-LICENSE.txt | 201 ++++++++++++++++++ .../licenses/opentelemetry-semconv-NOTICE.txt | 0 .../elasticsearch/xpack/apm/APMTracer.java | 33 ++- 30 files changed, 1877 insertions(+), 2 deletions(-) create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.9.0.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-api-LICENSE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-api-NOTICE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-api-metrics-1.9.0-alpha.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-api-metrics-LICENSE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-api-metrics-NOTICE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.9.0.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-context-LICENSE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-context-NOTICE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-logging-1.9.0.jar.sha1 create mode 100644 
x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-logging-LICENSE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-logging-NOTICE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-1.9.0.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-LICENSE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-NOTICE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-1.9.0.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-LICENSE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-NOTICE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-1.9.0-alpha.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-LICENSE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-NOTICE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-1.9.0.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-LICENSE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-NOTICE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.9.0-alpha.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-LICENSE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-NOTICE.txt diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 50c1e3c32c8f7..c229d594d1ea0 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -55,3 +55,6 @@ jimfs_guava = 30.1-jre # test framework networknt_json_schema_validator = 1.0.48 + +# tracing +opentelemetry = 1.9.0 diff --git a/x-pack/plugin/apm-integration/build.gradle b/x-pack/plugin/apm-integration/build.gradle index ff31706d2cea8..4178826c1d3d6 100644 --- a/x-pack/plugin/apm-integration/build.gradle +++ b/x-pack/plugin/apm-integration/build.gradle @@ -16,9 +16,34 @@ esplugin { } dependencies { + implementation "io.opentelemetry:opentelemetry-api:${versions.opentelemetry}" + implementation "io.opentelemetry:opentelemetry-api-metrics:${versions.opentelemetry}-alpha" + implementation "io.opentelemetry:opentelemetry-context:${versions.opentelemetry}" + implementation "io.opentelemetry:opentelemetry-sdk:${versions.opentelemetry}" + implementation "io.opentelemetry:opentelemetry-sdk-trace:${versions.opentelemetry}" + implementation "io.opentelemetry:opentelemetry-sdk-common:${versions.opentelemetry}" + implementation "io.opentelemetry:opentelemetry-sdk-metrics:${versions.opentelemetry}-alpha" + implementation "io.opentelemetry:opentelemetry-semconv:${versions.opentelemetry}-alpha" + implementation "io.opentelemetry:opentelemetry-exporter-logging:${versions.opentelemetry}" + compileOnly project(path: xpackModule('core')) internalClusterTestImplementation(testArtifact(project(xpackModule('core')))) } // no unit-test for now tasks.named("test").configure { enabled = false } + +tasks.named("thirdPartyAudit").configure { + ignoreViolations( + 'io.opentelemetry.internal.shaded.jctools.queues.MpscArrayQueueConsumerIndexField', + 'io.opentelemetry.internal.shaded.jctools.queues.MpscArrayQueueProducerIndexField', + 
'io.opentelemetry.internal.shaded.jctools.queues.MpscArrayQueueProducerLimitField', + 'io.opentelemetry.internal.shaded.jctools.util.UnsafeAccess', + 'io.opentelemetry.internal.shaded.jctools.util.UnsafeRefArrayAccess' + ) + ignoreMissingClasses( + 'io.opentelemetry.sdk.logs.data.Body', + 'io.opentelemetry.sdk.logs.data.LogData', + 'io.opentelemetry.sdk.logs.export.LogExporter' + ) +} diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.9.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.9.0.jar.sha1 new file mode 100644 index 0000000000000..d056f5fd3e2cf --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.9.0.jar.sha1 @@ -0,0 +1 @@ +464e96a2c7467aa46cc5e9e1a721dd4f8a7e5311 diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-api-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-api-LICENSE.txt new file mode 100644 index 0000000000000..261eeb9e9f8b2 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-api-LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-api-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-api-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-api-metrics-1.9.0-alpha.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-api-metrics-1.9.0-alpha.jar.sha1 new file mode 100644 index 0000000000000..dafb3e3dc1241 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-api-metrics-1.9.0-alpha.jar.sha1 @@ -0,0 +1 @@ +db9e872c623f59e84e520f28a6af2baf1a4d2001 diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-api-metrics-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-api-metrics-LICENSE.txt new file mode 100644 index 0000000000000..261eeb9e9f8b2 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-api-metrics-LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-api-metrics-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-api-metrics-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.9.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.9.0.jar.sha1 new file mode 100644 index 0000000000000..d4d459c0ac9f6 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.9.0.jar.sha1 @@ -0,0 +1 @@ +5fa03396a9b9e8864c3d92dce196cdd7ffe86fdb diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-context-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-context-LICENSE.txt new file mode 100644 index 0000000000000..261eeb9e9f8b2 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-context-LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-context-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-context-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-logging-1.9.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-logging-1.9.0.jar.sha1 new file mode 100644 index 0000000000000..8b0fd579dcfde --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-logging-1.9.0.jar.sha1 @@ -0,0 +1 @@ +3e8ff4f8da800522a6cc7c64eef0b2dc608f8c16 diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-logging-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-logging-LICENSE.txt new file mode 100644 index 0000000000000..261eeb9e9f8b2 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-logging-LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-logging-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-logging-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-1.9.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-1.9.0.jar.sha1 new file mode 100644 index 0000000000000..53b409c6d62a6 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-1.9.0.jar.sha1 @@ -0,0 +1 @@ +5fde191eb694e83a3df923544a920bc7187a15e6 diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-LICENSE.txt new file mode 100644 index 0000000000000..261eeb9e9f8b2 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-1.9.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-1.9.0.jar.sha1 new file mode 100644 index 0000000000000..e6dde0fe6e543 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-1.9.0.jar.sha1 @@ -0,0 +1 @@ +f834a3cfb30e7a80768b2205940d4e6d203b9e7a diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-LICENSE.txt new file mode 100644 index 0000000000000..261eeb9e9f8b2 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-1.9.0-alpha.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-1.9.0-alpha.jar.sha1 new file mode 100644 index 0000000000000..92030be5e7dce --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-1.9.0-alpha.jar.sha1 @@ -0,0 +1 @@ +247a4cbc19fc934d19e442900bd1b115914fd132 diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-LICENSE.txt new file mode 100644 index 0000000000000..261eeb9e9f8b2 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-1.9.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-1.9.0.jar.sha1 new file mode 100644 index 0000000000000..d5f9b677edeb8 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-1.9.0.jar.sha1 @@ -0,0 +1 @@ +d7994581e392bc43f4bdb529f1e19ace8625f41a diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-LICENSE.txt new file mode 100644 index 0000000000000..261eeb9e9f8b2 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.9.0-alpha.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.9.0-alpha.jar.sha1 new file mode 100644 index 0000000000000..13cb64f5bc13e --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.9.0-alpha.jar.sha1 @@ -0,0 +1 @@ +fe3b7c4eb863cf433594ba21dafa74206b6ab760 diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-LICENSE.txt new file mode 100644 index 0000000000000..261eeb9e9f8b2 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index eee3beb9b1f72..ae6ea2961b37c 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -7,8 +7,19 @@ package org.elasticsearch.xpack.apm; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; +import io.opentelemetry.context.propagation.ContextPropagators; +import io.opentelemetry.exporter.logging.LoggingSpanExporter; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; + import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.Version; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.plugins.TracingPlugin; @@ -16,10 +27,23 @@ public class APMTracer extends AbstractLifecycleComponent implements TracingPlug private static final Logger logger = LogManager.getLogger(APMTracer.class); + private volatile Tracer tracer; + public APMTracer() {} @Override - protected void doStart() {} + protected void doStart() { + SdkTracerProvider sdkTracerProvider = SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.create(new LoggingSpanExporter())) + .build(); + + OpenTelemetry openTelemetry = OpenTelemetrySdk.builder() + .setTracerProvider(sdkTracerProvider) + .setPropagators(ContextPropagators.create(W3CTraceContextPropagator.getInstance())) + .build(); + tracer = openTelemetry.getTracer("elasticsearch", Version.CURRENT.toString()); + tracer.spanBuilder("startup").startSpan().end(); + } @Override protected void doStop() {} @@ -29,6 +53,11 @@ protected void doClose() {} @Override public void trace(String something) { - logger.info("tracing {}", something); + final Tracer tracer = this.tracer; + if (tracer == null) { + return; + } + final Span span = tracer.spanBuilder("something").startSpan(); + span.end(); } } From ba34d70615c812df202369fdafd31a2fa299ed2c Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 15 Nov 2021 18:26:27 +0000 Subject: [PATCH 03/90] Integrate tracer with task manager (#80721) --- .../action/search/TransportSearchAction.java | 7 +- .../java/org/elasticsearch/node/Node.java | 13 ++-- .../elasticsearch/plugins/TracingPlugin.java | 8 ++- .../org/elasticsearch/tasks/TaskManager.java | 34 ++++++--- .../org/elasticsearch/tasks/TaskTracer.java | 68 ++++++++++++++++++ .../snapshots/SnapshotResiliencyTests.java | 4 +- .../org/elasticsearch/xpack/apm/ApmIT.java | 32 +++++++-- .../elasticsearch/xpack/apm/APMTracer.java | 71 +++++++++++++++---- 8 files changed, 190 insertions(+), 47 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/tasks/TaskTracer.java diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index b73a35d3456d1..fb04afe0b268d 100644 
--- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -49,7 +49,6 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.ExecutorSelector; import org.elasticsearch.indices.breaker.CircuitBreakerService; -import org.elasticsearch.plugins.TracingPlugin; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.SearchShardTarget; @@ -77,7 +76,6 @@ import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executor; @@ -131,7 +129,6 @@ public class TransportSearchAction extends HandledTransportAction) SearchRequest::new); this.threadPool = threadPool; @@ -161,7 +157,6 @@ public TransportSearchAction( this.namedWriteableRegistry = namedWriteableRegistry; this.executorSelector = executorSelector; this.defaultPreFilterShardSize = DEFAULT_PRE_FILTER_SHARD_SIZE.get(clusterService.getSettings()); - this.tracer = Objects.requireNonNull(tracer); } private Map buildPerIndexOriginalIndices( diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 0b62fb75ffbd6..792d52e6daf2b 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -168,6 +168,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancellationService; import org.elasticsearch.tasks.TaskResultsService; +import org.elasticsearch.tasks.TaskTracer; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; @@ -208,7 +209,6 @@ import static java.util.stream.Collectors.toList; import static org.elasticsearch.core.Types.forciblyCast; -import static org.elasticsearch.plugins.TracingPlugin.NO_TRACING; /** * A node represent a node within a cluster ({@code cluster.name}). The {@link #client()} can be used @@ -728,11 +728,6 @@ protected Node( } new TemplateUpgradeService(client, clusterService, threadPool, indexTemplateMetadataUpgraders); - final TracingPlugin.Tracer tracer = (TracingPlugin.Tracer) pluginComponents.stream() - .filter(c -> c instanceof TracingPlugin.Tracer) - .findFirst() - .orElse(NO_TRACING); - final Transport transport = networkModule.getTransportSupplier().get(); Set taskHeaders = Stream.concat( pluginsService.filterPlugins(ActionPlugin.class).stream().flatMap(p -> p.getTaskHeaders().stream()), @@ -757,6 +752,11 @@ protected Node( final HttpServerTransport httpServerTransport = newHttpTransport(networkModule); final IndexingPressure indexingLimits = new IndexingPressure(settings); + final TaskTracer taskTracer = transportService.getTaskManager().getTaskTracer(); + pluginComponents.stream() + .map(c -> c instanceof TracingPlugin.Tracer ? 
(TracingPlugin.Tracer) c : null) + .forEach(taskTracer::addTracer); + final RecoverySettings recoverySettings = new RecoverySettings(settings, settingsModule.getClusterSettings()); RepositoriesModule repositoriesModule = new RepositoriesModule( this.environment, @@ -974,7 +974,6 @@ protected Node( b.bind(SystemIndices.class).toInstance(systemIndices); b.bind(PluginShutdownService.class).toInstance(pluginShutdownService); b.bind(ExecutorSelector.class).toInstance(executorSelector); - b.bind(TracingPlugin.Tracer.class).toInstance(tracer); }); injector = modules.createInjector(); diff --git a/server/src/main/java/org/elasticsearch/plugins/TracingPlugin.java b/server/src/main/java/org/elasticsearch/plugins/TracingPlugin.java index 0aff495262c56..cf1860184d57d 100644 --- a/server/src/main/java/org/elasticsearch/plugins/TracingPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/TracingPlugin.java @@ -8,11 +8,13 @@ package org.elasticsearch.plugins; -public interface TracingPlugin { +import org.elasticsearch.tasks.Task; - Tracer NO_TRACING = something -> {}; +public interface TracingPlugin { interface Tracer { - void trace(String something); + void onTaskRegistered(Task task); + + void onTaskUnregistered(Task task); } } diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskManager.java b/server/src/main/java/org/elasticsearch/tasks/TaskManager.java index b4fe32454f03b..df711096a12c2 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskManager.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskManager.java @@ -92,6 +92,8 @@ public class TaskManager implements ClusterStateApplier { private DiscoveryNodes lastDiscoveryNodes = DiscoveryNodes.EMPTY_NODES; + private final TaskTracer taskTracer = new TaskTracer(); + private final ByteSizeValue maxHeaderSize; private final Map channelPendingTaskTrackers = ConcurrentCollections.newConcurrentMap(); private final SetOnce cancellationService = new SetOnce<>(); @@ -141,6 +143,7 @@ public Task register(String type, String action, TaskAwareRequest request) { } else { Task previousTask = tasks.put(task.getId(), task); assert previousTask == null; + taskTracer.onTaskRegistered(task); } return task; } @@ -193,6 +196,7 @@ private void registerCancellableTask(Task task) { CancellableTask cancellableTask = (CancellableTask) task; CancellableTaskHolder holder = new CancellableTaskHolder(cancellableTask); cancellableTasks.put(task, holder); + taskTracer.onTaskRegistered(task); // Check if this task was banned before we start it. The empty check is used to avoid // computing the hash code of the parent taskId as most of the time bannedParents is empty. 
if (task.getParentTaskId().isSet() && bannedParents.isEmpty() == false) { @@ -231,19 +235,23 @@ public void cancel(CancellableTask task, String reason, Runnable listener) { */ public Task unregister(Task task) { logger.trace("unregister task for id: {}", task.getId()); - if (task instanceof CancellableTask) { - CancellableTaskHolder holder = cancellableTasks.remove(task); - if (holder != null) { - holder.finish(); - assert holder.task == task; - return holder.getTask(); + try { + if (task instanceof CancellableTask) { + CancellableTaskHolder holder = cancellableTasks.remove(task); + if (holder != null) { + holder.finish(); + assert holder.task == task; + return holder.getTask(); + } else { + return null; + } } else { - return null; + final Task removedTask = tasks.remove(task.getId()); + assert removedTask == null || removedTask == task; + return removedTask; } - } else { - final Task removedTask = tasks.remove(task.getId()); - assert removedTask == null || removedTask == task; - return removedTask; + } finally { + taskTracer.onTaskUnregistered(task); } } @@ -730,4 +738,8 @@ public void cancelTaskAndDescendants(CancellableTask task, String reason, boolea throw new IllegalStateException("TaskCancellationService is not initialized"); } } + + public TaskTracer getTaskTracer() { + return taskTracer; + } } diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java b/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java new file mode 100644 index 0000000000000..46b55d62dfccc --- /dev/null +++ b/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.tasks; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.plugins.TracingPlugin; + +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; + +public class TaskTracer { + + private static final Logger logger = LogManager.getLogger(); + + private final List tracers = new CopyOnWriteArrayList<>(); + + public void addTracer(TracingPlugin.Tracer tracer) { + if (tracer != null) { + tracers.add(tracer); + } + } + + public void onTaskRegistered(Task task) { + for (TracingPlugin.Tracer tracer : tracers) { + try { + tracer.onTaskRegistered(task); + } catch (Exception e) { + assert false : e; + logger.warn( + new ParameterizedMessage( + "task tracing listener [{}] failed on registration of task [{}][{}]", + tracer, + task.getId(), + task.getAction() + ), + e + ); + } + } + } + + public void onTaskUnregistered(Task task) { + for (TracingPlugin.Tracer tracer : tracers) { + try { + tracer.onTaskUnregistered(task); + } catch (Exception e) { + assert false : e; + logger.warn( + new ParameterizedMessage( + "task tracing listener [{}] failed on unregistration of task [{}][{}]", + tracer, + task.getId(), + task.getAction() + ), + e + ); + } + } + } +} diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 8ae4c9dee8a9b..dd7a1498e43e7 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -160,7 +160,6 @@ import org.elasticsearch.monitor.StatusInfo; import org.elasticsearch.node.ResponseCollectorService; import org.elasticsearch.plugins.PluginsService; -import org.elasticsearch.plugins.TracingPlugin; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.RepositoryData; @@ -1974,8 +1973,7 @@ protected void assertSnapshotOrGenericThread() { actionFilters, indexNameExpressionResolver, namedWriteableRegistry, - EmptySystemIndices.INSTANCE.getExecutorSelector(), - TracingPlugin.NO_TRACING + EmptySystemIndices.INSTANCE.getExecutorSelector() ) ); actions.put( diff --git a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java index fcf00203daea9..070b05a6a75c3 100644 --- a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java +++ b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -7,17 +7,24 @@ package org.elasticsearch.xpack.apm; +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.sdk.trace.data.SpanData; + import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.TracingPlugin; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.tasks.TaskTracer; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.transport.TransportService; import java.util.Collection; +import java.util.Collections; import java.util.List; import static org.hamcrest.Matchers.hasSize; -import 
static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; public class ApmIT extends ESIntegTestCase { @@ -31,9 +38,24 @@ public void testModule() { List plugins = internalCluster().getMasterNodeInstance(PluginsService.class).filterPlugins(TracingPlugin.class); assertThat(plugins, hasSize(1)); - TracingPlugin.Tracer tracer = internalCluster().getInstance(TracingPlugin.Tracer.class); - assertThat(tracer, notNullValue()); - assertThat(tracer, instanceOf(APMTracer.class)); - tracer.trace("Hello World!"); + TransportService transportService = internalCluster().getInstance(TransportService.class); + final TaskTracer taskTracer = transportService.getTaskManager().getTaskTracer(); + assertThat(taskTracer, notNullValue()); + + final Task testTask = new Task(randomNonNegativeLong(), "test", "action", "", TaskId.EMPTY_TASK_ID, Collections.emptyMap()); + + taskTracer.onTaskRegistered(testTask); + taskTracer.onTaskUnregistered(testTask); + + final List capturedSpans = APMTracer.CAPTURING_SPAN_EXPORTER.getCapturedSpans(); + boolean found = false; + final Long targetId = testTask.getId(); + for (SpanData capturedSpan : capturedSpans) { + if (targetId.equals(capturedSpan.getAttributes().get(AttributeKey.longKey("es.task.id")))) { + found = true; + assertTrue(capturedSpan.hasEnded()); + } + } + assertTrue(found); } } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index ae6ea2961b37c..0a055401dcba3 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -12,35 +12,43 @@ import io.opentelemetry.api.trace.Tracer; import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; import io.opentelemetry.context.propagation.ContextPropagators; -import io.opentelemetry.exporter.logging.LoggingSpanExporter; import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.data.SpanData; import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.export.SpanExporter; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.plugins.TracingPlugin; +import org.elasticsearch.tasks.Task; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; public class APMTracer extends AbstractLifecycleComponent implements TracingPlugin.Tracer { - private static final Logger logger = LogManager.getLogger(APMTracer.class); + public static final CapturingSpanExporter CAPTURING_SPAN_EXPORTER = new CapturingSpanExporter(); - private volatile Tracer tracer; + private final Map taskSpans = ConcurrentCollections.newConcurrentMap(); - public APMTracer() {} + private volatile Tracer tracer; @Override protected void doStart() { SdkTracerProvider sdkTracerProvider = SdkTracerProvider.builder() - .addSpanProcessor(SimpleSpanProcessor.create(new LoggingSpanExporter())) + .addSpanProcessor(SimpleSpanProcessor.create(CAPTURING_SPAN_EXPORTER)) .build(); OpenTelemetry openTelemetry = 
OpenTelemetrySdk.builder() .setTracerProvider(sdkTracerProvider) .setPropagators(ContextPropagators.create(W3CTraceContextPropagator.getInstance())) .build(); + tracer = openTelemetry.getTracer("elasticsearch", Version.CURRENT.toString()); tracer.spanBuilder("startup").startSpan().end(); } @@ -52,12 +60,51 @@ protected void doStop() {} protected void doClose() {} @Override - public void trace(String something) { + public void onTaskRegistered(Task task) { final Tracer tracer = this.tracer; - if (tracer == null) { - return; + if (tracer != null) { + taskSpans.computeIfAbsent(task.getId(), taskId -> { + final Span span = tracer.spanBuilder(task.getAction()).startSpan(); + span.setAttribute("es.task.id", task.getId()); + return span; + }); + } + } + + @Override + public void onTaskUnregistered(Task task) { + final Span span = taskSpans.remove(task.getId()); + if (span != null) { + span.end(); + } + } + + public static class CapturingSpanExporter implements SpanExporter { + + private List capturedSpans = new ArrayList<>(); + + public void clear() { + capturedSpans.clear(); + } + + public List getCapturedSpans() { + return List.copyOf(capturedSpans); + } + + @Override + public CompletableResultCode export(Collection spans) { + capturedSpans.addAll(spans); + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode flush() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode shutdown() { + return CompletableResultCode.ofSuccess(); } - final Span span = tracer.spanBuilder("something").startSpan(); - span.end(); } } From d5a250394361155543d6425a2f3f1c484992e67c Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Wed, 17 Nov 2021 10:33:26 +0100 Subject: [PATCH 04/90] Use OpenTelemetry with HTTP/gRPC exporters in apm-integration (#80762) This pull request adds support for HTTP/gRPC with the OpenTelemetry SDK in the `apm-integration` plugin. It adds the required security permissions to the plugin to make it work - we should try to reduce this list one day. The plumbing to pass Elastic's Cloud APM server credentials is not fully there yet but the current `ApmIT` test can be executed with `-Dtests.apm.endpoint=https://... -Dtests.apm.token=ABC` and traces should be sent to Elastic APM.
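For illustration, here is a minimal sketch of how such an OTLP/gRPC exporter could be wired up from those system properties. The `tests.apm.endpoint` and `tests.apm.token` property names come from the commit message above; the `OtlpGrpcSpanExporter` builder calls, the `Authorization: Bearer` header, and the class/method names are assumptions for the sketch, not the exact code introduced by this commit.

```java
// Hypothetical wiring sketch (not the exact implementation in this commit).
// Assumes the APM endpoint and secret token arrive via the tests.apm.* system
// properties mentioned above, and that the server accepts a Bearer token header.
import io.opentelemetry.api.OpenTelemetry;
import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator;
import io.opentelemetry.context.propagation.ContextPropagators;
import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter;
import io.opentelemetry.sdk.OpenTelemetrySdk;
import io.opentelemetry.sdk.trace.SdkTracerProvider;
import io.opentelemetry.sdk.trace.export.BatchSpanProcessor;

public class OtlpWiringSketch {

    public static OpenTelemetry build() {
        String endpoint = System.getProperty("tests.apm.endpoint", "http://localhost:8200");
        String token = System.getProperty("tests.apm.token", "");

        // Ship spans over OTLP/gRPC to the APM server, authenticating with the secret token.
        OtlpGrpcSpanExporter exporter = OtlpGrpcSpanExporter.builder()
            .setEndpoint(endpoint)
            .addHeader("Authorization", "Bearer " + token)
            .build();

        // Batch spans before export rather than sending them one at a time.
        SdkTracerProvider tracerProvider = SdkTracerProvider.builder()
            .addSpanProcessor(BatchSpanProcessor.builder(exporter).build())
            .build();

        return OpenTelemetrySdk.builder()
            .setTracerProvider(tracerProvider)
            .setPropagators(ContextPropagators.create(W3CTraceContextPropagator.getInstance()))
            .build();
    }
}
```

Inside an Elasticsearch plugin, wiring like this is also why the commit adds extra grants to `plugin-security.policy`: the gRPC/netty stack needs network and reflection permissions that plugins do not get by default.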
_Works on my machine™_ --- x-pack/plugin/apm-integration/build.gradle | 152 ++++++++++++- .../licenses/failureaccess-1.0.1.jar.sha1 | 1 + ...-LICENSE.txt => failureaccess-LICENSE.txt} | 0 ...pi-NOTICE.txt => failureaccess-NOTICE.txt} | 0 ...dk-common-LICENSE.txt => grpc-LICENSE.txt} | 1 + .../apm-integration/licenses/grpc-NOTICE.txt | 62 ++++++ .../licenses/grpc-api-1.42.1.jar.sha1 | 1 + .../licenses/grpc-context-1.42.1.jar.sha1 | 1 + .../licenses/grpc-core-1.42.1.jar.sha1 | 1 + .../licenses/grpc-netty-1.42.1.jar.sha1 | 1 + .../licenses/grpc-stub-1.42.1.jar.sha1 | 1 + .../licenses/guava-31.0.1-jre.jar.sha1 | 1 + ...dk-trace-LICENSE.txt => guava-LICENSE.txt} | 1 + ...pi-metrics-NOTICE.txt => guava-NOTICE.txt} | 0 ...-metrics-LICENSE.txt => netty-LICENSE.txt} | 1 + .../apm-integration/licenses/netty-NOTICE.txt | 116 ++++++++++ .../netty-buffer-4.1.66.Final.jar.sha1 | 1 + .../netty-codec-4.1.66.Final.jar.sha1 | 1 + .../netty-codec-http-4.1.66.Final.jar.sha1 | 1 + .../netty-codec-http2-4.1.66.Final.jar.sha1 | 1 + .../netty-common-4.1.66.Final.jar.sha1 | 1 + .../netty-handler-4.1.66.Final.jar.sha1 | 1 + .../netty-resolver-4.1.66.Final.jar.sha1 | 1 + .../netty-transport-4.1.66.Final.jar.sha1 | 1 + .../licenses/okhttp-3.14.9.jar.sha1 | 1 + ...metrics-LICENSE.txt => okhttp-LICENSE.txt} | 0 ...y-context-NOTICE.txt => okhttp-NOTICE.txt} | 0 .../licenses/okio-1.17.2.jar.sha1 | 1 + ...y-context-LICENSE.txt => okio-LICENSE.txt} | 0 ...ter-logging-NOTICE.txt => okio-NOTICE.txt} | 0 ...-LICENSE.txt => opentelemetry-LICENSE.txt} | 0 ...dk-NOTICE.txt => opentelemetry-NOTICE.txt} | 0 ...emetry-exporter-otlp-common-1.9.0.jar.sha1 | 1 + ...lemetry-exporter-otlp-trace-1.9.0.jar.sha1 | 1 + .../opentelemetry-sdk-common-NOTICE.txt | 0 .../opentelemetry-sdk-metrics-NOTICE.txt | 0 .../opentelemetry-sdk-trace-NOTICE.txt | 0 .../opentelemetry-semconv-LICENSE.txt | 201 ------------------ .../licenses/opentelemetry-semconv-NOTICE.txt | 0 ...y-sdk-LICENSE.txt => perfmark-LICENSE.txt} | 0 .../licenses/perfmark-NOTICE.txt | 41 ++++ .../licenses/perfmark-api-0.24.0.jar.sha1 | 1 + .../licenses/perfmark-impl-0.24.0.jar.sha1 | 1 + .../org/elasticsearch/xpack/apm/ApmIT.java | 10 + .../java/org/elasticsearch/xpack/apm/APM.java | 14 +- .../elasticsearch/xpack/apm/APMTracer.java | 85 +++++++- .../plugin-metadata/plugin-security.policy | 17 ++ 47 files changed, 512 insertions(+), 210 deletions(-) create mode 100644 x-pack/plugin/apm-integration/licenses/failureaccess-1.0.1.jar.sha1 rename x-pack/plugin/apm-integration/licenses/{opentelemetry-api-LICENSE.txt => failureaccess-LICENSE.txt} (100%) rename x-pack/plugin/apm-integration/licenses/{opentelemetry-api-NOTICE.txt => failureaccess-NOTICE.txt} (100%) rename x-pack/plugin/apm-integration/licenses/{opentelemetry-sdk-common-LICENSE.txt => grpc-LICENSE.txt} (99%) create mode 100644 x-pack/plugin/apm-integration/licenses/grpc-NOTICE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/grpc-api-1.42.1.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/grpc-context-1.42.1.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/grpc-core-1.42.1.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/grpc-netty-1.42.1.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/grpc-stub-1.42.1.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/guava-31.0.1-jre.jar.sha1 rename x-pack/plugin/apm-integration/licenses/{opentelemetry-sdk-trace-LICENSE.txt => guava-LICENSE.txt} (99%) rename 
x-pack/plugin/apm-integration/licenses/{opentelemetry-api-metrics-NOTICE.txt => guava-NOTICE.txt} (100%) rename x-pack/plugin/apm-integration/licenses/{opentelemetry-sdk-metrics-LICENSE.txt => netty-LICENSE.txt} (99%) create mode 100644 x-pack/plugin/apm-integration/licenses/netty-NOTICE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/netty-buffer-4.1.66.Final.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/netty-codec-4.1.66.Final.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/netty-codec-http-4.1.66.Final.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/netty-codec-http2-4.1.66.Final.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/netty-common-4.1.66.Final.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/netty-handler-4.1.66.Final.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/netty-resolver-4.1.66.Final.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/netty-transport-4.1.66.Final.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/okhttp-3.14.9.jar.sha1 rename x-pack/plugin/apm-integration/licenses/{opentelemetry-api-metrics-LICENSE.txt => okhttp-LICENSE.txt} (100%) rename x-pack/plugin/apm-integration/licenses/{opentelemetry-context-NOTICE.txt => okhttp-NOTICE.txt} (100%) create mode 100644 x-pack/plugin/apm-integration/licenses/okio-1.17.2.jar.sha1 rename x-pack/plugin/apm-integration/licenses/{opentelemetry-context-LICENSE.txt => okio-LICENSE.txt} (100%) rename x-pack/plugin/apm-integration/licenses/{opentelemetry-exporter-logging-NOTICE.txt => okio-NOTICE.txt} (100%) rename x-pack/plugin/apm-integration/licenses/{opentelemetry-exporter-logging-LICENSE.txt => opentelemetry-LICENSE.txt} (100%) rename x-pack/plugin/apm-integration/licenses/{opentelemetry-sdk-NOTICE.txt => opentelemetry-NOTICE.txt} (100%) create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-otlp-common-1.9.0.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-otlp-trace-1.9.0.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-NOTICE.txt delete mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-NOTICE.txt delete mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-NOTICE.txt delete mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-LICENSE.txt delete mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-NOTICE.txt rename x-pack/plugin/apm-integration/licenses/{opentelemetry-sdk-LICENSE.txt => perfmark-LICENSE.txt} (100%) create mode 100644 x-pack/plugin/apm-integration/licenses/perfmark-NOTICE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/perfmark-api-0.24.0.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/perfmark-impl-0.24.0.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy diff --git a/x-pack/plugin/apm-integration/build.gradle b/x-pack/plugin/apm-integration/build.gradle index 4178826c1d3d6..2616d09aa15b4 100644 --- a/x-pack/plugin/apm-integration/build.gradle +++ b/x-pack/plugin/apm-integration/build.gradle @@ -25,6 +25,34 @@ dependencies { implementation "io.opentelemetry:opentelemetry-sdk-metrics:${versions.opentelemetry}-alpha" implementation "io.opentelemetry:opentelemetry-semconv:${versions.opentelemetry}-alpha" implementation 
"io.opentelemetry:opentelemetry-exporter-logging:${versions.opentelemetry}" + // required to use OTLP (to get OtlpGrpcSpanExporter) + implementation "io.opentelemetry:opentelemetry-exporter-otlp-trace:${versions.opentelemetry}" + // required by OTLP (to get GrpcExporter, Marshaller etc) + implementation "io.opentelemetry:opentelemetry-exporter-otlp-common:${versions.opentelemetry}" + // required by OTLP common & trace + implementation "io.grpc:grpc-api:1.42.1" + implementation "io.grpc:grpc-stub:1.42.1" + implementation "io.grpc:grpc-core:1.42.1" + implementation "io.grpc:grpc-context:1.42.1" + // netty HTTP client is used for gRPC calls to Elastic's APM server + implementation "io.grpc:grpc-netty:1.42.1" + // okio and okhttp are required by GrpcExporter as the default implementation, but we don't use it + implementation "com.squareup.okhttp3:okhttp:3.14.9" + implementation "com.squareup.okio:okio:1.17.2" + // required by io.grpc + implementation 'io.perfmark:perfmark-api:0.24.0' + implementation 'io.perfmark:perfmark-impl:0.24.0' + implementation 'com.google.guava:failureaccess:1.0.1' + // required by grpc-netty + api "io.netty:netty-buffer:${versions.netty}" + api "io.netty:netty-transport:${versions.netty}" + api "io.netty:netty-common:${versions.netty}" + api "io.netty:netty-codec:${versions.netty}" + api "io.netty:netty-codec-http:${versions.netty}" + api "io.netty:netty-codec-http2:${versions.netty}" + api "io.netty:netty-handler:${versions.netty}" + api "io.netty:netty-resolver:${versions.netty}" + runtimeOnly 'com.google.guava:guava:31.0.1-jre' compileOnly project(path: xpackModule('core')) internalClusterTestImplementation(testArtifact(project(xpackModule('core')))) @@ -33,17 +61,137 @@ dependencies { // no unit-test for now tasks.named("test").configure { enabled = false } +tasks.named("dependencyLicenses").configure { + mapping from: /opentelemetry-.*/, to: 'opentelemetry' + mapping from: /grpc-.*/, to: 'grpc' + mapping from: /netty-.*/, to: 'netty' + mapping from: /perfmark-.*/, to: 'perfmark' +} + tasks.named("thirdPartyAudit").configure { ignoreViolations( 'io.opentelemetry.internal.shaded.jctools.queues.MpscArrayQueueConsumerIndexField', 'io.opentelemetry.internal.shaded.jctools.queues.MpscArrayQueueProducerIndexField', 'io.opentelemetry.internal.shaded.jctools.queues.MpscArrayQueueProducerLimitField', 'io.opentelemetry.internal.shaded.jctools.util.UnsafeAccess', - 'io.opentelemetry.internal.shaded.jctools.util.UnsafeRefArrayAccess' + 'io.opentelemetry.internal.shaded.jctools.util.UnsafeRefArrayAccess', + 'com.google.common.cache.Striped64', + 'com.google.common.cache.Striped64$1', + 'com.google.common.cache.Striped64$Cell', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', + 'com.google.common.hash.Striped64', + 'com.google.common.hash.Striped64$1', + 'com.google.common.hash.Striped64$Cell', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', + 'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator', + 'io.netty.util.internal.PlatformDependent0', + 
'io.netty.util.internal.PlatformDependent0$1', + 'io.netty.util.internal.PlatformDependent0$2', + 'io.netty.util.internal.PlatformDependent0$3', + 'io.netty.util.internal.PlatformDependent0$5', + 'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueConsumerNodeRef', + 'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueProducerNodeRef', + 'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueColdProducerFields', + 'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueConsumerFields', + 'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueProducerFields', + 'io.netty.util.internal.shaded.org.jctools.queues.LinkedQueueNode', + 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueConsumerIndexField', + 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerIndexField', + 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerLimitField', + 'io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess', + 'io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess' ) ignoreMissingClasses( 'io.opentelemetry.sdk.logs.data.Body', 'io.opentelemetry.sdk.logs.data.LogData', - 'io.opentelemetry.sdk.logs.export.LogExporter' + 'io.opentelemetry.sdk.logs.export.LogExporter', + 'android.net.ssl.SSLSockets', + 'android.os.Build$VERSION', + 'android.util.Log', + 'com.aayushatharva.brotli4j.Brotli4jLoader', + 'com.aayushatharva.brotli4j.decoder.DecoderJNI$Status', + 'com.aayushatharva.brotli4j.decoder.DecoderJNI$Wrapper', + 'com.aayushatharva.brotli4j.encoder.Encoder', + 'com.aayushatharva.brotli4j.encoder.Encoder$Mode', + 'com.aayushatharva.brotli4j.encoder.Encoder$Parameters', + 'com.github.luben.zstd.Zstd', + 'com.google.gson.stream.JsonReader', + 'com.google.gson.stream.JsonToken', + 'com.google.protobuf.ExtensionRegistry', + 'com.google.protobuf.ExtensionRegistryLite', + 'com.google.protobuf.MessageLite', + 'com.google.protobuf.MessageLite$Builder', + 'com.google.protobuf.MessageLiteOrBuilder', + 'com.google.protobuf.Parser', + 'com.google.protobuf.nano.CodedOutputByteBufferNano', + 'com.google.protobuf.nano.MessageNano', + 'com.jcraft.jzlib.Deflater', + 'com.jcraft.jzlib.Inflater', + 'com.jcraft.jzlib.JZlib', + 'com.jcraft.jzlib.JZlib$WrapperType', + 'com.ning.compress.BufferRecycler', + 'com.ning.compress.lzf.ChunkDecoder', + 'com.ning.compress.lzf.ChunkEncoder', + 'com.ning.compress.lzf.LZFChunk', + 'com.ning.compress.lzf.LZFEncoder', + 'com.ning.compress.lzf.util.ChunkDecoderFactory', + 'com.ning.compress.lzf.util.ChunkEncoderFactory', + 'io.grpc.netty.shaded.io.grpc.netty.GrpcSslContexts', + 'io.grpc.netty.shaded.io.grpc.netty.NettyChannelBuilder', + 'io.grpc.netty.shaded.io.netty.handler.ssl.SslContextBuilder', + 'io.grpc.okhttp.OkHttpChannelBuilder', + 'io.netty.handler.proxy.HttpProxyHandler', + 'io.netty.internal.tcnative.AsyncSSLPrivateKeyMethod', + 'io.netty.internal.tcnative.AsyncTask', + 'io.netty.internal.tcnative.Buffer', + 'io.netty.internal.tcnative.CertificateCallback', + 'io.netty.internal.tcnative.CertificateVerifier', + 'io.netty.internal.tcnative.Library', + 'io.netty.internal.tcnative.ResultCallback', + 'io.netty.internal.tcnative.SSL', + 'io.netty.internal.tcnative.SSLContext', + 'io.netty.internal.tcnative.SSLPrivateKeyMethod', + 'io.netty.internal.tcnative.SSLSession', + 'io.netty.internal.tcnative.SSLSessionCache', + 'io.netty.internal.tcnative.SessionTicketKey', + 'io.netty.internal.tcnative.SniHostNameMatcher', + 
'io.opentelemetry.sdk.logs.data.Severity', + 'lzma.sdk.lzma.Encoder', + 'org.bouncycastle.asn1.x500.X500Name', + 'org.bouncycastle.cert.X509v3CertificateBuilder', + 'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter', + 'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder', + 'org.bouncycastle.jce.provider.BouncyCastleProvider', + 'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder', + 'org.conscrypt.AllocatedBuffer', + 'org.conscrypt.BufferAllocator', + 'org.conscrypt.Conscrypt', + 'org.conscrypt.Conscrypt$ProviderBuilder', + 'org.conscrypt.HandshakeListener', + 'org.eclipse.jetty.alpn.ALPN', + 'org.eclipse.jetty.alpn.ALPN$ClientProvider', + 'org.eclipse.jetty.alpn.ALPN$ServerProvider', + 'org.eclipse.jetty.npn.NextProtoNego', + 'org.eclipse.jetty.npn.NextProtoNego$ClientProvider', + 'org.eclipse.jetty.npn.NextProtoNego$ServerProvider', + 'org.jboss.marshalling.ByteInput', + 'org.jboss.marshalling.ByteOutput', + 'org.jboss.marshalling.Marshaller', + 'org.jboss.marshalling.MarshallerFactory', + 'org.jboss.marshalling.MarshallingConfiguration', + 'org.jboss.marshalling.Unmarshaller', + 'org.slf4j.Logger', + 'org.slf4j.LoggerFactory', + 'org.slf4j.helpers.FormattingTuple', + 'org.slf4j.helpers.MessageFormatter', + 'org.slf4j.spi.LocationAwareLogger', + 'reactor.blockhound.BlockHound$Builder', + 'reactor.blockhound.integration.BlockHoundIntegration' ) } diff --git a/x-pack/plugin/apm-integration/licenses/failureaccess-1.0.1.jar.sha1 b/x-pack/plugin/apm-integration/licenses/failureaccess-1.0.1.jar.sha1 new file mode 100644 index 0000000000000..4798b37e20691 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/failureaccess-1.0.1.jar.sha1 @@ -0,0 +1 @@ +1dcf1de382a0bf95a3d8b0849546c88bac1292c9 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-api-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/failureaccess-LICENSE.txt similarity index 100% rename from x-pack/plugin/apm-integration/licenses/opentelemetry-api-LICENSE.txt rename to x-pack/plugin/apm-integration/licenses/failureaccess-LICENSE.txt diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-api-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/failureaccess-NOTICE.txt similarity index 100% rename from x-pack/plugin/apm-integration/licenses/opentelemetry-api-NOTICE.txt rename to x-pack/plugin/apm-integration/licenses/failureaccess-NOTICE.txt diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/grpc-LICENSE.txt similarity index 99% rename from x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-LICENSE.txt rename to x-pack/plugin/apm-integration/licenses/grpc-LICENSE.txt index 261eeb9e9f8b2..d645695673349 100644 --- a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-LICENSE.txt +++ b/x-pack/plugin/apm-integration/licenses/grpc-LICENSE.txt @@ -1,3 +1,4 @@ + Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ diff --git a/x-pack/plugin/apm-integration/licenses/grpc-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/grpc-NOTICE.txt new file mode 100644 index 0000000000000..f70c5620cf75a --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/grpc-NOTICE.txt @@ -0,0 +1,62 @@ +Copyright 2014 The gRPC Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +----------------------------------------------------------------------- + +This product contains a modified portion of 'OkHttp', an open source +HTTP & SPDY client for Android and Java applications, which can be obtained +at: + + * LICENSE: + * okhttp/third_party/okhttp/LICENSE (Apache License 2.0) + * HOMEPAGE: + * https://github.com/square/okhttp + * LOCATION_IN_GRPC: + * okhttp/third_party/okhttp + +This product contains a modified portion of 'Envoy', an open source +cloud-native high-performance edge/middle/service proxy, which can be +obtained at: + + * LICENSE: + * xds/third_party/envoy/LICENSE (Apache License 2.0) + * NOTICE: + * xds/third_party/envoy/NOTICE + * HOMEPAGE: + * https://www.envoyproxy.io + * LOCATION_IN_GRPC: + * xds/third_party/envoy + +This product contains a modified portion of 'protoc-gen-validate (PGV)', +an open source protoc plugin to generate polyglot message validators, +which can be obtained at: + + * LICENSE: + * xds/third_party/protoc-gen-validate/LICENSE (Apache License 2.0) + * NOTICE: + * xds/third_party/protoc-gen-validate/NOTICE + * HOMEPAGE: + * https://github.com/envoyproxy/protoc-gen-validate + * LOCATION_IN_GRPC: + * xds/third_party/protoc-gen-validate + +This product contains a modified portion of 'udpa', +an open source universal data plane API, which can be obtained at: + + * LICENSE: + * xds/third_party/udpa/LICENSE (Apache License 2.0) + * HOMEPAGE: + * https://github.com/cncf/udpa + * LOCATION_IN_GRPC: + * xds/third_party/udpa diff --git a/x-pack/plugin/apm-integration/licenses/grpc-api-1.42.1.jar.sha1 b/x-pack/plugin/apm-integration/licenses/grpc-api-1.42.1.jar.sha1 new file mode 100644 index 0000000000000..bab20c7d44d7e --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/grpc-api-1.42.1.jar.sha1 @@ -0,0 +1 @@ +4a7f734f57ad5b68e4ac591481eb562cdb3d2a94 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/grpc-context-1.42.1.jar.sha1 b/x-pack/plugin/apm-integration/licenses/grpc-context-1.42.1.jar.sha1 new file mode 100644 index 0000000000000..b85128c4772ec --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/grpc-context-1.42.1.jar.sha1 @@ -0,0 +1 @@ +c0cc9e5e08ced39792908aeda77e694bff39cea1 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/grpc-core-1.42.1.jar.sha1 b/x-pack/plugin/apm-integration/licenses/grpc-core-1.42.1.jar.sha1 new file mode 100644 index 0000000000000..71d9cdc9b7f1e --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/grpc-core-1.42.1.jar.sha1 @@ -0,0 +1 @@ +2d142647452a700189908baa488dc928233e8be9 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/grpc-netty-1.42.1.jar.sha1 b/x-pack/plugin/apm-integration/licenses/grpc-netty-1.42.1.jar.sha1 new file mode 100644 index 0000000000000..764083f762f63 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/grpc-netty-1.42.1.jar.sha1 @@ -0,0 +1 @@ +f2bdcaf11b237122efbd8a30e4177250fde5b458 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/grpc-stub-1.42.1.jar.sha1 b/x-pack/plugin/apm-integration/licenses/grpc-stub-1.42.1.jar.sha1 new file mode 
100644 index 0000000000000..d3390f4e63af9 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/grpc-stub-1.42.1.jar.sha1 @@ -0,0 +1 @@ +6fa0c2fb4ff581c89b4aab2d47fb2b568503f630 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/guava-31.0.1-jre.jar.sha1 b/x-pack/plugin/apm-integration/licenses/guava-31.0.1-jre.jar.sha1 new file mode 100644 index 0000000000000..1906a4f95370c --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/guava-31.0.1-jre.jar.sha1 @@ -0,0 +1 @@ +119ea2b2bc205b138974d351777b20f02b92704b \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/guava-LICENSE.txt similarity index 99% rename from x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-LICENSE.txt rename to x-pack/plugin/apm-integration/licenses/guava-LICENSE.txt index 261eeb9e9f8b2..d645695673349 100644 --- a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-LICENSE.txt +++ b/x-pack/plugin/apm-integration/licenses/guava-LICENSE.txt @@ -1,3 +1,4 @@ + Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-api-metrics-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/guava-NOTICE.txt similarity index 100% rename from x-pack/plugin/apm-integration/licenses/opentelemetry-api-metrics-NOTICE.txt rename to x-pack/plugin/apm-integration/licenses/guava-NOTICE.txt diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/netty-LICENSE.txt similarity index 99% rename from x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-LICENSE.txt rename to x-pack/plugin/apm-integration/licenses/netty-LICENSE.txt index 261eeb9e9f8b2..d645695673349 100644 --- a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-LICENSE.txt +++ b/x-pack/plugin/apm-integration/licenses/netty-LICENSE.txt @@ -1,3 +1,4 @@ + Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ diff --git a/x-pack/plugin/apm-integration/licenses/netty-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/netty-NOTICE.txt new file mode 100644 index 0000000000000..5bbf91a14de23 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/netty-NOTICE.txt @@ -0,0 +1,116 @@ + + The Netty Project + ================= + +Please visit the Netty web site for more information: + + * http://netty.io/ + +Copyright 2011 The Netty Project + +The Netty Project licenses this file to you under the Apache License, +version 2.0 (the "License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at: + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +License for the specific language governing permissions and limitations +under the License. + +Also, please refer to each LICENSE..txt file, which is located in +the 'license' directory of the distribution file, for the license terms of the +components that this product depends on. + +------------------------------------------------------------------------------- +This product contains the extensions to Java Collections Framework which has +been derived from the works by JSR-166 EG, Doug Lea, and Jason T. 
Greene: + + * LICENSE: + * license/LICENSE.jsr166y.txt (Public Domain) + * HOMEPAGE: + * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ + * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ + +This product contains a modified version of Robert Harder's Public Domain +Base64 Encoder and Decoder, which can be obtained at: + + * LICENSE: + * license/LICENSE.base64.txt (Public Domain) + * HOMEPAGE: + * http://iharder.sourceforge.net/current/java/base64/ + +This product contains a modified version of 'JZlib', a re-implementation of +zlib in pure Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.jzlib.txt (BSD Style License) + * HOMEPAGE: + * http://www.jcraft.com/jzlib/ + +This product contains a modified version of 'Webbit', a Java event based +WebSocket and HTTP server: + + * LICENSE: + * license/LICENSE.webbit.txt (BSD License) + * HOMEPAGE: + * https://github.com/joewalnes/webbit + +This product optionally depends on 'Protocol Buffers', Google's data +interchange format, which can be obtained at: + + * LICENSE: + * license/LICENSE.protobuf.txt (New BSD License) + * HOMEPAGE: + * http://code.google.com/p/protobuf/ + +This product optionally depends on 'Bouncy Castle Crypto APIs' to generate +a temporary self-signed X.509 certificate when the JVM does not provide the +equivalent functionality. It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * http://www.bouncycastle.org/ + +This product optionally depends on 'SLF4J', a simple logging facade for Java, +which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * http://www.slf4j.org/ + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * http://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * http://logging.apache.org/log4j/ + +This product optionally depends on 'JBoss Logging', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-logging.txt (GNU LGPL 2.1) + * HOMEPAGE: + * http://anonsvn.jboss.org/repos/common/common-logging-spi/ + +This product optionally depends on 'Apache Felix', an open source OSGi +framework implementation, which can be obtained at: + + * LICENSE: + * license/LICENSE.felix.txt (Apache License 2.0) + * HOMEPAGE: + * http://felix.apache.org/ diff --git a/x-pack/plugin/apm-integration/licenses/netty-buffer-4.1.66.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-buffer-4.1.66.Final.jar.sha1 new file mode 100644 index 0000000000000..973ba015d2079 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/netty-buffer-4.1.66.Final.jar.sha1 @@ -0,0 +1 @@ +8d4be9506ea5f54af58bcd596ba3fe2fc5036413 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-codec-4.1.66.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-codec-4.1.66.Final.jar.sha1 new file mode 100644 index 0000000000000..ae8837c2664a8 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/netty-codec-4.1.66.Final.jar.sha1 @@ -0,0 +1 @@ +e7cfff848e6c1294645638d74fce6ad89cc6f3f3 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-codec-http-4.1.66.Final.jar.sha1 
b/x-pack/plugin/apm-integration/licenses/netty-codec-http-4.1.66.Final.jar.sha1 new file mode 100644 index 0000000000000..74435145e041c --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/netty-codec-http-4.1.66.Final.jar.sha1 @@ -0,0 +1 @@ +15fff6bae9e4b09ba5d48a70bb88841c9fc22a32 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-codec-http2-4.1.66.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-codec-http2-4.1.66.Final.jar.sha1 new file mode 100644 index 0000000000000..3b563c112dcc5 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/netty-codec-http2-4.1.66.Final.jar.sha1 @@ -0,0 +1 @@ +83f51766236096bd6d493a9f858711fd7974268e \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-common-4.1.66.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-common-4.1.66.Final.jar.sha1 new file mode 100644 index 0000000000000..164add2d48e57 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/netty-common-4.1.66.Final.jar.sha1 @@ -0,0 +1 @@ +d1c4eda38f525a02fb1ea8d94a8d98dc2935fd02 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-handler-4.1.66.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-handler-4.1.66.Final.jar.sha1 new file mode 100644 index 0000000000000..657b3ad736c1e --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/netty-handler-4.1.66.Final.jar.sha1 @@ -0,0 +1 @@ +1e6ec9b58725a96b2bd0f173709b59c79175225c \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-resolver-4.1.66.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-resolver-4.1.66.Final.jar.sha1 new file mode 100644 index 0000000000000..4a085c20c9ec0 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/netty-resolver-4.1.66.Final.jar.sha1 @@ -0,0 +1 @@ +2f018d8df6f533c3d75dc5fdb11071bc2e7b591b \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-transport-4.1.66.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-transport-4.1.66.Final.jar.sha1 new file mode 100644 index 0000000000000..c21ce614d86e9 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/netty-transport-4.1.66.Final.jar.sha1 @@ -0,0 +1 @@ +3511bc4e13198de644eefe4c8c758245145da128 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/okhttp-3.14.9.jar.sha1 b/x-pack/plugin/apm-integration/licenses/okhttp-3.14.9.jar.sha1 new file mode 100644 index 0000000000000..b6f5113e444cc --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/okhttp-3.14.9.jar.sha1 @@ -0,0 +1 @@ +3e6d101343c7ea687cd593e4990f73b25c878383 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-api-metrics-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/okhttp-LICENSE.txt similarity index 100% rename from x-pack/plugin/apm-integration/licenses/opentelemetry-api-metrics-LICENSE.txt rename to x-pack/plugin/apm-integration/licenses/okhttp-LICENSE.txt diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-context-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/okhttp-NOTICE.txt similarity index 100% rename from x-pack/plugin/apm-integration/licenses/opentelemetry-context-NOTICE.txt rename to x-pack/plugin/apm-integration/licenses/okhttp-NOTICE.txt diff --git a/x-pack/plugin/apm-integration/licenses/okio-1.17.2.jar.sha1 b/x-pack/plugin/apm-integration/licenses/okio-1.17.2.jar.sha1 new file mode 100644 index 0000000000000..bf2e361cabc50 --- /dev/null +++ 
b/x-pack/plugin/apm-integration/licenses/okio-1.17.2.jar.sha1 @@ -0,0 +1 @@ +78c7820b205002da4d2d137f6f312bd64b3d6049 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-context-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/okio-LICENSE.txt similarity index 100% rename from x-pack/plugin/apm-integration/licenses/opentelemetry-context-LICENSE.txt rename to x-pack/plugin/apm-integration/licenses/okio-LICENSE.txt diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-logging-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/okio-NOTICE.txt similarity index 100% rename from x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-logging-NOTICE.txt rename to x-pack/plugin/apm-integration/licenses/okio-NOTICE.txt diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-logging-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-LICENSE.txt similarity index 100% rename from x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-logging-LICENSE.txt rename to x-pack/plugin/apm-integration/licenses/opentelemetry-LICENSE.txt diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-NOTICE.txt similarity index 100% rename from x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-NOTICE.txt rename to x-pack/plugin/apm-integration/licenses/opentelemetry-NOTICE.txt diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-otlp-common-1.9.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-otlp-common-1.9.0.jar.sha1 new file mode 100644 index 0000000000000..31a9e9a11a774 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-otlp-common-1.9.0.jar.sha1 @@ -0,0 +1 @@ +ae38f65225d617d80d7b6b4abf109b6edda08112 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-otlp-trace-1.9.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-otlp-trace-1.9.0.jar.sha1 new file mode 100644 index 0000000000000..cbc26ba2d7da1 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-otlp-trace-1.9.0.jar.sha1 @@ -0,0 +1 @@ +395fbd7c26796cf5233f003afc20fad7479f9a6f \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-NOTICE.txt deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-NOTICE.txt deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-NOTICE.txt deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-LICENSE.txt deleted file mode 100644 index 261eeb9e9f8b2..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-LICENSE.txt +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. 
- - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-NOTICE.txt deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/perfmark-LICENSE.txt similarity index 100% rename from x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-LICENSE.txt rename to x-pack/plugin/apm-integration/licenses/perfmark-LICENSE.txt diff --git a/x-pack/plugin/apm-integration/licenses/perfmark-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/perfmark-NOTICE.txt new file mode 100644 index 0000000000000..63e4853415dff --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/perfmark-NOTICE.txt @@ -0,0 +1,41 @@ + +Copyright 2019 Google LLC + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +----------------------------------------------------------------------- + +This product contains a modified portion of 'Catapult', an open source +Trace Event viewer for Chome, Linux, and Android applications, which can +be obtained at: + + * LICENSE: + * traceviewer/src/main/resources/io/perfmark/traceviewer/third_party/catapult/LICENSE (New BSD License) + * HOMEPAGE: + * https://github.com/catapult-project/catapult + +This product contains a modified portion of 'Polymer', a library for Web +Components, which can be obtained at: + * LICENSE: + * traceviewer/src/main/resources/io/perfmark/traceviewer/third_party/polymer/LICENSE (New BSD License) + * HOMEPAGE: + * https://github.com/Polymer/polymer + + +This product contains a modified portion of 'ASM', an open source +Java Bytecode library, which can be obtained at: + + * LICENSE: + * agent/src/main/resources/io/perfmark/agent/third_party/asm/LICENSE (BSD style License) + * HOMEPAGE: + * https://asm.ow2.io/ diff --git a/x-pack/plugin/apm-integration/licenses/perfmark-api-0.24.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/perfmark-api-0.24.0.jar.sha1 new file mode 100644 index 0000000000000..15b718b038ff5 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/perfmark-api-0.24.0.jar.sha1 @@ -0,0 +1 @@ +135f31424e015f26aa8af8f6df8add4490acac22 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/perfmark-impl-0.24.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/perfmark-impl-0.24.0.jar.sha1 new file mode 100644 index 0000000000000..7d2e3e0d40f4e --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/perfmark-impl-0.24.0.jar.sha1 @@ -0,0 +1 @@ +7c0a611d5eda67cc8dfddad9af1c626ed3da91a2 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java index 070b05a6a75c3..b2fcb6b6b9a4f 100644 --- a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java +++ 
b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -10,6 +10,8 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.sdk.trace.data.SpanData; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; @@ -34,6 +36,14 @@ protected Collection> nodePlugins() { return CollectionUtils.appendToCopy(super.nodePlugins(), APM.class); } + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(APMTracer.APM_ENDPOINT_SETTING.getKey(), System.getProperty("tests.apm.endpoint", "")); + secureSettings.setString(APMTracer.APM_TOKEN_SETTING.getKey(), System.getProperty("tests.apm.token", "")); + return Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)).setSecureSettings(secureSettings).build(); + } + public void testModule() { List plugins = internalCluster().getMasterNodeInstance(PluginsService.class).filterPlugins(TracingPlugin.class); assertThat(plugins, hasSize(1)); diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java index 948665d8f1afa..5fbb72c36f4e5 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java @@ -12,6 +12,8 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.plugins.Plugin; @@ -29,6 +31,11 @@ public class APM extends Plugin implements TracingPlugin { private final SetOnce tracer = new SetOnce<>(); + private final Settings settings; + + public APM(Settings settings) { + this.settings = settings; + } @Override public Collection createComponents( @@ -44,7 +51,12 @@ public Collection createComponents( IndexNameExpressionResolver indexNameExpressionResolver, Supplier repositoriesServiceSupplier ) { - tracer.set(new APMTracer()); + tracer.set(new APMTracer(settings, clusterService)); return List.of(tracer.get()); } + + @Override + public List> getSettings() { + return List.of(APMTracer.APM_ENDPOINT_SETTING, APMTracer.APM_TOKEN_SETTING); + } } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 0a055401dcba3..e89a76f092db9 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -8,53 +8,104 @@ package org.elasticsearch.xpack.apm; import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.Tracer; import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; import 
io.opentelemetry.context.propagation.ContextPropagators; +import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.SpanProcessor; import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; import io.opentelemetry.sdk.trace.export.SpanExporter; +import io.opentelemetry.semconv.resource.attributes.ResourceAttributes; import org.elasticsearch.Version; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.plugins.TracingPlugin; import org.elasticsearch.tasks.Task; +import java.security.AccessController; +import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.concurrent.TimeUnit; public class APMTracer extends AbstractLifecycleComponent implements TracingPlugin.Tracer { public static final CapturingSpanExporter CAPTURING_SPAN_EXPORTER = new CapturingSpanExporter(); + static final Setting APM_ENDPOINT_SETTING = SecureSetting.secureString("xpack.apm.endpoint", null); + static final Setting APM_TOKEN_SETTING = SecureSetting.secureString("xpack.apm.token", null); + private final Map taskSpans = ConcurrentCollections.newConcurrentMap(); + private final ClusterService clusterService; + private final SecureString endpoint; + private final SecureString token; + private volatile SdkTracerProvider provider; private volatile Tracer tracer; + public APMTracer(Settings settings, ClusterService clusterService) { + this.endpoint = APM_ENDPOINT_SETTING.get(settings); + this.token = APM_TOKEN_SETTING.get(settings); + this.clusterService = Objects.requireNonNull(clusterService); + } + @Override protected void doStart() { - SdkTracerProvider sdkTracerProvider = SdkTracerProvider.builder() - .addSpanProcessor(SimpleSpanProcessor.create(CAPTURING_SPAN_EXPORTER)) - .build(); + final String nodeName = clusterService.getNodeName(); + final String endpoint = this.endpoint.toString(); + final String token = this.token.toString(); + + this.provider = AccessController.doPrivileged( + (PrivilegedAction) () -> SdkTracerProvider.builder() + .setResource( + Resource.create( + Attributes.of( + ResourceAttributes.SERVICE_NAME, + nodeName, + ResourceAttributes.SERVICE_VERSION, + Version.CURRENT.toString(), + ResourceAttributes.DEPLOYMENT_ENVIRONMENT, + "dev" + ) + ) + ) + .addSpanProcessor(createSpanProcessor(endpoint, token)) + .build() + ); OpenTelemetry openTelemetry = OpenTelemetrySdk.builder() - .setTracerProvider(sdkTracerProvider) + .setTracerProvider(provider) .setPropagators(ContextPropagators.create(W3CTraceContextPropagator.getInstance())) .build(); - tracer = openTelemetry.getTracer("elasticsearch", Version.CURRENT.toString()); tracer.spanBuilder("startup").startSpan().end(); } @Override - protected void 
doStop() {} + protected void doStop() { + final SdkTracerProvider provider = this.provider; + if (provider != null) { + provider.shutdown().join(30L, TimeUnit.SECONDS); + } + } @Override protected void doClose() {} @@ -79,6 +130,28 @@ public void onTaskUnregistered(Task task) { } } + private static SpanProcessor createSpanProcessor(String endpoint, String token) { + SpanProcessor processor = SimpleSpanProcessor.create(CAPTURING_SPAN_EXPORTER); + if (Strings.hasLength(endpoint) == false || Strings.hasLength(token) == false) { + return processor; + } + + final OtlpGrpcSpanExporter exporter = AccessController.doPrivileged( + (PrivilegedAction) () -> OtlpGrpcSpanExporter.builder() + .setEndpoint(endpoint) + .addHeader("Authorization", "Bearer " + token) + .build() + ); + return SpanProcessor.composite( + processor, + AccessController.doPrivileged( + (PrivilegedAction) () -> BatchSpanProcessor.builder(exporter) + .setScheduleDelay(100, TimeUnit.MILLISECONDS) + .build() + ) + ); + } + public static class CapturingSpanExporter implements SpanExporter { private List capturedSpans = new ArrayList<>(); diff --git a/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy b/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy new file mode 100644 index 0000000000000..adb23c68ac7a6 --- /dev/null +++ b/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +grant { + permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; + permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; + permission java.lang.RuntimePermission "accessDeclaredMembers"; + // required by io.grpc.internal.DnsNameResolver in grpc-core + permission java.net.NetPermission "getProxySelector"; + permission java.lang.RuntimePermission "getClassLoader"; + permission java.net.SocketPermission "*", "connect,resolve"; +}; From 34239d45b0e70d660858c9653a46d7c02f60240d Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Wed, 17 Nov 2021 23:04:52 +1100 Subject: [PATCH 05/90] Add Traceable interface (#80788) Task now implements the Traceable interface. This allows other traceable entities to be developed. 
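
For illustration only (not part of this patch): a hypothetical non-task entity could implement the new Traceable contract roughly as follows. The three overridden methods are the ones introduced by this change; the RestRequestTraceable class, its fields and its attribute keys are invented for the example.

    import java.util.Map;

    import org.elasticsearch.plugins.TracingPlugin;

    // Hypothetical: trace an inbound REST request instead of a task.
    public class RestRequestTraceable implements TracingPlugin.Traceable {
        private final long requestId; // assumed to be unique per request
        private final String path;    // e.g. "/_search"

        public RestRequestTraceable(long requestId, String path) {
            this.requestId = requestId;
            this.path = path;
        }

        @Override
        public String getSpanId() {
            return "rest-" + requestId; // tracers key in-flight spans by this id
        }

        @Override
        public String getSpanName() {
            return "rest " + path; // becomes the span name in the APM UI
        }

        @Override
        public Map<String, Object> getAttributes() {
            // APMTracer only accepts String, Long, Integer, Double and Boolean values
            return Map.of("es.rest.path", path, "es.rest.request_id", requestId);
        }
    }
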
--- .../elasticsearch/plugins/TracingPlugin.java | 14 ++++++-- .../java/org/elasticsearch/tasks/Task.java | 18 +++++++++- .../org/elasticsearch/tasks/TaskTracer.java | 4 +-- .../elasticsearch/xpack/apm/APMTracer.java | 35 ++++++++++++++----- 4 files changed, 56 insertions(+), 15 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/plugins/TracingPlugin.java b/server/src/main/java/org/elasticsearch/plugins/TracingPlugin.java index cf1860184d57d..02b3cf11b28bd 100644 --- a/server/src/main/java/org/elasticsearch/plugins/TracingPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/TracingPlugin.java @@ -8,13 +8,21 @@ package org.elasticsearch.plugins; -import org.elasticsearch.tasks.Task; +import java.util.Map; public interface TracingPlugin { + interface Traceable { + String getSpanId(); + + String getSpanName(); + + Map getAttributes(); + } + interface Tracer { - void onTaskRegistered(Task task); + void onTraceStarted(Traceable traceable); - void onTaskUnregistered(Task task); + void onTraceStopped(Traceable traceable); } } diff --git a/server/src/main/java/org/elasticsearch/tasks/Task.java b/server/src/main/java/org/elasticsearch/tasks/Task.java index c585883de5b35..a10e67f6a37cb 100644 --- a/server/src/main/java/org/elasticsearch/tasks/Task.java +++ b/server/src/main/java/org/elasticsearch/tasks/Task.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.plugins.TracingPlugin; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; @@ -20,7 +21,7 @@ /** * Current task information */ -public class Task { +public class Task implements TracingPlugin.Traceable { /** * The request header to mark tasks with specific ids @@ -220,4 +221,19 @@ public TaskResult result(DiscoveryNode node, ActionResponse response) throws IOE throw new IllegalStateException("response has to implement ToXContent to be able to store the results"); } } + + @Override + public String getSpanId() { + return String.valueOf(id); + } + + @Override + public String getSpanName() { + return action; + } + + @Override + public Map getAttributes() { + return Map.of("es.task.id", id); + } } diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java b/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java index 46b55d62dfccc..23c1f12d9529b 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java @@ -31,7 +31,7 @@ public void addTracer(TracingPlugin.Tracer tracer) { public void onTaskRegistered(Task task) { for (TracingPlugin.Tracer tracer : tracers) { try { - tracer.onTaskRegistered(task); + tracer.onTraceStarted(task); } catch (Exception e) { assert false : e; logger.warn( @@ -50,7 +50,7 @@ public void onTaskRegistered(Task task) { public void onTaskUnregistered(Task task) { for (TracingPlugin.Tracer tracer : tracers) { try { - tracer.onTaskUnregistered(task); + tracer.onTraceStopped(task); } catch (Exception e) { assert false : e; logger.warn( diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index e89a76f092db9..864818e89829e 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ 
b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -10,6 +10,7 @@ import io.opentelemetry.api.OpenTelemetry; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanBuilder; import io.opentelemetry.api.trace.Tracer; import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; import io.opentelemetry.context.propagation.ContextPropagators; @@ -35,7 +36,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.plugins.TracingPlugin; -import org.elasticsearch.tasks.Task; import java.security.AccessController; import java.security.PrivilegedAction; @@ -53,7 +53,7 @@ public class APMTracer extends AbstractLifecycleComponent implements TracingPlug static final Setting APM_ENDPOINT_SETTING = SecureSetting.secureString("xpack.apm.endpoint", null); static final Setting APM_TOKEN_SETTING = SecureSetting.secureString("xpack.apm.token", null); - private final Map taskSpans = ConcurrentCollections.newConcurrentMap(); + private final Map spans = ConcurrentCollections.newConcurrentMap(); private final ClusterService clusterService; private final SecureString endpoint; private final SecureString token; @@ -111,20 +111,37 @@ protected void doStop() { protected void doClose() {} @Override - public void onTaskRegistered(Task task) { + public void onTraceStarted(TracingPlugin.Traceable traceable) { final Tracer tracer = this.tracer; if (tracer != null) { - taskSpans.computeIfAbsent(task.getId(), taskId -> { - final Span span = tracer.spanBuilder(task.getAction()).startSpan(); - span.setAttribute("es.task.id", task.getId()); - return span; + spans.computeIfAbsent(traceable.getSpanId(), spanId -> { + final SpanBuilder spanBuilder = tracer.spanBuilder(traceable.getSpanName()); + for (Map.Entry entry : traceable.getAttributes().entrySet()) { + final Object value = entry.getValue(); + if (value instanceof String) { + spanBuilder.setAttribute(entry.getKey(), (String) value); + } else if (value instanceof Long) { + spanBuilder.setAttribute(entry.getKey(), (Long) value); + } else if (value instanceof Integer) { + spanBuilder.setAttribute(entry.getKey(), (Integer) value); + } else if (value instanceof Double) { + spanBuilder.setAttribute(entry.getKey(), (Double) value); + } else if (value instanceof Boolean) { + spanBuilder.setAttribute(entry.getKey(), (Boolean) value); + } else { + throw new IllegalArgumentException( + "span attributes do not support value type of [" + value.getClass().getCanonicalName() + "]" + ); + } + } + return spanBuilder.startSpan(); }); } } @Override - public void onTaskUnregistered(Task task) { - final Span span = taskSpans.remove(task.getId()); + public void onTraceStopped(TracingPlugin.Traceable traceable) { + final Span span = spans.remove(traceable.getSpanId()); if (span != null) { span.end(); } From 2f51e5f6647a0123a4f79a9f354d5d7696a54c96 Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Wed, 17 Nov 2021 15:26:51 +0200 Subject: [PATCH 06/90] Capture task span context in thread context to parent nested tasks (#80758) Pass a task's span context via the ThreadContext to parent nested tasks. 
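
For illustration only (not part of this patch): a minimal sketch of how the trace headers travel with a child request, assuming the removeRequestHeaders and putHeader calls shown in the diffs below. The traceparent value is a fabricated example of the W3C "version-traceId-spanId-flags" layout; "traceparent" and "tracestate" are the header names declared on Task in this change.

    import java.util.Map;
    import java.util.Set;

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.util.concurrent.ThreadContext;

    public class TraceContextPropagationSketch {
        public static void main(String[] args) {
            ThreadContext threadContext = new ThreadContext(Settings.EMPTY);

            // Headers the tracer would emit for the current span
            // (compare APMTracer.getSpanHeadersById); the value is made up.
            Map<String, String> spanHeaders = Map.of(
                "traceparent", "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"
            );

            // Sending side (compare ApmTransportInterceptor): drop any trace headers
            // inherited from the surrounding request, install the ones for this span,
            // and send the child request inside that scope.
            try (ThreadContext.StoredContext ignored =
                     threadContext.removeRequestHeaders(Set.of("traceparent", "tracestate"))) {
                threadContext.putHeader(spanHeaders);
                // sender.sendRequest(connection, action, request, options, handler);
            }

            // Receiving side (compare APMTracer.getParentSpanContext): the handler reads the
            // headers back and hands them to the W3C propagator, so the child span is parented
            // under the task that issued the request.
            String traceParent = threadContext.getHeader("traceparent"); // null here: the scope above was restored
        }
    }

In the actual interceptor the header map comes from the tracer's span for the request's parent task, so only requests belonging to a traced task carry these headers; everything else is sent unchanged.
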
--- .../common/util/concurrent/ThreadContext.java | 22 +++++ .../java/org/elasticsearch/tasks/Task.java | 6 ++ .../util/concurrent/ThreadContextTests.java | 25 +++++ .../org/elasticsearch/xpack/apm/ApmIT.java | 27 ++++++ .../java/org/elasticsearch/xpack/apm/APM.java | 61 +++++++++++- .../elasticsearch/xpack/apm/APMTracer.java | 95 +++++++++++++++++-- 6 files changed, 227 insertions(+), 9 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index cc251623f9d51..ae350934eab75 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -177,6 +177,28 @@ public StoredContext stashAndMergeHeaders(Map headers) { return () -> threadLocal.set(context); } + /** + * Removes the current context and resets a new context that is a copy of the current one except that the request + * headers do not contain the given headers to remove. The removed context can be restored when closing the returned + * {@link StoredContext}. + * @param headersToRemove the request headers to remove + */ + public StoredContext removeRequestHeaders(Set headersToRemove) { + final ThreadContextStruct context = threadLocal.get(); + Map newRequestHeaders = new HashMap<>(context.requestHeaders); + newRequestHeaders.keySet().removeAll(headersToRemove); + threadLocal.set( + new ThreadContextStruct( + newRequestHeaders, + context.responseHeaders, + context.transientHeaders, + context.isSystemContext, + context.warningHeadersSize + ) + ); + return () -> threadLocal.set(context); + } + /** * Just like {@link #stashContext()} but no default context is set. * @param preserveResponseHeaders if set to true the response headers of the restore thread will be preserved. diff --git a/server/src/main/java/org/elasticsearch/tasks/Task.java b/server/src/main/java/org/elasticsearch/tasks/Task.java index a10e67f6a37cb..152569521862d 100644 --- a/server/src/main/java/org/elasticsearch/tasks/Task.java +++ b/server/src/main/java/org/elasticsearch/tasks/Task.java @@ -32,9 +32,15 @@ public class Task implements TracingPlugin.Traceable { * The request header which is contained in HTTP request. We parse trace.id from it and store it in thread context. * TRACE_PARENT once parsed in RestController.tryAllHandler is not preserved * has to be declared as a header copied over from http request. + * May also be used internally when apm plugin is enabled. */ public static final String TRACE_PARENT = "traceparent"; + /** + * Is used internally to pass the apm trace context between the nodes + */ + public static final String TRACE_STATE = "tracestate"; + /** * Parsed part of traceparent. It is stored in thread context and emitted in logs. * Has to be declared as a header copied over for tasks. 
diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java index a24aa52b96b1d..09e553d17b13c 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java @@ -18,6 +18,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; @@ -179,6 +180,30 @@ public void testStashAndMerge() { assertEquals("1", threadContext.getHeader("default")); } + public void testRemoveHeaders() { + Settings build = Settings.builder().put("request.headers.default", "1").build(); + ThreadContext threadContext = new ThreadContext(build); + threadContext.putHeader("h_1", "h_1_value"); + threadContext.putHeader("h_2", "h_2_value"); + threadContext.putHeader("h_3", "h_3_value"); + + threadContext.putTransient("ctx.transient_1", 1); + threadContext.addResponseHeader("resp.header", "baaaam"); + try (ThreadContext.StoredContext ctx = threadContext.removeRequestHeaders(Set.of("h_1", "h_3"))) { + assertThat(threadContext.getHeaders(), equalTo(Map.of("default", "1", "h_2", "h_2_value"))); + assertEquals(Integer.valueOf(1), threadContext.getTransient("ctx.transient_1")); + assertEquals("1", threadContext.getHeader("default")); + assertEquals(1, threadContext.getResponseHeaders().get("resp.header").size()); + assertEquals("baaaam", threadContext.getResponseHeaders().get("resp.header").get(0)); + } + + assertThat(threadContext.getHeaders(), equalTo(Map.of("default", "1", "h_1", "h_1_value", "h_2", "h_2_value", "h_3", "h_3_value"))); + assertEquals(Integer.valueOf(1), threadContext.getTransient("ctx.transient_1")); + assertEquals("1", threadContext.getHeader("default")); + assertEquals(1, threadContext.getResponseHeaders().get("resp.header").size()); + assertEquals("baaaam", threadContext.getResponseHeaders().get("resp.header").get(0)); + } + public void testStoreContext() { Settings build = Settings.builder().put("request.headers.default", "1").build(); ThreadContext threadContext = new ThreadContext(build); diff --git a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java index b2fcb6b6b9a4f..16726aa5139c7 100644 --- a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java +++ b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -21,11 +21,14 @@ import org.elasticsearch.tasks.TaskTracer; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.transport.TransportService; +import org.junit.After; import java.util.Collection; import java.util.Collections; import java.util.List; +import static java.util.stream.Collectors.toList; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; @@ -44,6 +47,11 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { return Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)).setSecureSettings(secureSettings).build(); } + @After + public void clearRecordedSpans() { + APMTracer.CAPTURING_SPAN_EXPORTER.clear(); + } + public void testModule() { List 
plugins = internalCluster().getMasterNodeInstance(PluginsService.class).filterPlugins(TracingPlugin.class); assertThat(plugins, hasSize(1)); @@ -68,4 +76,23 @@ public void testModule() { } assertTrue(found); } + + public void testRecordsNestedSpans() { + + APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events + + client().admin().cluster().prepareListTasks().get(); + + var parentTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists").collect(toList()); + assertThat(parentTasks, hasSize(1)); + var parentTask = parentTasks.get(0); + assertThat(parentTask.getParentSpanId(), equalTo("0000000000000000")); + + var childrenTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists[n]").collect(toList()); + assertThat(childrenTasks, hasSize(internalCluster().size())); + for (SpanData childrenTask : childrenTasks) { + assertThat(childrenTask.getParentSpanId(), equalTo(parentTask.getSpanId())); + assertThat(childrenTask.getTraceId(), equalTo(parentTask.getTraceId())); + } + } } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java index 5fbb72c36f4e5..fd7f2ea57e493 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java @@ -14,23 +14,35 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.plugins.NetworkPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.TracingPlugin; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportInterceptor; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; import java.util.Collection; import java.util.List; +import java.util.Set; import java.util.function.Supplier; -public class APM extends Plugin implements TracingPlugin { +public class APM extends Plugin implements TracingPlugin, NetworkPlugin { - private final SetOnce tracer = new SetOnce<>(); + public static final Set TRACE_HEADERS = Set.of(Task.TRACE_PARENT, Task.TRACE_STATE); + + private final SetOnce tracer = new SetOnce<>(); private final Settings settings; public APM(Settings settings) { @@ -51,7 +63,7 @@ public Collection createComponents( IndexNameExpressionResolver indexNameExpressionResolver, Supplier repositoriesServiceSupplier ) { - tracer.set(new APMTracer(settings, clusterService)); + tracer.set(new APMTracer(settings, threadPool, clusterService)); return List.of(tracer.get()); } @@ -59,4 +71,47 @@ public Collection createComponents( public List> getSettings() { return List.of(APMTracer.APM_ENDPOINT_SETTING, APMTracer.APM_TOKEN_SETTING); } + + 
public List getTransportInterceptors(NamedWriteableRegistry namedWriteableRegistry, ThreadContext threadContext) { + return List.of(new TransportInterceptor() { + @Override + public AsyncSender interceptSender(AsyncSender sender) { + return new ApmTransportInterceptor(sender, threadContext); + } + }); + } + + private class ApmTransportInterceptor implements TransportInterceptor.AsyncSender { + + private final TransportInterceptor.AsyncSender sender; + private final ThreadContext threadContext; + + ApmTransportInterceptor(TransportInterceptor.AsyncSender sender, ThreadContext threadContext) { + this.sender = sender; + this.threadContext = threadContext; + } + + @Override + public void sendRequest( + Transport.Connection connection, + String action, + TransportRequest request, + TransportRequestOptions options, + TransportResponseHandler handler + ) { + if (tracer.get() == null) { + sender.sendRequest(connection, action, request, options, handler); + } else { + var headers = tracer.get().getSpanHeadersById(String.valueOf(request.getParentTask().getId())); + if (headers != null) { + try (var ignore = threadContext.removeRequestHeaders(TRACE_HEADERS)) { + threadContext.putHeader(headers); + sender.sendRequest(connection, action, request, options, handler); + } + } else { + sender.sendRequest(connection, action, request, options, handler); + } + } + } + } } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 864818e89829e..cc8586c0fa5c2 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -13,7 +13,10 @@ import io.opentelemetry.api.trace.SpanBuilder; import io.opentelemetry.api.trace.Tracer; import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; import io.opentelemetry.context.propagation.ContextPropagators; +import io.opentelemetry.context.propagation.TextMapGetter; import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.common.CompletableResultCode; @@ -36,15 +39,21 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.plugins.TracingPlugin; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; import java.security.AccessController; import java.security.PrivilegedAction; -import java.util.ArrayList; import java.util.Collection; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Queue; import java.util.concurrent.TimeUnit; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import java.util.stream.Stream; public class APMTracer extends AbstractLifecycleComponent implements TracingPlugin.Tracer { @@ -60,10 +69,13 @@ public class APMTracer extends AbstractLifecycleComponent implements TracingPlug private volatile SdkTracerProvider provider; private volatile Tracer tracer; + private volatile OpenTelemetry openTelemetry; + private final ThreadPool threadPool; - public APMTracer(Settings settings, ClusterService clusterService) { + public APMTracer(Settings settings, ThreadPool threadPool, ClusterService clusterService) { 
this.endpoint = APM_ENDPOINT_SETTING.get(settings); this.token = APM_TOKEN_SETTING.get(settings); + this.threadPool = Objects.requireNonNull(threadPool); this.clusterService = Objects.requireNonNull(clusterService); } @@ -91,12 +103,11 @@ protected void doStart() { .build() ); - OpenTelemetry openTelemetry = OpenTelemetrySdk.builder() + openTelemetry = OpenTelemetrySdk.builder() .setTracerProvider(provider) .setPropagators(ContextPropagators.create(W3CTraceContextPropagator.getInstance())) .build(); tracer = openTelemetry.getTracer("elasticsearch", Version.CURRENT.toString()); - tracer.spanBuilder("startup").startSpan().end(); } @Override @@ -113,9 +124,14 @@ protected void doClose() {} @Override public void onTraceStarted(TracingPlugin.Traceable traceable) { final Tracer tracer = this.tracer; - if (tracer != null) { + final OpenTelemetry openTelemetry = this.openTelemetry; + if (openTelemetry != null && tracer != null) { spans.computeIfAbsent(traceable.getSpanId(), spanId -> { final SpanBuilder spanBuilder = tracer.spanBuilder(traceable.getSpanName()); + Context parentContext = getParentSpanContext(openTelemetry); + if (parentContext != null) { + spanBuilder.setParent(parentContext); + } for (Map.Entry entry : traceable.getAttributes().entrySet()) { final Object value = entry.getValue(); if (value instanceof String) { @@ -139,6 +155,40 @@ public void onTraceStarted(TracingPlugin.Traceable traceable) { } } + private Context getParentSpanContext(OpenTelemetry openTelemetry) { + // If we already have a non-root span context that should be the parent + if (Context.current() != Context.root()) { + return Context.current(); + } + + // If not let us check for a parent context in the thread context + String traceParent = threadPool.getThreadContext().getHeader(Task.TRACE_PARENT); + String traceState = threadPool.getThreadContext().getHeader(Task.TRACE_STATE); + if (traceParent != null) { + Map traceContextMap = new HashMap<>(); + // traceparent and tracestate should match the keys used by W3CTraceContextPropagator + traceContextMap.put(Task.TRACE_PARENT, traceParent); + if (traceState != null) { + traceContextMap.put(Task.TRACE_STATE, traceState); + } + return openTelemetry.getPropagators().getTextMapPropagator().extract(Context.current(), traceContextMap, new MapKeyGetter()); + } + return null; + } + + public Map getSpanHeadersById(String id) { + var span = spans.get(id); + if (span == null) { + return null; + } + try (Scope scope = span.makeCurrent()) { + Map spanHeaders = new HashMap<>(); + openTelemetry.getPropagators().getTextMapPropagator().inject(Context.current(), spanHeaders, Map::put); + spanHeaders.keySet().removeIf(k -> isSupportedContextKey(k) == false); + return spanHeaders; + } + } + @Override public void onTraceStopped(TracingPlugin.Traceable traceable) { final Span span = spans.remove(traceable.getSpanId()); @@ -171,7 +221,7 @@ private static SpanProcessor createSpanProcessor(String endpoint, String token) public static class CapturingSpanExporter implements SpanExporter { - private List capturedSpans = new ArrayList<>(); + private Queue capturedSpans = ConcurrentCollections.newQueue(); public void clear() { capturedSpans.clear(); @@ -181,6 +231,22 @@ public List getCapturedSpans() { return List.copyOf(capturedSpans); } + public Stream findSpan(Predicate predicate) { + return getCapturedSpans().stream().filter(predicate); + } + + public Stream findSpanByName(String name) { + return findSpan(span -> Objects.equals(span.getName(), name)); + } + + public Stream 
findSpanBySpanId(String spanId) { + return findSpan(span -> Objects.equals(span.getSpanId(), spanId)); + } + + public Stream findSpanByParentSpanId(String parentSpanId) { + return findSpan(span -> Objects.equals(span.getParentSpanId(), parentSpanId)); + } + @Override public CompletableResultCode export(Collection spans) { capturedSpans.addAll(spans); @@ -197,4 +263,21 @@ public CompletableResultCode shutdown() { return CompletableResultCode.ofSuccess(); } } + + private static class MapKeyGetter implements TextMapGetter> { + + @Override + public Iterable keys(Map carrier) { + return carrier.keySet().stream().filter(APMTracer::isSupportedContextKey).collect(Collectors.toSet()); + } + + @Override + public String get(Map carrier, String key) { + return carrier.get(key); + } + } + + private static boolean isSupportedContextKey(String key) { + return APM.TRACE_HEADERS.contains(key); + } } From a23bf664833ce4ea4b16fbec4926d9d5a8d1187c Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 17 Nov 2021 15:16:21 +0000 Subject: [PATCH 07/90] [APM] Add multi-shard search test case (#80792) Adds a test case that creates and populates a couple of multi-shard indices and executes a search against them. --- .../org/elasticsearch/xpack/apm/ApmIT.java | 75 +++++++++++++++++++ .../elasticsearch/xpack/apm/APMTracer.java | 6 +- 2 files changed, 78 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java index 16726aa5139c7..52ec663c8ee3a 100644 --- a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java +++ b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -10,9 +10,17 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.sdk.trace.data.SpanData; +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.search.SearchTransportService; +import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.TracingPlugin; @@ -21,13 +29,16 @@ import org.elasticsearch.tasks.TaskTracer; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.XContentType; import org.junit.After; import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.concurrent.TimeUnit; import static java.util.stream.Collectors.toList; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; @@ -62,6 +73,8 @@ public void testModule() { final Task testTask = new Task(randomNonNegativeLong(), "test", "action", "", TaskId.EMPTY_TASK_ID, Collections.emptyMap()); + 
APMTracer.CAPTURING_SPAN_EXPORTER.clear(); + taskTracer.onTaskRegistered(testTask); taskTracer.onTaskUnregistered(testTask); @@ -95,4 +108,66 @@ public void testRecordsNestedSpans() { assertThat(childrenTask.getTraceId(), equalTo(parentTask.getTraceId())); } } + + public void testSearch() throws Exception { + + internalCluster().ensureAtLeastNumDataNodes(2); + final int nodeCount = internalCluster().numDataNodes(); + + assertAcked( + client().admin() + .indices() + .prepareCreate("test-matching") + .setMapping("{\"properties\":{\"message\":{\"type\":\"text\"},\"@timestamp\":{\"type\":\"date\"}}}") + .setSettings( + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, nodeCount * 6) + ) + ); + + assertAcked( + client().admin() + .indices() + .prepareCreate("test-notmatching") + .setMapping("{\"properties\":{\"message\":{\"type\":\"text\"},\"@timestamp\":{\"type\":\"date\"}}}") + .setSettings( + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, nodeCount * 6) + ) + ); + + ensureGreen("test-matching", "test-notmatching"); + + final String matchingDate = "2021-11-17"; + final String nonMatchingDate = "2021-01-01"; + + final BulkRequestBuilder bulkRequestBuilder = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + for (int i = 0; i < 1000; i++) { + final boolean isMatching = randomBoolean(); + final IndexRequestBuilder indexRequestBuilder = client().prepareIndex(isMatching ? "test-matching" : "test-notmatching"); + indexRequestBuilder.setSource( + "{\"@timestamp\":\"" + (isMatching ? matchingDate : nonMatchingDate) + "\",\"message\":\"\"}", + XContentType.JSON + ); + bulkRequestBuilder.add(indexRequestBuilder); + } + + assertFalse(bulkRequestBuilder.execute().actionGet(10, TimeUnit.SECONDS).hasFailures()); + + final APMTracer.CapturingSpanExporter spanExporter = APMTracer.CAPTURING_SPAN_EXPORTER; + spanExporter.clear(); + + client().prepareSearch() + .setQuery(new RangeQueryBuilder("@timestamp").gt("2021-11-01")) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setPreFilterShardSize(1) + .execute() + .actionGet(10, TimeUnit.SECONDS); + + assertTrue(spanExporter.findSpanByName(SearchAction.NAME).findAny().isPresent()); + assertTrue(spanExporter.findSpanByName(SearchTransportService.QUERY_CAN_MATCH_NODE_NAME).findAny().isPresent()); + } } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index cc8586c0fa5c2..d4d7ba1104034 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -81,7 +81,6 @@ public APMTracer(Settings settings, ThreadPool threadPool, ClusterService cluste @Override protected void doStart() { - final String nodeName = clusterService.getNodeName(); final String endpoint = this.endpoint.toString(); final String token = this.token.toString(); @@ -91,7 +90,7 @@ protected void doStart() { Resource.create( Attributes.of( ResourceAttributes.SERVICE_NAME, - nodeName, + clusterService.getClusterName().toString(), ResourceAttributes.SERVICE_VERSION, Version.CURRENT.toString(), ResourceAttributes.DEPLOYMENT_ENVIRONMENT, @@ -114,6 +113,7 @@ protected void doStart() { protected void doStop() { final SdkTracerProvider provider = this.provider; if (provider != 
null) { + provider.forceFlush().join(10L, TimeUnit.SECONDS); provider.shutdown().join(30L, TimeUnit.SECONDS); } } @@ -221,7 +221,7 @@ private static SpanProcessor createSpanProcessor(String endpoint, String token) public static class CapturingSpanExporter implements SpanExporter { - private Queue capturedSpans = ConcurrentCollections.newQueue(); + private final Queue capturedSpans = ConcurrentCollections.newQueue(); public void clear() { capturedSpans.clear(); From 3a304f2b71e0434c6c050b63dcb1ea2a51dd3bf1 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 18 Nov 2021 11:24:32 +0000 Subject: [PATCH 08/90] Remove unused TracingPlugin interface (#80799) --- .../java/org/elasticsearch/node/Node.java | 6 ++-- .../elasticsearch/plugins/TracingPlugin.java | 28 ---------------- .../java/org/elasticsearch/tasks/Task.java | 4 +-- .../org/elasticsearch/tasks/TaskTracer.java | 10 +++--- .../org/elasticsearch/tracing/Traceable.java | 32 +++++++++++++++++++ .../org/elasticsearch/tracing/Tracer.java | 25 +++++++++++++++ .../org/elasticsearch/xpack/apm/ApmIT.java | 3 +- .../java/org/elasticsearch/xpack/apm/APM.java | 3 +- .../elasticsearch/xpack/apm/APMTracer.java | 8 ++--- 9 files changed, 72 insertions(+), 47 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/plugins/TracingPlugin.java create mode 100644 server/src/main/java/org/elasticsearch/tracing/Traceable.java create mode 100644 server/src/main/java/org/elasticsearch/tracing/Tracer.java diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index b6a71a158ddfd..d3f75d38d2cae 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -147,7 +147,6 @@ import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.plugins.ShutdownAwarePlugin; import org.elasticsearch.plugins.SystemIndexPlugin; -import org.elasticsearch.plugins.TracingPlugin; import org.elasticsearch.repositories.RepositoriesModule; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; @@ -171,6 +170,7 @@ import org.elasticsearch.tasks.TaskTracer; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportInterceptor; import org.elasticsearch.transport.TransportService; @@ -753,9 +753,7 @@ protected Node( final IndexingPressure indexingLimits = new IndexingPressure(settings); final TaskTracer taskTracer = transportService.getTaskManager().getTaskTracer(); - pluginComponents.stream() - .map(c -> c instanceof TracingPlugin.Tracer ? (TracingPlugin.Tracer) c : null) - .forEach(taskTracer::addTracer); + pluginComponents.stream().map(c -> c instanceof Tracer ? (Tracer) c : null).forEach(taskTracer::addTracer); final RecoverySettings recoverySettings = new RecoverySettings(settings, settingsModule.getClusterSettings()); RepositoriesModule repositoriesModule = new RepositoriesModule( diff --git a/server/src/main/java/org/elasticsearch/plugins/TracingPlugin.java b/server/src/main/java/org/elasticsearch/plugins/TracingPlugin.java deleted file mode 100644 index 02b3cf11b28bd..0000000000000 --- a/server/src/main/java/org/elasticsearch/plugins/TracingPlugin.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.plugins; - -import java.util.Map; - -public interface TracingPlugin { - - interface Traceable { - String getSpanId(); - - String getSpanName(); - - Map getAttributes(); - } - - interface Tracer { - void onTraceStarted(Traceable traceable); - - void onTraceStopped(Traceable traceable); - } -} diff --git a/server/src/main/java/org/elasticsearch/tasks/Task.java b/server/src/main/java/org/elasticsearch/tasks/Task.java index 152569521862d..e9b6f8500cb38 100644 --- a/server/src/main/java/org/elasticsearch/tasks/Task.java +++ b/server/src/main/java/org/elasticsearch/tasks/Task.java @@ -11,7 +11,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteable; -import org.elasticsearch.plugins.TracingPlugin; +import org.elasticsearch.tracing.Traceable; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; @@ -21,7 +21,7 @@ /** * Current task information */ -public class Task implements TracingPlugin.Traceable { +public class Task implements Traceable { /** * The request header to mark tasks with specific ids diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java b/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java index 23c1f12d9529b..ec631b369a9e9 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java @@ -11,7 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.plugins.TracingPlugin; +import org.elasticsearch.tracing.Tracer; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; @@ -20,16 +20,16 @@ public class TaskTracer { private static final Logger logger = LogManager.getLogger(); - private final List tracers = new CopyOnWriteArrayList<>(); + private final List tracers = new CopyOnWriteArrayList<>(); - public void addTracer(TracingPlugin.Tracer tracer) { + public void addTracer(Tracer tracer) { if (tracer != null) { tracers.add(tracer); } } public void onTaskRegistered(Task task) { - for (TracingPlugin.Tracer tracer : tracers) { + for (Tracer tracer : tracers) { try { tracer.onTraceStarted(task); } catch (Exception e) { @@ -48,7 +48,7 @@ public void onTaskRegistered(Task task) { } public void onTaskUnregistered(Task task) { - for (TracingPlugin.Tracer tracer : tracers) { + for (Tracer tracer : tracers) { try { tracer.onTraceStopped(task); } catch (Exception e) { diff --git a/server/src/main/java/org/elasticsearch/tracing/Traceable.java b/server/src/main/java/org/elasticsearch/tracing/Traceable.java new file mode 100644 index 0000000000000..40ad2b92001e2 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/tracing/Traceable.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.tracing; + +import java.util.Map; + +/** + * Something which maps onto a span in a distributed trace. + */ +public interface Traceable { + + /** + * @return a key which uniquely identifies the span. + */ + String getSpanId(); + + /** + * @return the name of the span as seen by the external tracing system (e.g. the action name for a task) + */ + String getSpanName(); + + /** + * @return extra metadata about the span. + */ + Map getAttributes(); +} diff --git a/server/src/main/java/org/elasticsearch/tracing/Tracer.java b/server/src/main/java/org/elasticsearch/tracing/Tracer.java new file mode 100644 index 0000000000000..dd4454b8e6b62 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/tracing/Tracer.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.tracing; + +/** + * Represents a distributed tracing system that keeps track of the start and end of various activities in the cluster. + */ +public interface Tracer { + + /** + * Called when the {@link Traceable} activity starts. + */ + void onTraceStarted(Traceable traceable); + + /** + * Called when the {@link Traceable} activity ends. + */ + void onTraceStopped(Traceable traceable); +} diff --git a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java index 52ec663c8ee3a..a2ed6522f3e6e 100644 --- a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java +++ b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -23,7 +23,6 @@ import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; -import org.elasticsearch.plugins.TracingPlugin; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskTracer; @@ -64,7 +63,7 @@ public void clearRecordedSpans() { } public void testModule() { - List plugins = internalCluster().getMasterNodeInstance(PluginsService.class).filterPlugins(TracingPlugin.class); + List plugins = internalCluster().getMasterNodeInstance(PluginsService.class).filterPlugins(APM.class); assertThat(plugins, hasSize(1)); TransportService transportService = internalCluster().getInstance(TransportService.class); diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java index fd7f2ea57e493..64227153d4341 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java @@ -19,7 +19,6 @@ import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.plugins.NetworkPlugin; import org.elasticsearch.plugins.Plugin; -import 
org.elasticsearch.plugins.TracingPlugin; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.script.ScriptService; import org.elasticsearch.tasks.Task; @@ -38,7 +37,7 @@ import java.util.Set; import java.util.function.Supplier; -public class APM extends Plugin implements TracingPlugin, NetworkPlugin { +public class APM extends Plugin implements NetworkPlugin { public static final Set TRACE_HEADERS = Set.of(Task.TRACE_PARENT, Task.TRACE_STATE); diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index d4d7ba1104034..2846daa2c9fbb 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -38,9 +38,9 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.plugins.TracingPlugin; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Traceable; import java.security.AccessController; import java.security.PrivilegedAction; @@ -55,7 +55,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -public class APMTracer extends AbstractLifecycleComponent implements TracingPlugin.Tracer { +public class APMTracer extends AbstractLifecycleComponent implements org.elasticsearch.tracing.Tracer { public static final CapturingSpanExporter CAPTURING_SPAN_EXPORTER = new CapturingSpanExporter(); @@ -122,7 +122,7 @@ protected void doStop() { protected void doClose() {} @Override - public void onTraceStarted(TracingPlugin.Traceable traceable) { + public void onTraceStarted(Traceable traceable) { final Tracer tracer = this.tracer; final OpenTelemetry openTelemetry = this.openTelemetry; if (openTelemetry != null && tracer != null) { @@ -190,7 +190,7 @@ public Map getSpanHeadersById(String id) { } @Override - public void onTraceStopped(TracingPlugin.Traceable traceable) { + public void onTraceStopped(Traceable traceable) { final Span span = spans.remove(traceable.getSpanId()); if (span != null) { span.end(); From 8bd8a2418d76805a8eca940defca3b5635eaa759 Mon Sep 17 00:00:00 2001 From: Sylvain Juge Date: Thu, 18 Nov 2021 17:02:15 +0100 Subject: [PATCH 09/90] single service + few attributes --- .../java/org/elasticsearch/tasks/Task.java | 10 +++++++++- .../elasticsearch/xpack/apm/APMTracer.java | 19 ++++++++++++++++++- 2 files changed, 27 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/tasks/Task.java b/server/src/main/java/org/elasticsearch/tasks/Task.java index e9b6f8500cb38..43a8f3278fba6 100644 --- a/server/src/main/java/org/elasticsearch/tasks/Task.java +++ b/server/src/main/java/org/elasticsearch/tasks/Task.java @@ -16,6 +16,7 @@ import org.elasticsearch.xcontent.ToXContentObject; import java.io.IOException; +import java.util.HashMap; import java.util.Map; /** @@ -240,6 +241,13 @@ public String getSpanName() { @Override public Map getAttributes() { - return Map.of("es.task.id", id); + + TaskId parentTask = getParentTaskId(); + Map attributes = new HashMap<>(); + attributes.put(TracingPlugin.AttributeKeys.TASK_ID, id); + if (parentTask.isSet()) { + attributes.put(TracingPlugin.AttributeKeys.PARENT_TASK_ID, parentTask.toString()); + } + return attributes; } } 
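For illustration, a minimal sketch of the Traceable/Tracer contract defined in the patches above, applied outside of Task. The DemoActivity class, its span name, and the runTraced helper are hypothetical; only the Traceable interface (getSpanId, getSpanName, getAttributes) and the Tracer callbacks (onTraceStarted, onTraceStopped) come from this patch series.

import java.util.Map;

import org.elasticsearch.tracing.Traceable;
import org.elasticsearch.tracing.Tracer;

// Hypothetical example class; only the Traceable and Tracer interfaces come from the patches above.
class DemoActivity implements Traceable {

    private final long id;

    DemoActivity(long id) {
        this.id = id;
    }

    @Override
    public String getSpanId() {
        return "demo-" + id; // unique key, used again when the trace is stopped
    }

    @Override
    public String getSpanName() {
        return "demo/activity";
    }

    @Override
    public Map<String, Object> getAttributes() {
        // APMTracer accepts String, Long, Integer, Double and Boolean attribute values
        return Map.of("es.task.id", id);
    }

    // A Tracer implementation (for example APMTracer) is notified around the traced work.
    static void runTraced(Tracer tracer, DemoActivity activity, Runnable work) {
        tracer.onTraceStarted(activity);
        try {
            work.run();
        } finally {
            tracer.onTraceStopped(activity);
        }
    }
}

The attribute key mirrors the "es.task.id" key that Task reports above; APMTracer copies such attributes onto the OpenTelemetry span when the trace starts.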
diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 2846daa2c9fbb..bcb2a774c2ea1 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -90,7 +90,11 @@ protected void doStart() { Resource.create( Attributes.of( ResourceAttributes.SERVICE_NAME, - clusterService.getClusterName().toString(), + "elasticsearch", + ResourceAttributes.SERVICE_NAMESPACE, + clusterService.getClusterName().value(), + ResourceAttributes.SERVICE_INSTANCE_ID, + clusterService.getNodeName(), ResourceAttributes.SERVICE_VERSION, Version.CURRENT.toString(), ResourceAttributes.DEPLOYMENT_ENVIRONMENT, @@ -132,6 +136,7 @@ public void onTraceStarted(Traceable traceable) { if (parentContext != null) { spanBuilder.setParent(parentContext); } + for (Map.Entry entry : traceable.getAttributes().entrySet()) { final Object value = entry.getValue(); if (value instanceof String) { @@ -150,6 +155,18 @@ public void onTraceStarted(Traceable traceable) { ); } } + + // hack transactions to avoid the 'custom' type + spanBuilder.setAttribute("type", "elasticsearch"); + + // hack spans to avoid the 'app' span.type, will make it use external/elasticsearch + // also allows to set destination resource name in map + spanBuilder.setAttribute("messaging.system", "elasticsearch"); + spanBuilder.setAttribute("messaging.destination", clusterService.getNodeName()); + + spanBuilder.setAttribute(TracingPlugin.AttributeKeys.NODE_NAME, clusterService.getNodeName()); + spanBuilder.setAttribute(TracingPlugin.AttributeKeys.CLUSTER_NAME, clusterService.getClusterName().toString()); + return spanBuilder.startSpan(); }); } From 110bb00de6b2c4c6735fc2ec85f1c2fbcd966771 Mon Sep 17 00:00:00 2001 From: Sylvain Juge Date: Thu, 18 Nov 2021 17:11:18 +0100 Subject: [PATCH 10/90] tune a few minor things --- server/src/main/java/org/elasticsearch/tasks/Task.java | 4 ++-- .../src/main/java/org/elasticsearch/tracing/Traceable.java | 7 +++++++ .../main/java/org/elasticsearch/xpack/apm/APMTracer.java | 6 ++++-- 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/tasks/Task.java b/server/src/main/java/org/elasticsearch/tasks/Task.java index 43a8f3278fba6..8521642ee4936 100644 --- a/server/src/main/java/org/elasticsearch/tasks/Task.java +++ b/server/src/main/java/org/elasticsearch/tasks/Task.java @@ -244,9 +244,9 @@ public Map getAttributes() { TaskId parentTask = getParentTaskId(); Map attributes = new HashMap<>(); - attributes.put(TracingPlugin.AttributeKeys.TASK_ID, id); + attributes.put(Traceable.AttributeKeys.TASK_ID, id); if (parentTask.isSet()) { - attributes.put(TracingPlugin.AttributeKeys.PARENT_TASK_ID, parentTask.toString()); + attributes.put(Traceable.AttributeKeys.PARENT_TASK_ID, parentTask.toString()); } return attributes; } diff --git a/server/src/main/java/org/elasticsearch/tracing/Traceable.java b/server/src/main/java/org/elasticsearch/tracing/Traceable.java index 40ad2b92001e2..9eb06c56b0dbe 100644 --- a/server/src/main/java/org/elasticsearch/tracing/Traceable.java +++ b/server/src/main/java/org/elasticsearch/tracing/Traceable.java @@ -29,4 +29,11 @@ public interface Traceable { * @return extra metadata about the span. 
*/ Map getAttributes(); + + interface AttributeKeys { + String TASK_ID = "es.task.id"; + String PARENT_TASK_ID = "es.task.parent.id"; + String CLUSTER_NAME = "es.cluster.name"; + String NODE_NAME = "es.node.name"; + } } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index bcb2a774c2ea1..6424de4e8cbca 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -164,8 +164,10 @@ public void onTraceStarted(Traceable traceable) { spanBuilder.setAttribute("messaging.system", "elasticsearch"); spanBuilder.setAttribute("messaging.destination", clusterService.getNodeName()); - spanBuilder.setAttribute(TracingPlugin.AttributeKeys.NODE_NAME, clusterService.getNodeName()); - spanBuilder.setAttribute(TracingPlugin.AttributeKeys.CLUSTER_NAME, clusterService.getClusterName().toString()); + // this will duplicate the "resource attributes" that are defined globally + // but providing them as span attributes allow easier mapping through labels as otel attributes are stored as-is only in 7.16. + spanBuilder.setAttribute(Traceable.AttributeKeys.NODE_NAME, clusterService.getNodeName()); + spanBuilder.setAttribute(Traceable.AttributeKeys.CLUSTER_NAME, clusterService.getClusterName().toString()); return spanBuilder.startSpan(); }); From 4a1a899a0afcf97cafa3aa3653a168dcebd29d89 Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Thu, 18 Nov 2021 18:05:56 +0100 Subject: [PATCH 11/90] adding dynamic setting `xpack.apm.tracing.enabled` (#80796) --- .../org/elasticsearch/xpack/apm/ApmIT.java | 38 +++- .../java/org/elasticsearch/xpack/apm/APM.java | 25 +-- .../elasticsearch/xpack/apm/APMTracer.java | 164 ++++++++++++------ 3 files changed, 164 insertions(+), 63 deletions(-) diff --git a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java index a2ed6522f3e6e..be1b1cf5b09ac 100644 --- a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java +++ b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -10,6 +10,7 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.sdk.trace.data.SpanData; +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchAction; @@ -38,6 +39,7 @@ import static java.util.stream.Collectors.toList; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; @@ -54,7 +56,12 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString(APMTracer.APM_ENDPOINT_SETTING.getKey(), System.getProperty("tests.apm.endpoint", "")); secureSettings.setString(APMTracer.APM_TOKEN_SETTING.getKey(), System.getProperty("tests.apm.token", "")); - return 
Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)).setSecureSettings(secureSettings).build(); + + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put(APMTracer.APM_ENABLED_SETTING.getKey(), true) + .setSecureSettings(secureSettings) + .build(); } @After @@ -169,4 +176,33 @@ public void testSearch() throws Exception { assertTrue(spanExporter.findSpanByName(SearchAction.NAME).findAny().isPresent()); assertTrue(spanExporter.findSpanByName(SearchTransportService.QUERY_CAN_MATCH_NODE_NAME).findAny().isPresent()); } + + public void testDoesNotRecordSpansWhenDisabled() { + + client().admin() + .cluster() + .updateSettings( + new ClusterUpdateSettingsRequest().persistentSettings( + Settings.builder().put(APMTracer.APM_ENABLED_SETTING.getKey(), false).build() + ) + ) + .actionGet(); + + try { + APMTracer.CAPTURING_SPAN_EXPORTER.clear(); + + client().admin().cluster().prepareListTasks().get(); + + assertThat(APMTracer.CAPTURING_SPAN_EXPORTER.getCapturedSpans(), empty()); + } finally { + client().admin() + .cluster() + .updateSettings( + new ClusterUpdateSettingsRequest().persistentSettings( + Settings.builder().put(APMTracer.APM_ENABLED_SETTING.getKey(), (String) null).build() + ) + ) + .actionGet(); + } + } } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java index 64227153d4341..624c89a543847 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java @@ -68,7 +68,7 @@ public Collection createComponents( @Override public List> getSettings() { - return List.of(APMTracer.APM_ENDPOINT_SETTING, APMTracer.APM_TOKEN_SETTING); + return List.of(APMTracer.APM_ENABLED_SETTING, APMTracer.APM_ENDPOINT_SETTING, APMTracer.APM_TOKEN_SETTING); } public List getTransportInterceptors(NamedWriteableRegistry namedWriteableRegistry, ThreadContext threadContext) { @@ -98,18 +98,19 @@ public void sendRequest( TransportRequestOptions options, TransportResponseHandler handler ) { - if (tracer.get() == null) { + var aTracer = tracer.get(); + if (aTracer == null || aTracer.isEnabled() == false) { + sender.sendRequest(connection, action, request, options, handler); + return; + } + var headers = aTracer.getSpanHeadersById(String.valueOf(request.getParentTask().getId())); + if (headers == null) { + sender.sendRequest(connection, action, request, options, handler); + return; + } + try (var ignore = threadContext.removeRequestHeaders(TRACE_HEADERS)) { + threadContext.putHeader(headers); sender.sendRequest(connection, action, request, options, handler); - } else { - var headers = tracer.get().getSpanHeadersById(String.valueOf(request.getParentTask().getId())); - if (headers != null) { - try (var ignore = threadContext.removeRequestHeaders(TRACE_HEADERS)) { - threadContext.putHeader(headers); - sender.sendRequest(connection, action, request, options, handler); - } - } else { - sender.sendRequest(connection, action, request, options, handler); - } } } } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 2846daa2c9fbb..56de34778ae86 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ 
b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -50,41 +50,102 @@ import java.util.Map; import java.util.Objects; import java.util.Queue; +import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.Stream; +import static org.elasticsearch.common.settings.Setting.Property.Dynamic; +import static org.elasticsearch.common.settings.Setting.Property.NodeScope; + public class APMTracer extends AbstractLifecycleComponent implements org.elasticsearch.tracing.Tracer { public static final CapturingSpanExporter CAPTURING_SPAN_EXPORTER = new CapturingSpanExporter(); + static final Setting APM_ENABLED_SETTING = Setting.boolSetting("xpack.apm.tracing.enabled", false, Dynamic, NodeScope); static final Setting APM_ENDPOINT_SETTING = SecureSetting.secureString("xpack.apm.endpoint", null); static final Setting APM_TOKEN_SETTING = SecureSetting.secureString("xpack.apm.token", null); + private final Semaphore shutdownPermits = new Semaphore(Integer.MAX_VALUE); private final Map spans = ConcurrentCollections.newConcurrentMap(); + private final ThreadPool threadPool; private final ClusterService clusterService; private final SecureString endpoint; private final SecureString token; - private volatile SdkTracerProvider provider; - private volatile Tracer tracer; - private volatile OpenTelemetry openTelemetry; - private final ThreadPool threadPool; + private volatile boolean enabled; + private volatile APMServices services; + + /** This class is required to make all open telemetry services visible at once */ + private static class APMServices { + private final SdkTracerProvider provider; + private final Tracer tracer; + private final OpenTelemetry openTelemetry; + + private APMServices(SdkTracerProvider provider, Tracer tracer, OpenTelemetry openTelemetry) { + this.provider = provider; + this.tracer = tracer; + this.openTelemetry = openTelemetry; + } + } public APMTracer(Settings settings, ThreadPool threadPool, ClusterService clusterService) { - this.endpoint = APM_ENDPOINT_SETTING.get(settings); - this.token = APM_TOKEN_SETTING.get(settings); this.threadPool = Objects.requireNonNull(threadPool); this.clusterService = Objects.requireNonNull(clusterService); + this.endpoint = APM_ENDPOINT_SETTING.get(settings); + this.token = APM_TOKEN_SETTING.get(settings); + this.enabled = APM_ENABLED_SETTING.get(settings); + clusterService.getClusterSettings().addSettingsUpdateConsumer(APM_ENABLED_SETTING, this::setEnabled); + } + + public boolean isEnabled() { + return enabled; + } + + private void setEnabled(boolean enabled) { + this.enabled = enabled; + if (enabled) { + createApmServices(); + } else { + destroyApmServices(); + } } @Override protected void doStart() { + if (enabled) { + createApmServices(); + } + } + + @Override + protected void doStop() { + destroyApmServices(); + try { + shutdownPermits.tryAcquire(Integer.MAX_VALUE, 30L, TimeUnit.SECONDS); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + } + + @Override + protected void doClose() { + + } + + private void createApmServices() { + assert enabled; + + var acquired = shutdownPermits.tryAcquire(); + if (acquired == false) { + return;// doStop() is already executed + } + final String endpoint = this.endpoint.toString(); final String token = this.token.toString(); - this.provider = AccessController.doPrivileged( + var provider = AccessController.doPrivileged( (PrivilegedAction) 
() -> SdkTracerProvider.builder() .setResource( Resource.create( @@ -102,57 +163,59 @@ protected void doStart() { .build() ); - openTelemetry = OpenTelemetrySdk.builder() + var openTelemetry = OpenTelemetrySdk.builder() .setTracerProvider(provider) .setPropagators(ContextPropagators.create(W3CTraceContextPropagator.getInstance())) .build(); - tracer = openTelemetry.getTracer("elasticsearch", Version.CURRENT.toString()); + var tracer = openTelemetry.getTracer("elasticsearch", Version.CURRENT.toString()); + + assert this.services == null; + this.services = new APMServices(provider, tracer, openTelemetry); } - @Override - protected void doStop() { - final SdkTracerProvider provider = this.provider; - if (provider != null) { - provider.forceFlush().join(10L, TimeUnit.SECONDS); - provider.shutdown().join(30L, TimeUnit.SECONDS); + private void destroyApmServices() { + var services = this.services; + this.services = null; + if (services == null) { + return; } + spans.clear();// discard in-flight spans + services.provider.shutdown().whenComplete(shutdownPermits::release); } - @Override - protected void doClose() {} - @Override public void onTraceStarted(Traceable traceable) { - final Tracer tracer = this.tracer; - final OpenTelemetry openTelemetry = this.openTelemetry; - if (openTelemetry != null && tracer != null) { - spans.computeIfAbsent(traceable.getSpanId(), spanId -> { - final SpanBuilder spanBuilder = tracer.spanBuilder(traceable.getSpanName()); - Context parentContext = getParentSpanContext(openTelemetry); - if (parentContext != null) { - spanBuilder.setParent(parentContext); - } - for (Map.Entry entry : traceable.getAttributes().entrySet()) { - final Object value = entry.getValue(); - if (value instanceof String) { - spanBuilder.setAttribute(entry.getKey(), (String) value); - } else if (value instanceof Long) { - spanBuilder.setAttribute(entry.getKey(), (Long) value); - } else if (value instanceof Integer) { - spanBuilder.setAttribute(entry.getKey(), (Integer) value); - } else if (value instanceof Double) { - spanBuilder.setAttribute(entry.getKey(), (Double) value); - } else if (value instanceof Boolean) { - spanBuilder.setAttribute(entry.getKey(), (Boolean) value); - } else { - throw new IllegalArgumentException( - "span attributes do not support value type of [" + value.getClass().getCanonicalName() + "]" - ); - } - } - return spanBuilder.startSpan(); - }); + var services = this.services; + if (services == null) { + return; } + spans.computeIfAbsent(traceable.getSpanId(), spanId -> { + // services might be in shutdown sate by this point, but this is handled by the open telemetry internally + final SpanBuilder spanBuilder = services.tracer.spanBuilder(traceable.getSpanName()); + Context parentContext = getParentSpanContext(services.openTelemetry); + if (parentContext != null) { + spanBuilder.setParent(parentContext); + } + for (Map.Entry entry : traceable.getAttributes().entrySet()) { + final Object value = entry.getValue(); + if (value instanceof String) { + spanBuilder.setAttribute(entry.getKey(), (String) value); + } else if (value instanceof Long) { + spanBuilder.setAttribute(entry.getKey(), (Long) value); + } else if (value instanceof Integer) { + spanBuilder.setAttribute(entry.getKey(), (Integer) value); + } else if (value instanceof Double) { + spanBuilder.setAttribute(entry.getKey(), (Double) value); + } else if (value instanceof Boolean) { + spanBuilder.setAttribute(entry.getKey(), (Boolean) value); + } else { + throw new IllegalArgumentException( + "span attributes do not 
support value type of [" + value.getClass().getCanonicalName() + "]" + ); + } + } + return spanBuilder.startSpan(); + }); } private Context getParentSpanContext(OpenTelemetry openTelemetry) { @@ -177,13 +240,14 @@ private Context getParentSpanContext(OpenTelemetry openTelemetry) { } public Map getSpanHeadersById(String id) { + var services = this.services; var span = spans.get(id); - if (span == null) { + if (span == null || services == null) { return null; } - try (Scope scope = span.makeCurrent()) { + try (Scope ignore = span.makeCurrent()) { Map spanHeaders = new HashMap<>(); - openTelemetry.getPropagators().getTextMapPropagator().inject(Context.current(), spanHeaders, Map::put); + services.openTelemetry.getPropagators().getTextMapPropagator().inject(Context.current(), spanHeaders, Map::put); spanHeaders.keySet().removeIf(k -> isSupportedContextKey(k) == false); return spanHeaders; } From 9f68a26bac48369da1d54af7e98269613a24b4e2 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 18 Nov 2021 17:35:08 +0000 Subject: [PATCH 12/90] Spotless --- .../java/org/elasticsearch/tasks/Task.java | 2 +- .../elasticsearch/xpack/apm/APMTracer.java | 25 ++++++++++--------- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/tasks/Task.java b/server/src/main/java/org/elasticsearch/tasks/Task.java index 8521642ee4936..b0fc78d1915f2 100644 --- a/server/src/main/java/org/elasticsearch/tasks/Task.java +++ b/server/src/main/java/org/elasticsearch/tasks/Task.java @@ -243,7 +243,7 @@ public String getSpanName() { public Map getAttributes() { TaskId parentTask = getParentTaskId(); - Map attributes = new HashMap<>(); + Map attributes = new HashMap<>(); attributes.put(Traceable.AttributeKeys.TASK_ID, id); if (parentTask.isSet()) { attributes.put(Traceable.AttributeKeys.PARENT_TASK_ID, parentTask.toString()); diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 83737e79494be..4f21ba87d6999 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -220,18 +220,19 @@ public void onTraceStarted(Traceable traceable) { } } - // hack transactions to avoid the 'custom' type - spanBuilder.setAttribute("type", "elasticsearch"); - - // hack spans to avoid the 'app' span.type, will make it use external/elasticsearch - // also allows to set destination resource name in map - spanBuilder.setAttribute("messaging.system", "elasticsearch"); - spanBuilder.setAttribute("messaging.destination", clusterService.getNodeName()); - - // this will duplicate the "resource attributes" that are defined globally - // but providing them as span attributes allow easier mapping through labels as otel attributes are stored as-is only in 7.16. 
- spanBuilder.setAttribute(Traceable.AttributeKeys.NODE_NAME, clusterService.getNodeName()); - spanBuilder.setAttribute(Traceable.AttributeKeys.CLUSTER_NAME, clusterService.getClusterName().toString()); + // hack transactions to avoid the 'custom' type + spanBuilder.setAttribute("type", "elasticsearch"); + + // hack spans to avoid the 'app' span.type, will make it use external/elasticsearch + // also allows to set destination resource name in map + spanBuilder.setAttribute("messaging.system", "elasticsearch"); + spanBuilder.setAttribute("messaging.destination", clusterService.getNodeName()); + + // this will duplicate the "resource attributes" that are defined globally + // but providing them as span attributes allow easier mapping through labels as otel attributes are stored as-is only in + // 7.16. + spanBuilder.setAttribute(Traceable.AttributeKeys.NODE_NAME, clusterService.getNodeName()); + spanBuilder.setAttribute(Traceable.AttributeKeys.CLUSTER_NAME, clusterService.getClusterName().toString()); return spanBuilder.startSpan(); }); From f3f9835b0701d4e111a8d08221a3036f0a4fdbc2 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Fri, 19 Nov 2021 15:13:00 +1100 Subject: [PATCH 13/90] Add tracing for authorization (#80815) * Can be turned off with xpack.security.authz.tracing: false * AuthZ is tied to relevant task by traceparent (it however does not cover the first authZ) * x-opaque-id is auto configured at rest layer if not already exists. This helps chain all relevant actions together. * ApmIT now has security enabled. --- .../java/org/elasticsearch/node/Node.java | 9 ++- .../org/elasticsearch/plugins/Plugin.java | 6 ++ .../org/elasticsearch/tracing/Tracer.java | 7 ++ x-pack/plugin/apm-integration/build.gradle | 3 + .../org/elasticsearch/xpack/apm/ApmIT.java | 60 ++++++++++----- .../elasticsearch/xpack/apm/APMTracer.java | 5 ++ .../xpack/security/AuthorizationTracer.java | 73 +++++++++++++++++++ .../xpack/security/Security.java | 15 +++- .../security/authz/AuthorizationService.java | 57 ++++++++++++++- .../security/rest/SecurityRestFilter.java | 9 +++ .../xpack/security/LocalStateSecurity.java | 6 ++ .../authz/AuthorizationServiceTests.java | 13 +++- 12 files changed, 237 insertions(+), 26 deletions(-) create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/AuthorizationTracer.java diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index d3f75d38d2cae..2ed7202bc8611 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -195,6 +195,7 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.concurrent.CountDownLatch; @@ -753,7 +754,13 @@ protected Node( final IndexingPressure indexingLimits = new IndexingPressure(settings); final TaskTracer taskTracer = transportService.getTaskManager().getTaskTracer(); - pluginComponents.stream().map(c -> c instanceof Tracer ? (Tracer) c : null).forEach(taskTracer::addTracer); + final List tracers = pluginComponents.stream() + .map(c -> c instanceof Tracer ? 
(Tracer) c : null) + .filter(Objects::nonNull) + .collect(Collectors.toUnmodifiableList()); + tracers.forEach(taskTracer::addTracer); + + pluginsService.filterPlugins(Plugin.class).forEach(plugin -> plugin.onTracers(tracers)); final RecoverySettings recoverySettings = new RecoverySettings(settings, settingsModule.getClusterSettings()); RepositoriesModule repositoriesModule = new RepositoriesModule( diff --git a/server/src/main/java/org/elasticsearch/plugins/Plugin.java b/server/src/main/java/org/elasticsearch/plugins/Plugin.java index 9d784c3ba49c8..26d69eeeb1293 100644 --- a/server/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -27,6 +27,7 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParser; @@ -204,4 +205,9 @@ public void close() throws IOException { public Collection getAdditionalIndexSettingProviders() { return Collections.emptyList(); } + + /** + * Called with a list of Tracers so that each plugin can have a chance to work with them. + */ + public void onTracers(List tracers) {} } diff --git a/server/src/main/java/org/elasticsearch/tracing/Tracer.java b/server/src/main/java/org/elasticsearch/tracing/Tracer.java index dd4454b8e6b62..bba75825681a5 100644 --- a/server/src/main/java/org/elasticsearch/tracing/Tracer.java +++ b/server/src/main/java/org/elasticsearch/tracing/Tracer.java @@ -8,6 +8,8 @@ package org.elasticsearch.tracing; +import java.util.Map; + /** * Represents a distributed tracing system that keeps track of the start and end of various activities in the cluster. */ @@ -22,4 +24,9 @@ public interface Tracer { * Called when the {@link Traceable} activity ends. */ void onTraceStopped(Traceable traceable); + + /** + * Retrieve context related headers for the span of the given id. 
+ */ + Map getSpanHeadersById(String id); } diff --git a/x-pack/plugin/apm-integration/build.gradle b/x-pack/plugin/apm-integration/build.gradle index 2616d09aa15b4..2fa3c336e2334 100644 --- a/x-pack/plugin/apm-integration/build.gradle +++ b/x-pack/plugin/apm-integration/build.gradle @@ -56,6 +56,9 @@ dependencies { compileOnly project(path: xpackModule('core')) internalClusterTestImplementation(testArtifact(project(xpackModule('core')))) + internalClusterTestImplementation(testArtifact(project(xpackModule('security')))) { + exclude group: 'com.google.guava', module: 'guava' + } } // no unit-test for now diff --git a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java index be1b1cf5b09ac..b54a54a2781d2 100644 --- a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java +++ b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -15,21 +15,25 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchTransportService; -import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; -import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskTracer; -import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.SecurityIntegTestCase; +import org.elasticsearch.test.SecuritySettingsSource; +import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import org.junit.After; import java.util.Collection; @@ -44,7 +48,7 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; -public class ApmIT extends ESIntegTestCase { +public class ApmIT extends SecurityIntegTestCase { @Override protected Collection> nodePlugins() { @@ -53,15 +57,22 @@ protected Collection> nodePlugins() { @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { - final MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString(APMTracer.APM_ENDPOINT_SETTING.getKey(), System.getProperty("tests.apm.endpoint", "")); - secureSettings.setString(APMTracer.APM_TOKEN_SETTING.getKey(), System.getProperty("tests.apm.token", "")); - - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal, otherSettings)) - .put(APMTracer.APM_ENABLED_SETTING.getKey(), true) - .setSecureSettings(secureSettings) - .build(); + Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)); + ((MockSecureSettings) builder.getSecureSettings()).setString( + APMTracer.APM_ENDPOINT_SETTING.getKey(), + 
System.getProperty("tests.apm.endpoint", "") + ); + ((MockSecureSettings) builder.getSecureSettings()).setString( + APMTracer.APM_TOKEN_SETTING.getKey(), + System.getProperty("tests.apm.token", "") + ); + builder.put(APMTracer.APM_ENABLED_SETTING.getKey(), true).put("xpack.security.authz.tracing", true); + return builder.build(); + } + + @Override + protected boolean addMockHttpTransport() { + return false; } @After @@ -166,12 +177,23 @@ public void testSearch() throws Exception { final APMTracer.CapturingSpanExporter spanExporter = APMTracer.CAPTURING_SPAN_EXPORTER; spanExporter.clear(); - client().prepareSearch() - .setQuery(new RangeQueryBuilder("@timestamp").gt("2021-11-01")) - .setSearchType(SearchType.QUERY_THEN_FETCH) - .setPreFilterShardSize(1) - .execute() - .actionGet(10, TimeUnit.SECONDS); + final Request searchRequest = new Request("GET", "_search"); + searchRequest.addParameter("search_type", "query_then_fetch"); + searchRequest.addParameter("pre_filter_shard_size", "1"); + searchRequest.setJsonEntity("{\"query\":{\"range\":{\"@timestamp\":{\"gt\":\"2021-11-01\"}}}}"); + searchRequest.setOptions( + searchRequest.getOptions() + .toBuilder() + .addHeader( + "Authorization", + UsernamePasswordToken.basicAuthHeaderValue( + SecuritySettingsSource.TEST_USER_NAME, + new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()) + ) + ) + ); + + final Response searchResponse = getRestClient().performRequest(searchRequest); assertTrue(spanExporter.findSpanByName(SearchAction.NAME).findAny().isPresent()); assertTrue(spanExporter.findSpanByName(SearchTransportService.QUERY_CAN_MATCH_NODE_NAME).findAny().isPresent()); diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 56de34778ae86..7aa777fc5a504 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -214,6 +214,10 @@ public void onTraceStarted(Traceable traceable) { ); } } + final String xOpaqueId = threadPool.getThreadContext().getHeader(Task.X_OPAQUE_ID); + if (xOpaqueId != null) { + spanBuilder.setAttribute("es.x-opaque-id", xOpaqueId); + } return spanBuilder.startSpan(); }); } @@ -239,6 +243,7 @@ private Context getParentSpanContext(OpenTelemetry openTelemetry) { return null; } + @Override public Map getSpanHeadersById(String id) { var services = this.services; var span = spans.get(id); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/AuthorizationTracer.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/AuthorizationTracer.java new file mode 100644 index 0000000000000..b1fffbc0986c2 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/AuthorizationTracer.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.tracing.Traceable; +import org.elasticsearch.tracing.Tracer; + +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; + +public class AuthorizationTracer { + + private static final Logger logger = LogManager.getLogger(AuthorizationTracer.class); + + private final ThreadContext threadContext; + private final List tracers = new CopyOnWriteArrayList<>(); + + public AuthorizationTracer(ThreadContext threadContext) { + this.threadContext = threadContext; + } + + public void addTracer(Tracer tracer) { + if (tracer != null) { + tracers.add(tracer); + } + } + + public Runnable startTracing(Traceable traceable) { + for (Tracer tracer : tracers) { + try { + tracer.onTraceStarted(traceable); + } catch (Exception e) { + assert false : e; + logger.warn( + new ParameterizedMessage( + "authorization tracing listener [{}] failed on starting tracing of [{}][{}]", + tracer, + traceable.getSpanId(), + traceable.getSpanName() + ), + e + ); + } + } + return () -> { + for (Tracer tracer : tracers) { + try { + tracer.onTraceStopped(traceable); + } catch (Exception e) { + assert false : e; + logger.warn( + new ParameterizedMessage( + "authorization tracing listener [{}] failed on stopping tracing of [{}][{}]", + tracer, + traceable.getSpanId(), + traceable.getSpanName() + ), + e + ); + } + } + }; + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index f125fd186592e..9f18b81b870ce 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -75,6 +75,7 @@ import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportInterceptor; import org.elasticsearch.transport.TransportRequest; @@ -470,6 +471,7 @@ public class Security extends Plugin private final List securityExtensions = new ArrayList<>(); private final SetOnce transportReference = new SetOnce<>(); private final SetOnce scriptServiceReference = new SetOnce<>(); + private final SetOnce authorizationTracerReference = new SetOnce<>(); public Security(Settings settings, final Path configPath) { this(settings, configPath, Collections.emptyList()); @@ -810,6 +812,7 @@ Collection createComponents( } requestInterceptors = Collections.unmodifiableSet(requestInterceptors); + authorizationTracerReference.set(new AuthorizationTracer(threadContext.get())); final AuthorizationService authzService = new AuthorizationService( settings, allRolesStore, @@ -822,7 +825,8 @@ Collection createComponents( requestInterceptors, getLicenseState(), expressionResolver, - operatorPrivilegesService + operatorPrivilegesService, + authorizationTracerReference.get() ); components.add(nativeRolesStore); // used by roles actions @@ -1602,6 +1606,15 @@ public void loadExtensions(ExtensionLoader loader) { securityExtensions.addAll(loader.loadExtensions(SecurityExtension.class)); } + @Override + public 
void onTracers(List tracers) { + if (authorizationTracerReference.get() == null) { + // security is disabled + return; + } + tracers.forEach(t -> authorizationTracerReference.get().addTracer(t)); + } + private synchronized NioGroupFactory getNioGroupFactory(Settings settings) { if (nioGroupFactory.get() != null) { assert nioGroupFactory.get().getSettings().equals(settings) : "Different settings than originally provided"; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index bb271f1098da7..41a29162da9f1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -39,6 +39,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Traceable; import org.elasticsearch.transport.TransportActionProxy; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.MigrateToDataStreamAction; @@ -71,6 +72,7 @@ import org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.AuthorizationTracer; import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.audit.AuditLevel; import org.elasticsearch.xpack.security.audit.AuditTrail; @@ -111,6 +113,7 @@ public class AuthorizationService { true, Property.NodeScope ); + public static final Setting TRACE_AUTHORIZATION = Setting.boolSetting(setting("authz.tracing"), true, Property.NodeScope); private static final AuthorizationInfo SYSTEM_AUTHZ_INFO = () -> Collections.singletonMap( PRINCIPAL_ROLES_FIELD_NAME, new String[] { SystemUser.ROLE_NAME } @@ -132,6 +135,8 @@ public class AuthorizationService { private final Set requestInterceptors; private final XPackLicenseState licenseState; private final OperatorPrivilegesService operatorPrivilegesService; + private final AuthorizationTracer authorizationTracer; + private final boolean tracingEnabled; private final boolean isAnonymousEnabled; private final boolean anonymousAuthzExceptionEnabled; @@ -147,7 +152,8 @@ public AuthorizationService( Set requestInterceptors, XPackLicenseState licenseState, IndexNameExpressionResolver resolver, - OperatorPrivilegesService operatorPrivilegesService + OperatorPrivilegesService operatorPrivilegesService, + AuthorizationTracer authorizationTracer ) { this.clusterService = clusterService; this.auditTrailService = auditTrailService; @@ -163,6 +169,8 @@ public AuthorizationService( this.settings = settings; this.licenseState = licenseState; this.operatorPrivilegesService = operatorPrivilegesService; + this.authorizationTracer = authorizationTracer; + this.tracingEnabled = TRACE_AUTHORIZATION.get(settings); } public void checkPrivileges( @@ -211,6 +219,8 @@ public void authorize( final AuthorizationContext enclosingContext = extractAuthorizationContext(threadContext, action); + final Runnable tracer = maybeStartTracing(enclosingContext, authentication, action, originalRequest); + /* authorization fills in certain transient headers, which must be observed in the listener (action handler execution) * as 
well, but which must not bleed across different action context (eg parent-child action contexts). *

@@ -256,6 +266,8 @@ public void authorize( }, listener::onFailure), threadContext); engine.resolveAuthorizationInfo(requestInfo, authzInfoListener); } + } finally { + tracer.run(); } } @@ -309,6 +321,48 @@ private static ElasticsearchSecurityException internalError(String message) { return new ElasticsearchSecurityException(message); } + private Runnable maybeStartTracing( + AuthorizationContext enclosingContext, + Authentication authentication, + String action, + TransportRequest originalRequest + ) { + // Not tracing system actions + if (false == tracingEnabled || SystemUser.is(authentication.getUser())) { + return () -> {}; + } else { + return authorizationTracer.startTracing(new Traceable() { + @Override + public String getSpanId() { + return "authorize_" + System.identityHashCode(originalRequest); + } + + @Override + public String getSpanName() { + return "authorize(" + action + ")"; + } + + @Override + public Map getAttributes() { + final HashMap attributes = new HashMap<>( + Map.of( + "es.principal", + authentication.getUser().principal(), + "es.authentication.realm.name", + authentication.getAuthenticatedBy().getName(), + "es.node.name", + clusterService.getNodeName() + ) + ); + if (enclosingContext != null) { + attributes.put("originating_action", enclosingContext.getAction()); + } + return Map.copyOf(attributes); + } + }); + } + } + private void checkOperatorPrivileges(Authentication authentication, String action, TransportRequest originalRequest) throws ElasticsearchSecurityException { // Check operator privileges @@ -1074,5 +1128,6 @@ public void done(Collection indices) { public static void addSettings(List> settings) { settings.add(ANONYMOUS_AUTHORIZATION_EXCEPTION_SETTING); + settings.add(TRACE_AUTHORIZATION); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java index 22e53ecfd2511..214a7f0ab5aaa 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java @@ -13,6 +13,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -24,9 +25,11 @@ import org.elasticsearch.rest.RestRequest.Method; import org.elasticsearch.rest.RestRequestFilter; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.xcontent.MediaType; import org.elasticsearch.xcontent.MediaTypeRegistry; import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authc.support.SecondaryAuthenticator; import org.elasticsearch.xpack.security.transport.SSLEngineUtils; @@ -111,6 +114,12 @@ public void handleRequest(RestRequest request, RestChannel channel, NodeClient c } RemoteHostHeader.process(request, threadContext); try { + // Populate x-opaque-id if not already exists to chain all related actions together + if (authentication != null && false == SystemUser.is(authentication.getUser())) { + if 
(threadContext.getHeader(Task.X_OPAQUE_ID) == null) { + threadContext.putHeader(Task.X_OPAQUE_ID, UUIDs.base64UUID()); + } + } restHandler.handleRequest(request, channel, client); } catch (Exception e) { handleException(ActionType.RequestHandling, request, channel, e); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalStateSecurity.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalStateSecurity.java index 9946f5d2a2c49..d76643c50688b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalStateSecurity.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalStateSecurity.java @@ -19,6 +19,7 @@ import org.elasticsearch.protocol.xpack.XPackInfoResponse; import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import org.elasticsearch.xpack.core.action.TransportXPackInfoAction; @@ -112,4 +113,9 @@ protected Class protected Class> getInfoAction() { return SecurityTransportXPackInfoAction.class; } + + @Override + public void onTracers(List tracers) { + plugins.forEach(plugin -> plugin.onTracers(tracers)); + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 7ebb5dfb7491e..d51923951e960 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -155,6 +155,7 @@ import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.security.user.XPackSecurityUser; import org.elasticsearch.xpack.core.security.user.XPackUser; +import org.elasticsearch.xpack.security.AuthorizationTracer; import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.audit.AuditLevel; import org.elasticsearch.xpack.security.audit.AuditTrail; @@ -287,7 +288,8 @@ public void setup() { Collections.emptySet(), licenseState, TestIndexNameExpressionResolver.newInstance(), - operatorPrivilegesService + operatorPrivilegesService, + new AuthorizationTracer(threadContext) ); } @@ -1561,7 +1563,8 @@ public void testDenialForAnonymousUser() throws IOException { Collections.emptySet(), new XPackLicenseState(() -> 0), TestIndexNameExpressionResolver.newInstance(), - operatorPrivilegesService + operatorPrivilegesService, + new AuthorizationTracer(threadContext) ); RoleDescriptor role = new RoleDescriptor( @@ -1607,7 +1610,8 @@ public void testDenialForAnonymousUserAuthorizationExceptionDisabled() throws IO Collections.emptySet(), new XPackLicenseState(() -> 0), TestIndexNameExpressionResolver.newInstance(), - operatorPrivilegesService + operatorPrivilegesService, + new AuthorizationTracer(threadContext) ); RoleDescriptor role = new RoleDescriptor( @@ -2723,7 +2727,8 @@ public void getUserPrivileges( Collections.emptySet(), licenseState, TestIndexNameExpressionResolver.newInstance(), - operatorPrivilegesService + operatorPrivilegesService, + new AuthorizationTracer(threadContext) ); Authentication authentication; try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { 
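The authorization tracing added in the patch above is wired as a start/stop pair: maybeStartTracing returns a Runnable (a no-op when tracing is disabled or the request comes from a system user), and the authorize path invokes that Runnable in a finally block so the span is always closed, even when authorization fails. The following is a minimal, self-contained sketch of that shape only; the class, method, and variable names are illustrative, and the println calls stand in for starting and ending a real APM span rather than the production wiring through AuthorizationTracer.

    import java.util.function.Supplier;

    // Illustrative sketch of the start/stop tracing pattern, not the actual plugin code.
    class AuthorizationTracingSketch {

        // Stand-in for maybeStartTracing: a no-op when tracing is off,
        // otherwise a callback that ends the span that was just started.
        static Runnable maybeStartTracing(boolean tracingEnabled, Supplier<Runnable> startSpan) {
            return tracingEnabled ? startSpan.get() : () -> {};
        }

        static void authorizeWithTracing(boolean tracingEnabled, Runnable doAuthorize) {
            final Runnable stopTracing = maybeStartTracing(tracingEnabled, () -> {
                System.out.println("span started");            // stand-in for starting an APM span
                return () -> System.out.println("span ended"); // returned callback ends it
            });
            try {
                doAuthorize.run();                             // the actual authorization work
            } finally {
                stopTracing.run();                             // always runs, even if authorization threw
            }
        }
    }
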
From ed6223c68d68ec8e08ba98b54318919414d9c416 Mon Sep 17 00:00:00 2001 From: Sylvain Juge Date: Fri, 19 Nov 2021 10:49:09 +0100 Subject: [PATCH 14/90] use otel sem attributes when we can --- .../main/java/org/elasticsearch/xpack/apm/APMTracer.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 7f73c242cdda7..140657211fec9 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -29,6 +29,8 @@ import io.opentelemetry.sdk.trace.export.SpanExporter; import io.opentelemetry.semconv.resource.attributes.ResourceAttributes; +import io.opentelemetry.semconv.trace.attributes.SemanticAttributes; + import org.elasticsearch.Version; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; @@ -221,12 +223,13 @@ public void onTraceStarted(Traceable traceable) { } // hack transactions to avoid the 'custom' type + // this one is not part of OTel semantic attributes spanBuilder.setAttribute("type", "elasticsearch"); // hack spans to avoid the 'app' span.type, will make it use external/elasticsearch // also allows to set destination resource name in map - spanBuilder.setAttribute("messaging.system", "elasticsearch"); - spanBuilder.setAttribute("messaging.destination", clusterService.getNodeName()); + spanBuilder.setAttribute(SemanticAttributes.MESSAGING_SYSTEM, "elasticsearch"); + spanBuilder.setAttribute(SemanticAttributes.MESSAGING_DESTINATION, clusterService.getNodeName()); // this will duplicate the "resource attributes" that are defined globally // but providing them as span attributes allow easier mapping through labels as otel attributes are stored as-is only in From d95c63432f4f013eb5e717b7d1a43dd8a55ccaec Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 22 Nov 2021 10:11:47 +0000 Subject: [PATCH 15/90] Trace recoveries and cluster state updates (#80875) --- .../cluster/ClusterStatePublicationEvent.java | 9 ++ .../PublicationTransportHandler.java | 9 +- .../cluster/service/ClusterService.java | 5 + .../cluster/service/MasterService.java | 94 ++++++++++++------- .../common/util/concurrent/ThreadContext.java | 6 +- .../recovery/PeerRecoverySourceService.java | 21 +++-- .../recovery/RemoteRecoveryTargetHandler.java | 9 +- .../java/org/elasticsearch/node/Node.java | 2 + .../cluster/coordination/NodeJoinTests.java | 1 + .../PublicationTransportHandlerTests.java | 24 ++++- .../cluster/service/MasterServiceTests.java | 4 + .../PeerRecoverySourceServiceTests.java | 24 ++++- .../snapshots/SnapshotResiliencyTests.java | 1 + .../AbstractCoordinatorTestCase.java | 1 + .../test/ClusterServiceUtils.java | 2 + .../FakeThreadPoolMasterServiceTests.java | 2 + .../org/elasticsearch/xpack/apm/ApmIT.java | 67 +++++++++++++ .../java/org/elasticsearch/xpack/apm/APM.java | 27 ++++-- .../elasticsearch/xpack/apm/APMTracer.java | 6 +- .../security/authz/AuthorizationService.java | 2 +- 20 files changed, 250 insertions(+), 66 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterStatePublicationEvent.java b/server/src/main/java/org/elasticsearch/cluster/ClusterStatePublicationEvent.java index e306787dfbb4a..758579ebf0222 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterStatePublicationEvent.java 
+++ b/server/src/main/java/org/elasticsearch/cluster/ClusterStatePublicationEvent.java @@ -8,6 +8,8 @@ package org.elasticsearch.cluster; +import org.elasticsearch.tasks.Task; + /** * Represents a cluster state update computed by the {@link org.elasticsearch.cluster.service.MasterService} for publication to the cluster. * If publication is successful then this creates a {@link ClusterChangedEvent} which is applied on every node. @@ -22,6 +24,7 @@ public class ClusterStatePublicationEvent { private final String summary; private final ClusterState oldState; private final ClusterState newState; + private final Task task; private final long computationTimeMillis; private final long publicationStartTimeMillis; private volatile long publicationContextConstructionElapsedMillis = NOT_SET; @@ -33,12 +36,14 @@ public ClusterStatePublicationEvent( String summary, ClusterState oldState, ClusterState newState, + Task task, long computationTimeMillis, long publicationStartTimeMillis ) { this.summary = summary; this.oldState = oldState; this.newState = newState; + this.task = task; this.computationTimeMillis = computationTimeMillis; this.publicationStartTimeMillis = publicationStartTimeMillis; } @@ -55,6 +60,10 @@ public ClusterState getNewState() { return newState; } + public Task getTask() { + return task; + } + public long getComputationTimeMillis() { return computationTimeMillis; } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/PublicationTransportHandler.java b/server/src/main/java/org/elasticsearch/cluster/coordination/PublicationTransportHandler.java index 04afeabc360e2..203bcd543df61 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/PublicationTransportHandler.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/PublicationTransportHandler.java @@ -36,6 +36,7 @@ import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.internal.io.IOUtils; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.BytesTransportRequest; import org.elasticsearch.transport.TransportException; @@ -300,6 +301,7 @@ public class PublicationContext extends AbstractRefCounted { private final DiscoveryNodes discoveryNodes; private final ClusterState newState; private final ClusterState previousState; + private final Task task; private final boolean sendFullVersion; // All the values of these maps have one ref for the context (while it's open) and one for each in-flight message. 
@@ -310,6 +312,7 @@ public class PublicationContext extends AbstractRefCounted { discoveryNodes = clusterStatePublicationEvent.getNewState().nodes(); newState = clusterStatePublicationEvent.getNewState(); previousState = clusterStatePublicationEvent.getOldState(); + task = clusterStatePublicationEvent.getTask(); sendFullVersion = previousState.getBlocks().disableStatePersistence(); } @@ -376,10 +379,11 @@ public void sendApplyCommit( ActionListener listener ) { assert transportService.getThreadPool().getThreadContext().isSystemContext(); - transportService.sendRequest( + transportService.sendChildRequest( destination, COMMIT_STATE_ACTION_NAME, applyCommitRequest, + task, STATE_REQUEST_OPTIONS, new ActionListenerResponseHandler<>(listener, in -> TransportResponse.Empty.INSTANCE, ThreadPool.Names.GENERIC) ); @@ -450,10 +454,11 @@ private void sendClusterState( return; } try { - transportService.sendRequest( + transportService.sendChildRequest( destination, PUBLISH_STATE_ACTION_NAME, new BytesTransportRequest(bytes, destination.getVersion()), + task, STATE_REQUEST_OPTIONS, new ActionListenerResponseHandler( ActionListener.runAfter(listener, bytes::decRef), diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java index 1f003496d4d70..4c88a31431c17 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.Node; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; import java.util.Collections; @@ -289,4 +290,8 @@ public void submitStateUpdateTasks( ) { masterService.submitStateUpdateTasks(source, tasks, config, executor); } + + public void setTaskManager(TaskManager taskManager) { + masterService.setTaskManager(taskManager); + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java index ec2508fef7a76..a54e7aec2c3bb 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java @@ -41,6 +41,10 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.node.Node; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskAwareRequest; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; @@ -75,6 +79,8 @@ public class MasterService extends AbstractLifecycleComponent { static final String MASTER_UPDATE_THREAD_NAME = "masterService#updateTask"; + public static final String STATE_UPDATE_ACTION_NAME = "internal:cluster/coordination/update_state"; + ClusterStatePublisher clusterStatePublisher; private final String nodeName; @@ -85,6 +91,7 @@ public class MasterService extends AbstractLifecycleComponent { private final TimeValue starvationLoggingThreshold; protected final ThreadPool threadPool; + private volatile TaskManager taskManager; private volatile PrioritizedEsThreadPoolExecutor threadPoolExecutor; private volatile Batcher taskBatcher; @@ -102,6 +109,10 @@ public MasterService(Settings 
settings, ClusterSettings clusterSettings, ThreadP this.threadPool = threadPool; } + public void setTaskManager(TaskManager taskManager) { + this.taskManager = taskManager; + } + private void setSlowTaskLoggingThreshold(TimeValue slowTaskLoggingThreshold) { this.slowTaskLoggingThreshold = slowTaskLoggingThreshold; } @@ -244,41 +255,60 @@ private void runTasks(TaskInputs taskInputs) { logExecutionTime(executionTime, "notify listeners on unchanged cluster state", summary); clusterStateUpdateStatsTracker.onUnchangedClusterState(computationTime.millis(), executionTime.millis()); } else { - final ClusterState newClusterState = taskOutputs.newClusterState; - if (logger.isTraceEnabled()) { - logger.trace("cluster state updated, source [{}]\n{}", summary, newClusterState); - } else { - logger.debug("cluster state updated, version [{}], source [{}]", newClusterState.version(), summary); - } - final long publicationStartTime = threadPool.rawRelativeTimeInMillis(); - try { - final ClusterStatePublicationEvent clusterStatePublicationEvent = new ClusterStatePublicationEvent( - summary, - previousClusterState, - newClusterState, - computationTime.millis(), - publicationStartTime - ); + final Task task = taskManager.register("master", STATE_UPDATE_ACTION_NAME, new TaskAwareRequest() { + @Override + public void setParentTask(TaskId taskId) {} - // new cluster state, notify all listeners - final DiscoveryNodes.Delta nodesDelta = newClusterState.nodes().delta(previousClusterState.nodes()); - if (nodesDelta.hasChanges() && logger.isInfoEnabled()) { - String nodesDeltaSummary = nodesDelta.shortSummary(); - if (nodesDeltaSummary.length() > 0) { - logger.info( - "{}, term: {}, version: {}, delta: {}", - summary, - newClusterState.term(), - newClusterState.version(), - nodesDeltaSummary - ); - } + @Override + public TaskId getParentTask() { + return TaskId.EMPTY_TASK_ID; } - logger.debug("publishing cluster state version [{}]", newClusterState.version()); - publish(clusterStatePublicationEvent, taskOutputs); - } catch (Exception e) { - handleException(summary, publicationStartTime, newClusterState, e); + @Override + public String getDescription() { + return "publication of cluster state [" + taskOutputs.newClusterState.getVersion() + "]"; + } + }); + try { + final ClusterState newClusterState = taskOutputs.newClusterState; + if (logger.isTraceEnabled()) { + logger.trace("cluster state updated, source [{}]\n{}", summary, newClusterState); + } else { + logger.debug("cluster state updated, version [{}], source [{}]", newClusterState.version(), summary); + } + final long publicationStartTime = threadPool.rawRelativeTimeInMillis(); + try { + final ClusterStatePublicationEvent clusterStatePublicationEvent = new ClusterStatePublicationEvent( + summary, + previousClusterState, + newClusterState, + task, + computationTime.millis(), + publicationStartTime + ); + + // new cluster state, notify all listeners + final DiscoveryNodes.Delta nodesDelta = newClusterState.nodes().delta(previousClusterState.nodes()); + if (nodesDelta.hasChanges() && logger.isInfoEnabled()) { + String nodesDeltaSummary = nodesDelta.shortSummary(); + if (nodesDeltaSummary.length() > 0) { + logger.info( + "{}, term: {}, version: {}, delta: {}", + summary, + newClusterState.term(), + newClusterState.version(), + nodesDeltaSummary + ); + } + } + + logger.debug("publishing cluster state version [{}]", newClusterState.version()); + publish(clusterStatePublicationEvent, taskOutputs); + } catch (Exception e) { + handleException(summary, 
publicationStartTime, newClusterState, e); + } + } finally { + taskManager.unregister(task); } } } diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index ae350934eab75..40eb6b9956bd5 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Tuple; import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.tasks.Task; @@ -487,10 +488,7 @@ public boolean isSystemContext() { } @FunctionalInterface - public interface StoredContext extends AutoCloseable { - @Override - void close(); - + public interface StoredContext extends AutoCloseable, Releasable { default void restore() { close(); } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java index b0498e619fca5..f910bf4dea58b 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java @@ -133,7 +133,7 @@ public void clusterChanged(ClusterChangedEvent event) { } } - private void recover(StartRecoveryRequest request, ActionListener listener) { + private void recover(StartRecoveryRequest request, Task task, ActionListener listener) { final IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex()); final IndexShard shard = indexService.getShard(request.shardId().id()); @@ -153,7 +153,7 @@ private void recover(StartRecoveryRequest request, ActionListener { @Override public void messageReceived(final StartRecoveryRequest request, final TransportChannel channel, Task task) throws Exception { - recover(request, new ChannelActionListener<>(channel, Actions.START_RECOVERY, request)); + recover(request, task, new ChannelActionListener<>(channel, Actions.START_RECOVERY, request)); } } @@ -204,10 +204,10 @@ final class OngoingRecoveries { @Nullable private List> emptyListeners; - synchronized RecoverySourceHandler addNewRecovery(StartRecoveryRequest request, IndexShard shard) { + synchronized RecoverySourceHandler addNewRecovery(StartRecoveryRequest request, Task task, IndexShard shard) { assert lifecycle.started(); final ShardRecoveryContext shardContext = ongoingRecoveries.computeIfAbsent(shard, s -> new ShardRecoveryContext()); - final Tuple handlers = shardContext.addNewRecovery(request, shard); + final Tuple handlers = shardContext.addNewRecovery(request, task, shard); final RemoteRecoveryTargetHandler recoveryTargetHandler = handlers.v2(); nodeToHandlers.computeIfAbsent(recoveryTargetHandler.targetNode(), k -> new HashSet<>()).add(recoveryTargetHandler); shard.recoveryStats().incCurrentAsSource(); @@ -307,6 +307,7 @@ private final class ShardRecoveryContext { */ synchronized Tuple addNewRecovery( StartRecoveryRequest request, + Task task, IndexShard shard ) { for (RecoverySourceHandler existingHandler : recoveryHandlers.keySet()) { @@ -317,7 +318,11 @@ synchronized Tuple addNewRec ); } } - final Tuple handlers = 
createRecoverySourceHandler(request, shard); + final Tuple handlers = createRecoverySourceHandler( + request, + task, + shard + ); recoveryHandlers.put(handlers.v1(), handlers.v2()); return handlers; } @@ -344,6 +349,7 @@ synchronized void reestablishRecovery(ReestablishRecoveryRequest request, Action private Tuple createRecoverySourceHandler( StartRecoveryRequest request, + Task task, IndexShard shard ) { RecoverySourceHandler handler; @@ -353,7 +359,8 @@ private Tuple createRecovery transportService, request.targetNode(), recoverySettings, - throttleTime -> shard.recoveryStats().addThrottleTime(throttleTime) + throttleTime -> shard.recoveryStats().addThrottleTime(throttleTime), + task ); handler = new RecoverySourceHandler( shard, diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java index a96a3b7e3576b..eaeb825cc9588 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java @@ -35,6 +35,7 @@ import org.elasticsearch.index.store.StoreFileMetadata; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.repositories.IndexId; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.RemoteTransportException; @@ -70,6 +71,7 @@ public class RemoteRecoveryTargetHandler implements RecoveryTargetHandler { private final Consumer onSourceThrottle; private final boolean retriesSupported; + private final Task task; private volatile boolean isCancelled = false; public RemoteRecoveryTargetHandler( @@ -78,7 +80,8 @@ public RemoteRecoveryTargetHandler( TransportService transportService, DiscoveryNode targetNode, RecoverySettings recoverySettings, - Consumer onSourceThrottle + Consumer onSourceThrottle, + Task task ) { this.transportService = transportService; this.threadPool = transportService.getThreadPool(); @@ -97,6 +100,7 @@ public RemoteRecoveryTargetHandler( ); this.standardTimeoutRequestOptions = TransportRequestOptions.timeout(recoverySettings.internalActionTimeout()); this.retriesSupported = targetNode.getVersion().onOrAfter(Version.V_7_9_0); + this.task = task; } public DiscoveryNode targetNode() { @@ -345,10 +349,11 @@ private void executeRetryableAction( @Override public void tryAction(ActionListener listener) { if (request.tryIncRef()) { - transportService.sendRequest( + transportService.sendChildRequest( targetNode, action, request, + task, options, new ActionListenerResponseHandler<>( ActionListener.runBefore(listener, request::decRef), diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 2ed7202bc8611..9ea9c164d1fc1 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -753,6 +753,8 @@ protected Node( final HttpServerTransport httpServerTransport = newHttpTransport(networkModule); final IndexingPressure indexingLimits = new IndexingPressure(settings); + clusterService.setTaskManager(transportService.getTaskManager()); + final TaskTracer taskTracer = transportService.getTaskManager().getTaskTracer(); final List tracers = pluginComponents.stream() .map(c -> c instanceof Tracer ? 
(Tracer) c : null) diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java index 1179b37068092..55aabc175e0a6 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java @@ -198,6 +198,7 @@ protected void onSendRequest(long requestId, String action, TransportRequest req clusterSettings, Collections.emptySet() ); + masterService.setTaskManager(transportService.getTaskManager()); coordinator = new Coordinator( "test_node", Settings.EMPTY, diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTransportHandlerTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTransportHandlerTests.java index b64cdacb7d08b..1518da2ddc5e6 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTransportHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTransportHandlerTests.java @@ -30,6 +30,8 @@ import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.internal.io.IOUtils; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.transport.MockTransport; @@ -49,6 +51,8 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static java.util.Collections.emptyMap; +import static org.elasticsearch.cluster.service.MasterService.STATE_UPDATE_ACTION_NAME; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -102,7 +106,16 @@ public void writeTo(StreamOutput out) throws IOException { final ElasticsearchException e = expectThrows( ElasticsearchException.class, - () -> handler.newPublicationContext(new ClusterStatePublicationEvent("test", clusterState, unserializableClusterState, 0L, 0L)) + () -> handler.newPublicationContext( + new ClusterStatePublicationEvent( + "test", + clusterState, + unserializableClusterState, + new Task(randomNonNegativeLong(), "test", STATE_UPDATE_ACTION_NAME, "", TaskId.EMPTY_TASK_ID, emptyMap()), + 0L, + 0L + ) + ) ); assertNotNull(e.getCause()); assertThat(e.getCause(), instanceOf(IOException.class)); @@ -275,7 +288,14 @@ public void writeTo(StreamOutput out) throws IOException { final PublicationTransportHandler.PublicationContext context; try { context = handler.newPublicationContext( - new ClusterStatePublicationEvent("test", prevClusterState, nextClusterState, 0L, 0L) + new ClusterStatePublicationEvent( + "test", + prevClusterState, + nextClusterState, + new Task(randomNonNegativeLong(), "test", STATE_UPDATE_ACTION_NAME, "", TaskId.EMPTY_TASK_ID, emptyMap()), + 0L, + 0L + ) ); } catch (ElasticsearchException e) { assertTrue(simulateFailures); diff --git a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java index 14bc034ff46d9..8ca471204e4c6 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.core.TimeValue; import 
org.elasticsearch.core.Tuple; import org.elasticsearch.node.Node; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; @@ -131,6 +132,7 @@ private MasterService createMasterService(boolean makeMaster) { publishListener.onResponse(null); }); masterService.setClusterStateSupplier(clusterStateRef::get); + masterService.setTaskManager(new TaskManager(Settings.EMPTY, threadPool, emptySet())); masterService.start(); return masterService; } @@ -776,6 +778,7 @@ public void testLongClusterStateUpdateLogging() throws Exception { threadPool ) ) { + masterService.setTaskManager(new TaskManager(Settings.EMPTY, threadPool, emptySet())); final DiscoveryNode localNode = new DiscoveryNode( "node1", @@ -968,6 +971,7 @@ public void testAcking() throws InterruptedException { publisherRef.get().publish(e, pl, al); }); masterService.setClusterStateSupplier(() -> initialClusterState); + masterService.setTaskManager(new TaskManager(Settings.EMPTY, threadPool, emptySet())); masterService.start(); // check that we don't time out before even committing the cluster state diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoverySourceServiceTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoverySourceServiceTests.java index 7ce07dc1178e6..66503bf4cea79 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoverySourceServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoverySourceServiceTests.java @@ -17,11 +17,15 @@ import org.elasticsearch.index.store.Store; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.recovery.plan.RecoveryPlannerService; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.NodeRoles; import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.Collections; +import static org.elasticsearch.indices.recovery.PeerRecoverySourceService.Actions.START_RECOVERY; import static org.hamcrest.Matchers.containsString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -52,15 +56,29 @@ public void testDuplicateRecoveries() throws IOException { true ); peerRecoverySourceService.start(); - RecoverySourceHandler handler = peerRecoverySourceService.ongoingRecoveries.addNewRecovery(startRecoveryRequest, primary); + + final Task recoveryTask = new Task( + randomNonNegativeLong(), + "test", + START_RECOVERY, + "", + TaskId.EMPTY_TASK_ID, + Collections.emptyMap() + ); + + RecoverySourceHandler handler = peerRecoverySourceService.ongoingRecoveries.addNewRecovery( + startRecoveryRequest, + recoveryTask, + primary + ); DelayRecoveryException delayRecoveryException = expectThrows( DelayRecoveryException.class, - () -> peerRecoverySourceService.ongoingRecoveries.addNewRecovery(startRecoveryRequest, primary) + () -> peerRecoverySourceService.ongoingRecoveries.addNewRecovery(startRecoveryRequest, recoveryTask, primary) ); assertThat(delayRecoveryException.getMessage(), containsString("recovery with same target already registered")); peerRecoverySourceService.ongoingRecoveries.remove(primary, handler); // re-adding after removing previous attempt works - handler = peerRecoverySourceService.ongoingRecoveries.addNewRecovery(startRecoveryRequest, primary); + handler = 
peerRecoverySourceService.ongoingRecoveries.addNewRecovery(startRecoveryRequest, recoveryTask, primary); peerRecoverySourceService.ongoingRecoveries.remove(primary, handler); closeShards(primary); } diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 2a7be83c412ab..33fae0975f39d 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -1692,6 +1692,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { null, emptySet() ); + masterService.setTaskManager(transportService.getTaskManager()); final IndexNameExpressionResolver indexNameExpressionResolver = TestIndexNameExpressionResolver.newInstance(); bigArrays = new BigArrays(new PageCacheRecycler(settings), null, "test"); repositoriesService = new RepositoriesService( diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java index cc41ca120ab87..09701224695f3 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -1179,6 +1179,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { threadPool, runnable -> deterministicTaskQueue.scheduleNow(onNode(runnable)) ); + masterService.setTaskManager(transportService.getTaskManager()); final ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); clusterApplierService = new DisruptableClusterApplierService( localNode.getId(), diff --git a/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java index e85493b62821b..ca93c48a453dc 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.node.NodeClosedException; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; import java.util.Collections; @@ -125,6 +126,7 @@ public static ClusterService createClusterService(ThreadPool threadPool, Discove clusterService.getClusterApplierService().setInitialState(initialClusterState); clusterService.getMasterService().setClusterStatePublisher(createClusterStatePublisher(clusterService.getClusterApplierService())); clusterService.getMasterService().setClusterStateSupplier(clusterService.getClusterApplierService()::state); + clusterService.setTaskManager(new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())); clusterService.start(); return clusterService; } diff --git a/test/framework/src/test/java/org/elasticsearch/cluster/service/FakeThreadPoolMasterServiceTests.java b/test/framework/src/test/java/org/elasticsearch/cluster/service/FakeThreadPoolMasterServiceTests.java index 634604230304e..181ec8aa75060 100644 --- a/test/framework/src/test/java/org/elasticsearch/cluster/service/FakeThreadPoolMasterServiceTests.java +++ 
b/test/framework/src/test/java/org/elasticsearch/cluster/service/FakeThreadPoolMasterServiceTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -73,6 +74,7 @@ public void testFakeMasterService() { lastClusterStateRef.set(clusterStatePublicationEvent.getNewState()); publishingCallback.set(publishListener); }); + masterService.setTaskManager(new TaskManager(Settings.EMPTY, mockThreadPool, Collections.emptySet())); masterService.start(); AtomicBoolean firstTaskCompleted = new AtomicBoolean(); diff --git a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java index b54a54a2781d2..938ad0466bb90 100644 --- a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java +++ b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -18,11 +18,13 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.cluster.coordination.PublicationTransportHandler; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.tasks.Task; @@ -42,9 +44,12 @@ import java.util.concurrent.TimeUnit; import static java.util.stream.Collectors.toList; +import static org.elasticsearch.cluster.service.MasterService.STATE_UPDATE_ACTION_NAME; +import static org.elasticsearch.indices.recovery.PeerRecoverySourceService.Actions.START_RECOVERY; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; @@ -126,6 +131,68 @@ public void testRecordsNestedSpans() { } } + public void testRecovery() throws Exception { + internalCluster().ensureAtLeastNumDataNodes(2); + + assertAcked( + client().admin() + .indices() + .prepareCreate("test-index") + .setSettings( + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + ) + ); + + ensureGreen("test-index"); + + indexRandom(true, true, client().prepareIndex("test-index").setSource("{}", XContentType.JSON)); + flushAndRefresh("test-index"); + + final APMTracer.CapturingSpanExporter spanExporter = APMTracer.CAPTURING_SPAN_EXPORTER; + spanExporter.clear(); + + assertAcked( + client().admin() + .indices() + .prepareUpdateSettings("test-index") + .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)) + ); + + ensureGreen("test-index"); + + final SpanData clusterUpdateSpan = 
spanExporter.findSpanByName(STATE_UPDATE_ACTION_NAME) + .findAny() + .orElseThrow(() -> new AssertionError("not found")); + + final List clusterUpdateChildActions = spanExporter.findSpan( + spanData -> spanData.getParentSpanId().equals(clusterUpdateSpan.getSpanId()) + ).map(SpanData::getName).collect(toList()); + + assertThat( + clusterUpdateChildActions, + hasItems(PublicationTransportHandler.PUBLISH_STATE_ACTION_NAME, PublicationTransportHandler.COMMIT_STATE_ACTION_NAME) + ); + + final SpanData recoverySpan = spanExporter.findSpanByName(START_RECOVERY) + .findAny() + .orElseThrow(() -> new AssertionError("not found")); + final List recoveryChildActions = spanExporter.findSpan( + spanData -> spanData.getParentSpanId().equals(recoverySpan.getSpanId()) + ).map(SpanData::getName).collect(toList()); + + assertThat( + recoveryChildActions, + hasItems( + PeerRecoveryTargetService.Actions.FILES_INFO, + PeerRecoveryTargetService.Actions.FILE_CHUNK, + PeerRecoveryTargetService.Actions.CLEAN_FILES, + PeerRecoveryTargetService.Actions.PREPARE_TRANSLOG, + PeerRecoveryTargetService.Actions.FINALIZE + ) + ); + + } + public void testSearch() throws Exception { internalCluster().ensureAtLeastNumDataNodes(2); diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java index 624c89a543847..47ac65a1a2778 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Releasable; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.plugins.NetworkPlugin; @@ -98,20 +99,26 @@ public void sendRequest( TransportRequestOptions options, TransportResponseHandler handler ) { - var aTracer = tracer.get(); - if (aTracer == null || aTracer.isEnabled() == false) { + try (var ignored = withParentContext(String.valueOf(request.getParentTask().getId()))) { sender.sendRequest(connection, action, request, options, handler); - return; } - var headers = aTracer.getSpanHeadersById(String.valueOf(request.getParentTask().getId())); - if (headers == null) { - sender.sendRequest(connection, action, request, options, handler); - return; + } + + private Releasable withParentContext(String parentTaskId) { + var aTracer = tracer.get(); + if (aTracer == null) { + return null; } - try (var ignore = threadContext.removeRequestHeaders(TRACE_HEADERS)) { - threadContext.putHeader(headers); - sender.sendRequest(connection, action, request, options, handler); + if (aTracer.isEnabled() == false) { + return null; + } + var headers = aTracer.getSpanHeadersById(parentTaskId); + if (headers == null) { + return null; } + final Releasable releasable = threadContext.removeRequestHeaders(TRACE_HEADERS); + threadContext.putHeader(headers); + return releasable; } } } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 140657211fec9..f3f8e584e8dbf 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ 
b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -28,7 +28,6 @@ import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; import io.opentelemetry.sdk.trace.export.SpanExporter; import io.opentelemetry.semconv.resource.attributes.ResourceAttributes; - import io.opentelemetry.semconv.trace.attributes.SemanticAttributes; import org.elasticsearch.Version; @@ -125,7 +124,8 @@ protected void doStart() { protected void doStop() { destroyApmServices(); try { - shutdownPermits.tryAcquire(Integer.MAX_VALUE, 30L, TimeUnit.SECONDS); + final boolean stopped = shutdownPermits.tryAcquire(Integer.MAX_VALUE, 30L, TimeUnit.SECONDS); + assert stopped : "did not stop tracing within timeout"; } catch (InterruptedException e) { Thread.currentThread().interrupt(); } @@ -196,7 +196,7 @@ public void onTraceStarted(Traceable traceable) { return; } spans.computeIfAbsent(traceable.getSpanId(), spanId -> { - // services might be in shutdown sate by this point, but this is handled by the open telemetry internally + // services might be in shutdown state by this point, but this is handled by the open telemetry internally final SpanBuilder spanBuilder = services.tracer.spanBuilder(traceable.getSpanName()); Context parentContext = getParentSpanContext(services.openTelemetry); if (parentContext != null) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 41a29162da9f1..986ce907c1af1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -328,7 +328,7 @@ private Runnable maybeStartTracing( TransportRequest originalRequest ) { // Not tracing system actions - if (false == tracingEnabled || SystemUser.is(authentication.getUser())) { + if (false == tracingEnabled || SystemUser.is(authentication.getUser()) || threadContext.isSystemContext()) { return () -> {}; } else { return authorizationTracer.startTracing(new Traceable() { From 0d58db7fee8edb12f744a4aac6a07cb290d020fe Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Mon, 22 Nov 2021 15:04:23 +0200 Subject: [PATCH 16/90] Add `xpack.apm.tracing.names.include` setting for filtering (#80871) This commit adds a dynamic cluster setting called `xpack.apm.tracing.names.include` which allows the user to filter on the names of the transactions for which tracing is enabled. 
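For illustration only, the new setting can be toggled at runtime through the cluster settings API, which is exactly what the integration tests below do. A minimal Java sketch follows, assuming an already-connected Client instance; the class and method names are made up for the example.

    import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
    import org.elasticsearch.client.Client;
    import org.elasticsearch.common.settings.Settings;

    // Minimal sketch, assuming `client` is an existing Client instance.
    // The value is a comma-separated list of simple wildcard patterns; when it is
    // non-empty, only spans whose names match one of the patterns are recorded.
    class ApmTracingSettingsExample {
        static void restrictTracingToTaskListActions(Client client) {
            client.admin()
                .cluster()
                .updateSettings(
                    new ClusterUpdateSettingsRequest().persistentSettings(
                        Settings.builder().put("xpack.apm.tracing.names.include", "*/tasks/lists*").build()
                    )
                )
                .actionGet();
        }
    }

Setting the same key back to a null value, as the tests' finally blocks do, removes the filter so that all span names are traced again.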
--- .../org/elasticsearch/xpack/apm/ApmIT.java | 100 ++++++++++++++++++ .../java/org/elasticsearch/xpack/apm/APM.java | 7 +- .../elasticsearch/xpack/apm/APMTracer.java | 29 +++++ 3 files changed, 135 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java index 938ad0466bb90..5a722e11fee41 100644 --- a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java +++ b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -10,6 +10,7 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.sdk.trace.data.SpanData; +import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -42,11 +43,13 @@ import java.util.Collections; import java.util.List; import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; import static java.util.stream.Collectors.toList; import static org.elasticsearch.cluster.service.MasterService.STATE_UPDATE_ACTION_NAME; import static org.elasticsearch.indices.recovery.PeerRecoverySourceService.Actions.START_RECOVERY; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItems; @@ -294,4 +297,101 @@ public void testDoesNotRecordSpansWhenDisabled() { .actionGet(); } } + + public void testFilterByNameGivenSingleCompleteMatch() { + + client().admin() + .cluster() + .updateSettings( + new ClusterUpdateSettingsRequest().persistentSettings( + Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), "cluster:monitor/tasks/lists").build() + ) + ) + .actionGet(); + + APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events + + try { + client().admin().cluster().prepareListTasks().get(); + + var parentTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists").collect(toList()); + assertThat(parentTasks, hasSize(1)); + + var childrenTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists[n]").collect(toList()); + assertThat(childrenTasks, empty()); + } finally { + client().admin() + .cluster() + .updateSettings( + new ClusterUpdateSettingsRequest().persistentSettings( + Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), (String) null).build() + ) + ) + .actionGet(); + } + } + + public void testFilterByNameGivenSinglePattern() { + + client().admin() + .cluster() + .updateSettings( + new ClusterUpdateSettingsRequest().persistentSettings( + Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), "*/tasks/lists*").build() + ) + ) + .actionGet(); + + APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events + + try { + client().admin().cluster().prepareListTasks().get(); + + var parentTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists").collect(toList()); + assertThat(parentTasks, hasSize(1)); + + var childrenTasks = 
APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists[n]").collect(toList()); + assertThat(childrenTasks, hasSize(internalCluster().size())); + } finally { + client().admin() + .cluster() + .updateSettings( + new ClusterUpdateSettingsRequest().persistentSettings( + Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), (String) null).build() + ) + ) + .actionGet(); + } + } + + public void testFilterByNameGivenTwoPatterns() { + + client().admin() + .cluster() + .updateSettings( + new ClusterUpdateSettingsRequest().persistentSettings( + Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), "*/tasks/lists,*/nodes/stats").build() + ) + ) + .actionGet(); + + APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events + + try { + client().admin().cluster().prepareListTasks().get(); + client().admin().cluster().nodesStats(new NodesStatsRequest()).actionGet(); + + var spans = APMTracer.CAPTURING_SPAN_EXPORTER.getCapturedSpans().stream().map(SpanData::getName).collect(Collectors.toSet()); + assertThat(spans, contains("cluster:monitor/nodes/stats", "cluster:monitor/tasks/lists")); + } finally { + client().admin() + .cluster() + .updateSettings( + new ClusterUpdateSettingsRequest().persistentSettings( + Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), (String) null).build() + ) + ) + .actionGet(); + } + } } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java index 47ac65a1a2778..b5ffc4ff121ef 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java @@ -69,7 +69,12 @@ public Collection createComponents( @Override public List> getSettings() { - return List.of(APMTracer.APM_ENABLED_SETTING, APMTracer.APM_ENDPOINT_SETTING, APMTracer.APM_TOKEN_SETTING); + return List.of( + APMTracer.APM_ENABLED_SETTING, + APMTracer.APM_ENDPOINT_SETTING, + APMTracer.APM_TOKEN_SETTING, + APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING + ); } public List getTransportInterceptors(NamedWriteableRegistry namedWriteableRegistry, ThreadContext threadContext) { diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index f3f8e584e8dbf..d46823634f452 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -34,6 +34,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; @@ -46,6 +47,7 @@ import java.security.AccessController; import java.security.PrivilegedAction; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -53,6 +55,7 @@ import java.util.Queue; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; +import java.util.function.Function; import java.util.function.Predicate; import 
java.util.stream.Collectors; import java.util.stream.Stream; @@ -67,6 +70,13 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic static final Setting APM_ENABLED_SETTING = Setting.boolSetting("xpack.apm.tracing.enabled", false, Dynamic, NodeScope); static final Setting APM_ENDPOINT_SETTING = SecureSetting.secureString("xpack.apm.endpoint", null); static final Setting APM_TOKEN_SETTING = SecureSetting.secureString("xpack.apm.token", null); + static final Setting> APM_TRACING_NAMES_INCLUDE_SETTING = Setting.listSetting( + "xpack.apm.tracing.names.include", + Collections.emptyList(), + Function.identity(), + Dynamic, + NodeScope + ); private final Semaphore shutdownPermits = new Semaphore(Integer.MAX_VALUE); private final Map spans = ConcurrentCollections.newConcurrentMap(); @@ -78,6 +88,8 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic private volatile boolean enabled; private volatile APMServices services; + private List includeNames; + /** This class is required to make all open telemetry services visible at once */ private static class APMServices { private final SdkTracerProvider provider; @@ -97,7 +109,9 @@ public APMTracer(Settings settings, ThreadPool threadPool, ClusterService cluste this.endpoint = APM_ENDPOINT_SETTING.get(settings); this.token = APM_TOKEN_SETTING.get(settings); this.enabled = APM_ENABLED_SETTING.get(settings); + this.includeNames = APM_TRACING_NAMES_INCLUDE_SETTING.get(settings); clusterService.getClusterSettings().addSettingsUpdateConsumer(APM_ENABLED_SETTING, this::setEnabled); + clusterService.getClusterSettings().addSettingsUpdateConsumer(APM_TRACING_NAMES_INCLUDE_SETTING, this::setIncludeNames); } public boolean isEnabled() { @@ -113,6 +127,10 @@ private void setEnabled(boolean enabled) { } } + private void setIncludeNames(List includeNames) { + this.includeNames = includeNames; + } + @Override protected void doStart() { if (enabled) { @@ -195,6 +213,11 @@ public void onTraceStarted(Traceable traceable) { if (services == null) { return; } + + if (isSpanNameIncluded(traceable.getSpanName()) == false) { + return; + } + spans.computeIfAbsent(traceable.getSpanId(), spanId -> { // services might be in shutdown state by this point, but this is handled by the open telemetry internally final SpanBuilder spanBuilder = services.tracer.spanBuilder(traceable.getSpanName()); @@ -245,6 +268,12 @@ public void onTraceStarted(Traceable traceable) { }); } + private boolean isSpanNameIncluded(String name) { + // Alternatively we could use automata here but it is much more complex + // and it needs wrapping like done for use in the security plugin. 
+ return includeNames.isEmpty() || Regex.simpleMatch(includeNames, name); + } + private Context getParentSpanContext(OpenTelemetry openTelemetry) { // If we already have a non-root span context that should be the parent if (Context.current() != Context.root()) { From 813252769fa47d835101060beace61806d09c83b Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Thu, 3 Mar 2022 16:37:05 +0000 Subject: [PATCH 17/90] Fix compilation issue --- x-pack/plugin/apm-integration/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/apm-integration/build.gradle b/x-pack/plugin/apm-integration/build.gradle index 2fa3c336e2334..96be4ee90e20f 100644 --- a/x-pack/plugin/apm-integration/build.gradle +++ b/x-pack/plugin/apm-integration/build.gradle @@ -42,7 +42,7 @@ dependencies { // required by io.grpc implementation 'io.perfmark:perfmark-api:0.24.0' implementation 'io.perfmark:perfmark-impl:0.24.0' - implementation 'com.google.guava:failureaccess:1.0.1' + runtimeOnly 'com.google.guava:failureaccess:1.0.1' // required by grpc-netty api "io.netty:netty-buffer:${versions.netty}" api "io.netty:netty-transport:${versions.netty}" From 84b558d0026917ae5784d8ac59559c9cad619b0e Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Thu, 3 Mar 2022 16:42:03 +0000 Subject: [PATCH 18/90] Update SHAs --- .../apm-integration/licenses/netty-buffer-4.1.66.Final.jar.sha1 | 1 - .../apm-integration/licenses/netty-buffer-4.1.74.Final.jar.sha1 | 1 + .../apm-integration/licenses/netty-codec-4.1.66.Final.jar.sha1 | 1 - .../apm-integration/licenses/netty-codec-4.1.74.Final.jar.sha1 | 1 + .../licenses/netty-codec-http-4.1.66.Final.jar.sha1 | 1 - .../licenses/netty-codec-http-4.1.74.Final.jar.sha1 | 1 + .../licenses/netty-codec-http2-4.1.66.Final.jar.sha1 | 1 - .../licenses/netty-codec-http2-4.1.74.Final.jar.sha1 | 1 + .../apm-integration/licenses/netty-common-4.1.66.Final.jar.sha1 | 1 - .../apm-integration/licenses/netty-common-4.1.74.Final.jar.sha1 | 1 + .../apm-integration/licenses/netty-handler-4.1.66.Final.jar.sha1 | 1 - .../apm-integration/licenses/netty-handler-4.1.74.Final.jar.sha1 | 1 + .../licenses/netty-resolver-4.1.66.Final.jar.sha1 | 1 - .../licenses/netty-resolver-4.1.74.Final.jar.sha1 | 1 + .../licenses/netty-transport-4.1.66.Final.jar.sha1 | 1 - .../licenses/netty-transport-4.1.74.Final.jar.sha1 | 1 + 16 files changed, 8 insertions(+), 8 deletions(-) delete mode 100644 x-pack/plugin/apm-integration/licenses/netty-buffer-4.1.66.Final.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/netty-buffer-4.1.74.Final.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/netty-codec-4.1.66.Final.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/netty-codec-4.1.74.Final.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/netty-codec-http-4.1.66.Final.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/netty-codec-http-4.1.74.Final.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/netty-codec-http2-4.1.66.Final.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/netty-codec-http2-4.1.74.Final.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/netty-common-4.1.66.Final.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/netty-common-4.1.74.Final.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/netty-handler-4.1.66.Final.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/netty-handler-4.1.74.Final.jar.sha1 delete mode 100644 
x-pack/plugin/apm-integration/licenses/netty-resolver-4.1.66.Final.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/netty-resolver-4.1.74.Final.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/netty-transport-4.1.66.Final.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/netty-transport-4.1.74.Final.jar.sha1 diff --git a/x-pack/plugin/apm-integration/licenses/netty-buffer-4.1.66.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-buffer-4.1.66.Final.jar.sha1 deleted file mode 100644 index 973ba015d2079..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/netty-buffer-4.1.66.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8d4be9506ea5f54af58bcd596ba3fe2fc5036413 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-buffer-4.1.74.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-buffer-4.1.74.Final.jar.sha1 new file mode 100644 index 0000000000000..d667ddc5111f2 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/netty-buffer-4.1.74.Final.jar.sha1 @@ -0,0 +1 @@ +fd49b6a3a7aa2e5d4922cf125b52d880c1a8b7bd \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-codec-4.1.66.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-codec-4.1.66.Final.jar.sha1 deleted file mode 100644 index ae8837c2664a8..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/netty-codec-4.1.66.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e7cfff848e6c1294645638d74fce6ad89cc6f3f3 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-codec-4.1.74.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-codec-4.1.74.Final.jar.sha1 new file mode 100644 index 0000000000000..057f44bbd7831 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/netty-codec-4.1.74.Final.jar.sha1 @@ -0,0 +1 @@ +cbc1d14c827a27cef5d6583f8978445c8b4445d2 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-codec-http-4.1.66.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-codec-http-4.1.66.Final.jar.sha1 deleted file mode 100644 index 74435145e041c..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/netty-codec-http-4.1.66.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -15fff6bae9e4b09ba5d48a70bb88841c9fc22a32 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-codec-http-4.1.74.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-codec-http-4.1.74.Final.jar.sha1 new file mode 100644 index 0000000000000..82f26e527ec26 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/netty-codec-http-4.1.74.Final.jar.sha1 @@ -0,0 +1 @@ +73c7bd6341cb59feab6f56200b1e2d908b054fd4 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-codec-http2-4.1.66.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-codec-http2-4.1.66.Final.jar.sha1 deleted file mode 100644 index 3b563c112dcc5..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/netty-codec-http2-4.1.66.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -83f51766236096bd6d493a9f858711fd7974268e \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-codec-http2-4.1.74.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-codec-http2-4.1.74.Final.jar.sha1 new file mode 100644 index 0000000000000..e5f4d78b1ce4e --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/netty-codec-http2-4.1.74.Final.jar.sha1 @@ -0,0 +1 @@ 
+5eaaf9147527ec435fbecf3c57f5b8264886d126 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-common-4.1.66.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-common-4.1.66.Final.jar.sha1 deleted file mode 100644 index 164add2d48e57..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/netty-common-4.1.66.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d1c4eda38f525a02fb1ea8d94a8d98dc2935fd02 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-common-4.1.74.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-common-4.1.74.Final.jar.sha1 new file mode 100644 index 0000000000000..9705a19ec50ad --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/netty-common-4.1.74.Final.jar.sha1 @@ -0,0 +1 @@ +891b8ad3206469762b20c73f45d0d2e24cff3dd2 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-handler-4.1.66.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-handler-4.1.66.Final.jar.sha1 deleted file mode 100644 index 657b3ad736c1e..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/netty-handler-4.1.66.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1e6ec9b58725a96b2bd0f173709b59c79175225c \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-handler-4.1.74.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-handler-4.1.74.Final.jar.sha1 new file mode 100644 index 0000000000000..0ed1df7b49273 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/netty-handler-4.1.74.Final.jar.sha1 @@ -0,0 +1 @@ +fc9d000dfaea5719192929f943357a89f1cbf81c \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-resolver-4.1.66.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-resolver-4.1.66.Final.jar.sha1 deleted file mode 100644 index 4a085c20c9ec0..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/netty-resolver-4.1.66.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2f018d8df6f533c3d75dc5fdb11071bc2e7b591b \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-resolver-4.1.74.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-resolver-4.1.74.Final.jar.sha1 new file mode 100644 index 0000000000000..aa8170f51cd2f --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/netty-resolver-4.1.74.Final.jar.sha1 @@ -0,0 +1 @@ +9209265687a125259fe0396b57d8ccc79697d40e \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-transport-4.1.66.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-transport-4.1.66.Final.jar.sha1 deleted file mode 100644 index c21ce614d86e9..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/netty-transport-4.1.66.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3511bc4e13198de644eefe4c8c758245145da128 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-transport-4.1.74.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-transport-4.1.74.Final.jar.sha1 new file mode 100644 index 0000000000000..825ded05e1283 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/netty-transport-4.1.74.Final.jar.sha1 @@ -0,0 +1 @@ +d760fb9f5d12c93887e171c442e30862a9898d59 \ No newline at end of file From 3badd42aec6ac29a2d99c8e1e27d85c149d3ebd3 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Thu, 3 Mar 2022 16:43:10 +0000 Subject: [PATCH 19/90] Compilation fix --- .../xpack/security/rest/SecurityRestFilter.java | 7 +++---- 1 file changed, 3 
insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java index a235b2cd9ae0f..81a108d27a793 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java @@ -12,9 +12,8 @@ import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.UUIDs; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -115,8 +114,8 @@ public void handleRequest(RestRequest request, RestChannel channel, NodeClient c try { // Populate x-opaque-id if not already exists to chain all related actions together if (authentication != null && false == SystemUser.is(authentication.getUser())) { - if (threadContext.getHeader(Task.X_OPAQUE_ID) == null) { - threadContext.putHeader(Task.X_OPAQUE_ID, UUIDs.base64UUID()); + if (threadContext.getHeader(Task.X_OPAQUE_ID_HTTP_HEADER) == null) { + threadContext.putHeader(Task.X_OPAQUE_ID_HTTP_HEADER, UUIDs.base64UUID()); } } restHandler.handleRequest(request, channel, client); From 3d35bd4288947229c47978a6a3d5749121274371 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Thu, 3 Mar 2022 21:14:08 +0000 Subject: [PATCH 20/90] Tweaks --- .../src/main/java/org/elasticsearch/tasks/TaskTracer.java | 2 +- x-pack/plugin/apm-integration/build.gradle | 6 ------ 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java b/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java index ec631b369a9e9..f12f94c57485c 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java @@ -18,7 +18,7 @@ public class TaskTracer { - private static final Logger logger = LogManager.getLogger(); + private static final Logger logger = LogManager.getLogger(TaskTracer.class); private final List tracers = new CopyOnWriteArrayList<>(); diff --git a/x-pack/plugin/apm-integration/build.gradle b/x-pack/plugin/apm-integration/build.gradle index 96be4ee90e20f..651415ec3da2b 100644 --- a/x-pack/plugin/apm-integration/build.gradle +++ b/x-pack/plugin/apm-integration/build.gradle @@ -105,8 +105,6 @@ tasks.named("thirdPartyAudit").configure { 'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueProducerFields', 'io.netty.util.internal.shaded.org.jctools.queues.LinkedQueueNode', 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueConsumerIndexField', - 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerIndexField', - 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerLimitField', 'io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess', 'io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess' ) @@ -149,7 +147,6 @@ tasks.named("thirdPartyAudit").configure { 'io.grpc.netty.shaded.io.grpc.netty.NettyChannelBuilder', 'io.grpc.netty.shaded.io.netty.handler.ssl.SslContextBuilder', 
'io.grpc.okhttp.OkHttpChannelBuilder', - 'io.netty.handler.proxy.HttpProxyHandler', 'io.netty.internal.tcnative.AsyncSSLPrivateKeyMethod', 'io.netty.internal.tcnative.AsyncTask', 'io.netty.internal.tcnative.Buffer', @@ -166,11 +163,8 @@ tasks.named("thirdPartyAudit").configure { 'io.netty.internal.tcnative.SniHostNameMatcher', 'io.opentelemetry.sdk.logs.data.Severity', 'lzma.sdk.lzma.Encoder', - 'org.bouncycastle.asn1.x500.X500Name', 'org.bouncycastle.cert.X509v3CertificateBuilder', 'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter', - 'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder', - 'org.bouncycastle.jce.provider.BouncyCastleProvider', 'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder', 'org.conscrypt.AllocatedBuffer', 'org.conscrypt.BufferAllocator', From 2e3aba15eff7dd3f240dc2f04613d656ccec9dfc Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Thu, 3 Mar 2022 21:36:23 +0000 Subject: [PATCH 21/90] Formatting --- .../java/org/elasticsearch/cluster/service/MasterService.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java index 713ffb0524841..2d8eb8cda29d5 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java @@ -422,8 +422,7 @@ public void onFailure(Exception exception) { } catch (Exception e) { handleException(summary, publicationStartTime, newClusterState, e); } - } - finally { + } finally { taskManager.unregister(task); } } From 36c8943c29784152c46da33b1bfa80eb6476b6cb Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Thu, 3 Mar 2022 21:40:00 +0000 Subject: [PATCH 22/90] Fix 3rd party errors --- x-pack/plugin/apm-integration/build.gradle | 47 ++++++++++++---------- 1 file changed, 25 insertions(+), 22 deletions(-) diff --git a/x-pack/plugin/apm-integration/build.gradle b/x-pack/plugin/apm-integration/build.gradle index 651415ec3da2b..a9e4b567dfb9e 100644 --- a/x-pack/plugin/apm-integration/build.gradle +++ b/x-pack/plugin/apm-integration/build.gradle @@ -73,31 +73,26 @@ tasks.named("dependencyLicenses").configure { tasks.named("thirdPartyAudit").configure { ignoreViolations( - 'io.opentelemetry.internal.shaded.jctools.queues.MpscArrayQueueConsumerIndexField', - 'io.opentelemetry.internal.shaded.jctools.queues.MpscArrayQueueProducerIndexField', - 'io.opentelemetry.internal.shaded.jctools.queues.MpscArrayQueueProducerLimitField', - 'io.opentelemetry.internal.shaded.jctools.util.UnsafeAccess', - 'io.opentelemetry.internal.shaded.jctools.util.UnsafeRefArrayAccess', - 'com.google.common.cache.Striped64', 'com.google.common.cache.Striped64$1', 'com.google.common.cache.Striped64$Cell', - 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'com.google.common.cache.Striped64', 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', - 'com.google.common.hash.Striped64', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', 'com.google.common.hash.Striped64$1', 'com.google.common.hash.Striped64$Cell', - 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.hash.Striped64', 
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', - 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', 'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator', - 'io.netty.util.internal.PlatformDependent0', 'io.netty.util.internal.PlatformDependent0$1', 'io.netty.util.internal.PlatformDependent0$2', 'io.netty.util.internal.PlatformDependent0$3', 'io.netty.util.internal.PlatformDependent0$5', + 'io.netty.util.internal.PlatformDependent0', 'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueConsumerNodeRef', 'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueProducerNodeRef', 'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueColdProducerFields', @@ -105,37 +100,41 @@ tasks.named("thirdPartyAudit").configure { 'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueProducerFields', 'io.netty.util.internal.shaded.org.jctools.queues.LinkedQueueNode', 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueConsumerIndexField', + 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerIndexField', + 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerLimitField', 'io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess', - 'io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess' + 'io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess', + 'io.opentelemetry.internal.shaded.jctools.queues.MpscArrayQueueConsumerIndexField', + 'io.opentelemetry.internal.shaded.jctools.queues.MpscArrayQueueProducerIndexField', + 'io.opentelemetry.internal.shaded.jctools.queues.MpscArrayQueueProducerLimitField', + 'io.opentelemetry.internal.shaded.jctools.util.UnsafeAccess', + 'io.opentelemetry.internal.shaded.jctools.util.UnsafeRefArrayAccess', ) ignoreMissingClasses( - 'io.opentelemetry.sdk.logs.data.Body', - 'io.opentelemetry.sdk.logs.data.LogData', - 'io.opentelemetry.sdk.logs.export.LogExporter', 'android.net.ssl.SSLSockets', 'android.os.Build$VERSION', 'android.util.Log', 'com.aayushatharva.brotli4j.Brotli4jLoader', 'com.aayushatharva.brotli4j.decoder.DecoderJNI$Status', 'com.aayushatharva.brotli4j.decoder.DecoderJNI$Wrapper', - 'com.aayushatharva.brotli4j.encoder.Encoder', 'com.aayushatharva.brotli4j.encoder.Encoder$Mode', 'com.aayushatharva.brotli4j.encoder.Encoder$Parameters', + 'com.aayushatharva.brotli4j.encoder.Encoder', 'com.github.luben.zstd.Zstd', 'com.google.gson.stream.JsonReader', 'com.google.gson.stream.JsonToken', 'com.google.protobuf.ExtensionRegistry', 'com.google.protobuf.ExtensionRegistryLite', - 'com.google.protobuf.MessageLite', 'com.google.protobuf.MessageLite$Builder', + 'com.google.protobuf.MessageLite', 'com.google.protobuf.MessageLiteOrBuilder', 'com.google.protobuf.Parser', 'com.google.protobuf.nano.CodedOutputByteBufferNano', 'com.google.protobuf.nano.MessageNano', 'com.jcraft.jzlib.Deflater', 'com.jcraft.jzlib.Inflater', - 'com.jcraft.jzlib.JZlib', 'com.jcraft.jzlib.JZlib$WrapperType', + 'com.jcraft.jzlib.JZlib', 'com.ning.compress.BufferRecycler', 'com.ning.compress.lzf.ChunkDecoder', 'com.ning.compress.lzf.ChunkEncoder', @@ -151,6 +150,7 @@ tasks.named("thirdPartyAudit").configure { 'io.netty.internal.tcnative.AsyncTask', 
'io.netty.internal.tcnative.Buffer', 'io.netty.internal.tcnative.CertificateCallback', + 'io.netty.internal.tcnative.CertificateCompressionAlgo', 'io.netty.internal.tcnative.CertificateVerifier', 'io.netty.internal.tcnative.Library', 'io.netty.internal.tcnative.ResultCallback', @@ -161,22 +161,25 @@ tasks.named("thirdPartyAudit").configure { 'io.netty.internal.tcnative.SSLSessionCache', 'io.netty.internal.tcnative.SessionTicketKey', 'io.netty.internal.tcnative.SniHostNameMatcher', + 'io.opentelemetry.sdk.logs.data.Body', + 'io.opentelemetry.sdk.logs.data.LogData', 'io.opentelemetry.sdk.logs.data.Severity', + 'io.opentelemetry.sdk.logs.export.LogExporter', 'lzma.sdk.lzma.Encoder', 'org.bouncycastle.cert.X509v3CertificateBuilder', 'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter', 'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder', 'org.conscrypt.AllocatedBuffer', 'org.conscrypt.BufferAllocator', - 'org.conscrypt.Conscrypt', 'org.conscrypt.Conscrypt$ProviderBuilder', + 'org.conscrypt.Conscrypt', 'org.conscrypt.HandshakeListener', - 'org.eclipse.jetty.alpn.ALPN', 'org.eclipse.jetty.alpn.ALPN$ClientProvider', 'org.eclipse.jetty.alpn.ALPN$ServerProvider', - 'org.eclipse.jetty.npn.NextProtoNego', + 'org.eclipse.jetty.alpn.ALPN', 'org.eclipse.jetty.npn.NextProtoNego$ClientProvider', 'org.eclipse.jetty.npn.NextProtoNego$ServerProvider', + 'org.eclipse.jetty.npn.NextProtoNego', 'org.jboss.marshalling.ByteInput', 'org.jboss.marshalling.ByteOutput', 'org.jboss.marshalling.Marshaller', @@ -189,6 +192,6 @@ tasks.named("thirdPartyAudit").configure { 'org.slf4j.helpers.MessageFormatter', 'org.slf4j.spi.LocationAwareLogger', 'reactor.blockhound.BlockHound$Builder', - 'reactor.blockhound.integration.BlockHoundIntegration' + 'reactor.blockhound.integration.BlockHoundIntegration', ) } From 99b948c1df1848e830e8a24dc594e1a45d57d078 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Fri, 11 Mar 2022 18:58:49 +0000 Subject: [PATCH 23/90] WIP - hacks to make distributed tracing work --- .../elasticsearch/action/ActionModule.java | 11 ++++-- .../java/org/elasticsearch/node/Node.java | 12 +++--- .../elasticsearch/rest/RestController.java | 12 ++++-- .../java/org/elasticsearch/tasks/Task.java | 7 +++- .../org/elasticsearch/tracing/Tracer.java | 2 + .../action/ActionModuleTests.java | 16 ++++---- .../rest/RestControllerTests.java | 26 ++++++------- .../rest/RestHttpResponseHeadersTests.java | 2 +- .../indices/RestValidateQueryActionTests.java | 2 +- .../test/rest/RestActionTestCase.java | 2 +- .../java/org/elasticsearch/xpack/apm/APM.java | 4 +- .../elasticsearch/xpack/apm/APMTracer.java | 39 +++++++++++++------ .../action/RestTermsEnumActionTests.java | 2 +- .../xpack/security/SecurityTests.java | 4 +- 14 files changed, 89 insertions(+), 52 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 4f2879aac04ff..0877da0f07e46 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -405,12 +405,14 @@ import org.elasticsearch.rest.action.search.RestSearchScrollAction; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.usage.UsageService; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import 
java.util.function.Consumer; import java.util.function.Supplier; @@ -441,6 +443,7 @@ public class ActionModule extends AbstractModule { private final RequestValidators mappingRequestValidators; private final RequestValidators indicesAliasesRequestRequestValidators; private final ThreadPool threadPool; + private final List tracers; public ActionModule( Settings settings, @@ -453,8 +456,8 @@ public ActionModule( NodeClient nodeClient, CircuitBreakerService circuitBreakerService, UsageService usageService, - SystemIndices systemIndices - ) { + SystemIndices systemIndices, + List tracers) { this.settings = settings; this.indexNameExpressionResolver = indexNameExpressionResolver; this.indexScopedSettings = indexScopedSettings; @@ -464,6 +467,8 @@ public ActionModule( this.threadPool = threadPool; actions = setupActions(actionPlugins); actionFilters = setupActionFilters(actionPlugins); + this.tracers = Objects.requireNonNullElse(tracers, List.of()); + autoCreateIndex = new AutoCreateIndex(settings, clusterSettings, indexNameExpressionResolver, systemIndices); destructiveOperations = new DestructiveOperations(settings, clusterSettings); Set headers = Stream.concat( @@ -501,7 +506,7 @@ public ActionModule( actionPlugins.stream().flatMap(p -> p.indicesAliasesRequestValidators().stream()).collect(Collectors.toList()) ); - restController = new RestController(headers, restWrapper, nodeClient, circuitBreakerService, usageService); + restController = new RestController(headers, restWrapper, nodeClient, circuitBreakerService, usageService, tracers); } public Map> getActions() { diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index b7b085585c1a3..930c50758d1d7 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -700,6 +700,11 @@ protected Node( ) .collect(Collectors.toList()); + final List tracers = pluginComponents.stream() + .map(c -> c instanceof Tracer ? (Tracer) c : null) + .filter(Objects::nonNull) + .collect(Collectors.toUnmodifiableList()); + ActionModule actionModule = new ActionModule( settings, clusterModule.getIndexNameExpressionResolver(), @@ -711,7 +716,8 @@ protected Node( client, circuitBreakerService, usageService, - systemIndices + systemIndices, + tracers ); modules.add(actionModule); @@ -772,10 +778,6 @@ protected Node( clusterService.setTaskManager(transportService.getTaskManager()); final TaskTracer taskTracer = transportService.getTaskManager().getTaskTracer(); - final List tracers = pluginComponents.stream() - .map(c -> c instanceof Tracer ? 
(Tracer) c : null) - .filter(Objects::nonNull) - .collect(Collectors.toUnmodifiableList()); tracers.forEach(taskTracer::addTracer); pluginsService.filterPlugins(Plugin.class).forEach(plugin -> plugin.onTracers(tracers)); diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 5006b7f8e2dde..564769c408814 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -29,6 +29,7 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.rest.RestHandler.Route; import org.elasticsearch.tasks.Task; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.usage.UsageService; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; @@ -91,14 +92,15 @@ public class RestController implements HttpServerTransport.Dispatcher { /** Rest headers that are copied to internal requests made during a rest request. */ private final Set headersToCopy; private final UsageService usageService; + private final List tracers; public RestController( Set headersToCopy, UnaryOperator handlerWrapper, NodeClient client, CircuitBreakerService circuitBreakerService, - UsageService usageService - ) { + UsageService usageService, + List tracers) { this.headersToCopy = headersToCopy; this.usageService = usageService; if (handlerWrapper == null) { @@ -107,6 +109,7 @@ public RestController( this.handlerWrapper = handlerWrapper; this.client = client; this.circuitBreakerService = circuitBreakerService; + this.tracers = tracers; registerHandlerNoWrap( RestRequest.Method.GET, "/favicon.ico", @@ -498,7 +501,10 @@ private void copyRestHeaders(RestRequest request, ThreadContext threadContext) t } else if (name.equals(Task.TRACE_PARENT_HTTP_HEADER)) { String traceparent = distinctHeaderValues.get(0); if (traceparent.length() >= 55) { - threadContext.putHeader(Task.TRACE_ID, traceparent.substring(3, 35)); + final String traceId = traceparent.substring(3, 35); + threadContext.putHeader(Task.TRACE_ID, traceId); + threadContext.putHeader(Task.TRACE_PARENT_HTTP_HEADER, traceparent); + tracers.forEach(t -> t.setTraceParent(traceparent)); } } else { threadContext.putHeader(name, String.join(",", distinctHeaderValues)); diff --git a/server/src/main/java/org/elasticsearch/tasks/Task.java b/server/src/main/java/org/elasticsearch/tasks/Task.java index c50e47d2b6fc1..df9062e515552 100644 --- a/server/src/main/java/org/elasticsearch/tasks/Task.java +++ b/server/src/main/java/org/elasticsearch/tasks/Task.java @@ -55,7 +55,12 @@ public class Task implements Traceable { */ public static final String TRACE_ID = "trace.id"; - public static final Set HEADERS_TO_COPY = Set.of(X_OPAQUE_ID_HTTP_HEADER, TRACE_ID, X_ELASTIC_PRODUCT_ORIGIN_HTTP_HEADER); + public static final Set HEADERS_TO_COPY = Set.of( + X_OPAQUE_ID_HTTP_HEADER, + TRACE_PARENT_HTTP_HEADER, + TRACE_ID, + X_ELASTIC_PRODUCT_ORIGIN_HTTP_HEADER + ); private final long id; diff --git a/server/src/main/java/org/elasticsearch/tracing/Tracer.java b/server/src/main/java/org/elasticsearch/tracing/Tracer.java index bba75825681a5..dadf22bb0e387 100644 --- a/server/src/main/java/org/elasticsearch/tracing/Tracer.java +++ b/server/src/main/java/org/elasticsearch/tracing/Tracer.java @@ -29,4 +29,6 @@ public interface Tracer { * Retrieve context related headers for the span of the given id. 
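+ * The returned headers can be attached to outgoing requests so that another process is
+ * able to join the same trace, for example by propagating a {@code traceparent} header.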
*/ Map getSpanHeadersById(String id); + + void setTraceParent(String traceId); } diff --git a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java index 0dd385a069208..dd7fd47b4d06d 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java @@ -115,8 +115,8 @@ public void testSetupRestHandlerContainsKnownBuiltin() { null, null, usageService, - null - ); + null, + null); actionModule.initRestHandlers(null); // At this point the easiest way to confirm that a handler is loaded is to try to register another one on top of it and to fail Exception e = expectThrows( @@ -171,8 +171,8 @@ public String getName() { null, null, usageService, - null - ); + null, + null); Exception e = expectThrows(IllegalArgumentException.class, () -> actionModule.initRestHandlers(null)); assertThat(e.getMessage(), startsWith("Cannot replace existing handler for [/] for method: GET")); } finally { @@ -220,8 +220,8 @@ public List getRestHandlers( null, null, usageService, - null - ); + null, + null); actionModule.initRestHandlers(null); // At this point the easiest way to confirm that a handler is loaded is to try to register another one on top of it and to fail Exception e = expectThrows( @@ -264,8 +264,8 @@ public void test3rdPartyHandlerIsNotInstalled() { null, null, usageService, - null - ) + null, + null) ); assertThat( e.getMessage(), diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java index 542f34f69585f..e60a01de2f4be 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -94,7 +94,7 @@ public void setup() { HttpServerTransport httpServerTransport = new TestHttpServerTransport(); client = new NoOpNodeClient(this.getTestName()); - restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService); + restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService, tracers); restController.registerHandler( new Route(GET, "/"), (request, channel, client) -> channel.sendResponse( @@ -118,7 +118,7 @@ public void testApplyRelevantHeaders() throws Exception { Set headers = new HashSet<>( Arrays.asList(new RestHeaderDefinition("header.1", true), new RestHeaderDefinition("header.2", true)) ); - final RestController restController = new RestController(headers, null, null, circuitBreakerService, usageService); + final RestController restController = new RestController(headers, null, null, circuitBreakerService, usageService, tracers); Map> restHeaders = new HashMap<>(); restHeaders.put("header.1", Collections.singletonList("true")); restHeaders.put("header.2", Collections.singletonList("true")); @@ -160,7 +160,7 @@ public void testRequestWithDisallowedMultiValuedHeader() { Set headers = new HashSet<>( Arrays.asList(new RestHeaderDefinition("header.1", true), new RestHeaderDefinition("header.2", false)) ); - final RestController restController = new RestController(headers, null, null, circuitBreakerService, usageService); + final RestController restController = new RestController(headers, null, null, circuitBreakerService, usageService, tracers); Map> restHeaders = new HashMap<>(); restHeaders.put("header.1", Collections.singletonList("boo")); 
restHeaders.put("header.2", List.of("foo", "bar")); @@ -173,7 +173,7 @@ public void testRequestWithDisallowedMultiValuedHeader() { public void testTraceParentAndTraceId() throws Exception { final ThreadContext threadContext = client.threadPool().getThreadContext(); Set headers = new HashSet<>(Arrays.asList(new RestHeaderDefinition(Task.TRACE_PARENT_HTTP_HEADER, false))); - final RestController restController = new RestController(headers, null, null, circuitBreakerService, usageService); + final RestController restController = new RestController(headers, null, null, circuitBreakerService, usageService, tracers); Map> restHeaders = new HashMap<>(); restHeaders.put( Task.TRACE_PARENT_HTTP_HEADER, @@ -208,7 +208,7 @@ public void testRequestWithDisallowedMultiValuedHeaderButSameValues() { Set headers = new HashSet<>( Arrays.asList(new RestHeaderDefinition("header.1", true), new RestHeaderDefinition("header.2", false)) ); - final RestController restController = new RestController(headers, null, client, circuitBreakerService, usageService); + final RestController restController = new RestController(headers, null, client, circuitBreakerService, usageService, tracers); Map> restHeaders = new HashMap<>(); restHeaders.put("header.1", Collections.singletonList("boo")); restHeaders.put("header.2", List.of("foo", "foo")); @@ -279,7 +279,7 @@ public void testRegisterAsReplacedHandler() { } public void testRegisterSecondMethodWithDifferentNamedWildcard() { - final RestController restController = new RestController(null, null, null, circuitBreakerService, usageService); + final RestController restController = new RestController(null, null, null, circuitBreakerService, usageService, tracers); RestRequest.Method firstMethod = randomFrom(RestRequest.Method.values()); RestRequest.Method secondMethod = randomFrom( @@ -308,7 +308,7 @@ public void testRestHandlerWrapper() throws Exception { final RestController restController = new RestController(Collections.emptySet(), h -> { assertSame(handler, h); return (RestRequest request, RestChannel channel, NodeClient client) -> wrapperCalled.set(true); - }, client, circuitBreakerService, usageService); + }, client, circuitBreakerService, usageService, tracers); restController.registerHandler(new Route(GET, "/wrapped"), handler); RestRequest request = testRestRequest("/wrapped", "{}", XContentType.JSON); AssertingChannel channel = new AssertingChannel(request, true, RestStatus.BAD_REQUEST); @@ -371,7 +371,7 @@ public void testDispatchRequiresContentTypeForRequestsWithContent() { String content = randomAlphaOfLength((int) Math.round(BREAKER_LIMIT.getBytes() / inFlightRequestsBreaker.getOverhead())); RestRequest request = testRestRequest("/", content, null); AssertingChannel channel = new AssertingChannel(request, true, RestStatus.NOT_ACCEPTABLE); - restController = new RestController(Collections.emptySet(), null, null, circuitBreakerService, usageService); + restController = new RestController(Collections.emptySet(), null, null, circuitBreakerService, usageService, tracers); restController.registerHandler( new Route(GET, "/"), (r, c, client) -> c.sendResponse(new BytesRestResponse(RestStatus.OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY)) @@ -691,7 +691,7 @@ public Exception getInboundException() { public void testDispatchCompatibleHandler() { - RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService); + RestController restController = new RestController(Collections.emptySet(), null, client, 
circuitBreakerService, usageService, tracers); final RestApiVersion version = RestApiVersion.minimumSupported(); @@ -715,7 +715,7 @@ public void testDispatchCompatibleHandler() { public void testDispatchCompatibleRequestToNewlyAddedHandler() { - RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService); + RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService, tracers); final RestApiVersion version = RestApiVersion.minimumSupported(); @@ -750,7 +750,7 @@ private FakeRestRequest requestWithContent(String mediaType) { } public void testCurrentVersionVNDMediaTypeIsNotUsingCompatibility() { - RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService); + RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService, tracers); final RestApiVersion version = RestApiVersion.current(); @@ -775,7 +775,7 @@ public void testCurrentVersionVNDMediaTypeIsNotUsingCompatibility() { } public void testCustomMediaTypeValidation() { - RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService); + RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService, tracers); final String mediaType = "application/x-protobuf"; FakeRestRequest fakeRestRequest = requestWithContent(mediaType); @@ -801,7 +801,7 @@ public void handleRequest(RestRequest request, RestChannel channel, NodeClient c } public void testBrowserSafelistedContentTypesAreRejected() { - RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService); + RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService, tracers); final String mediaType = randomFrom(RestController.SAFELISTED_MEDIA_TYPES); FakeRestRequest fakeRestRequest = requestWithContent(mediaType); diff --git a/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java b/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java index 2c18994462eab..6b3004bb5ee5a 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java @@ -80,7 +80,7 @@ public void testUnsupportedMethodResponseHttpHeader() throws Exception { final Settings settings = Settings.EMPTY; UsageService usageService = new UsageService(); - RestController restController = new RestController(Collections.emptySet(), null, null, circuitBreakerService, usageService); + RestController restController = new RestController(Collections.emptySet(), null, null, circuitBreakerService, usageService, tracers); // A basic RestHandler handles requests to the endpoint RestHandler restHandler = (request, channel, client) -> channel.sendResponse(new TestResponse()); diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java index e2612383ce04c..7ad1b72858244 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java +++ 
b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java @@ -54,7 +54,7 @@ public class RestValidateQueryActionTests extends AbstractSearchTestCase { private NodeClient client = new NodeClient(Settings.EMPTY, threadPool); private UsageService usageService = new UsageService(); - private RestController controller = new RestController(emptySet(), null, client, new NoneCircuitBreakerService(), usageService); + private RestController controller = new RestController(emptySet(), null, client, new NoneCircuitBreakerService(), usageService, tracers); private RestValidateQueryAction action = new RestValidateQueryAction(); /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java index b539338337ca9..4d50aacd65cb0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java @@ -40,7 +40,7 @@ public abstract class RestActionTestCase extends ESTestCase { @Before public void setUpController() { verifyingClient = new VerifyingClient(this.getTestName()); - controller = new RestController(Collections.emptySet(), null, verifyingClient, new NoneCircuitBreakerService(), new UsageService()); + controller = new RestController(Collections.emptySet(), null, verifyingClient, new NoneCircuitBreakerService(), new UsageService(), tracers); } @After diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java index b0ee2c6585983..dd57efe9f596c 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java @@ -39,7 +39,6 @@ import java.util.function.Supplier; public class APM extends Plugin implements NetworkPlugin { - public static final Set TRACE_HEADERS = Set.of(Task.TRACE_PARENT_HTTP_HEADER, Task.TRACE_STATE); private final SetOnce tracer = new SetOnce<>(); @@ -73,7 +72,8 @@ public List> getSettings() { APMTracer.APM_ENABLED_SETTING, APMTracer.APM_ENDPOINT_SETTING, APMTracer.APM_TOKEN_SETTING, - APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING + APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING, + APMTracer.APM_SAMPLE_RATE_SETTING ); } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 4f5cd8123ec91..a40721de154ac 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -27,9 +27,12 @@ import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; import io.opentelemetry.sdk.trace.export.SpanExporter; +import io.opentelemetry.sdk.trace.samplers.Sampler; import io.opentelemetry.semconv.resource.attributes.ResourceAttributes; import io.opentelemetry.semconv.trace.attributes.SemanticAttributes; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; @@ -65,11 +68,14 @@ public class APMTracer extends 
AbstractLifecycleComponent implements org.elasticsearch.tracing.Tracer { + private static final Logger LOGGER = LogManager.getLogger(APMTracer.class); + public static final CapturingSpanExporter CAPTURING_SPAN_EXPORTER = new CapturingSpanExporter(); static final Setting APM_ENABLED_SETTING = Setting.boolSetting("xpack.apm.tracing.enabled", false, Dynamic, NodeScope); static final Setting APM_ENDPOINT_SETTING = SecureSetting.secureString("xpack.apm.endpoint", null); static final Setting APM_TOKEN_SETTING = SecureSetting.secureString("xpack.apm.token", null); + static final Setting APM_SAMPLE_RATE_SETTING = Setting.floatSetting("xpack.apm.tracing.sample_rate", 1.0f, Dynamic, NodeScope); static final Setting> APM_TRACING_NAMES_INCLUDE_SETTING = Setting.listSetting( "xpack.apm.tracing.names.include", Collections.emptyList(), @@ -86,23 +92,20 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic private final SecureString token; private volatile boolean enabled; + private volatile float sampleRate; private volatile APMServices services; private List includeNames; - /** This class is required to make all open telemetry services visible at once */ - private static class APMServices { - private final SdkTracerProvider provider; - private final Tracer tracer; - private final OpenTelemetry openTelemetry; - - private APMServices(SdkTracerProvider provider, Tracer tracer, OpenTelemetry openTelemetry) { - this.provider = provider; - this.tracer = tracer; - this.openTelemetry = openTelemetry; - } + public void setSampleRate(float sampleRate) { + this.sampleRate = sampleRate; } + /** + * This class is required to make all open telemetry services visible at once + */ + private record APMServices(SdkTracerProvider provider, Tracer tracer, OpenTelemetry openTelemetry) {} + public APMTracer(Settings settings, ThreadPool threadPool, ClusterService clusterService) { this.threadPool = Objects.requireNonNull(threadPool); this.clusterService = Objects.requireNonNull(clusterService); @@ -110,8 +113,10 @@ public APMTracer(Settings settings, ThreadPool threadPool, ClusterService cluste this.token = APM_TOKEN_SETTING.get(settings); this.enabled = APM_ENABLED_SETTING.get(settings); this.includeNames = APM_TRACING_NAMES_INCLUDE_SETTING.get(settings); + this.sampleRate = APM_SAMPLE_RATE_SETTING.get(settings); clusterService.getClusterSettings().addSettingsUpdateConsumer(APM_ENABLED_SETTING, this::setEnabled); clusterService.getClusterSettings().addSettingsUpdateConsumer(APM_TRACING_NAMES_INCLUDE_SETTING, this::setIncludeNames); + clusterService.getClusterSettings().addSettingsUpdateConsumer(APM_SAMPLE_RATE_SETTING, this::setSampleRate); } public boolean isEnabled() { @@ -183,6 +188,8 @@ private void createApmServices() { ) ) ) + // TODO make dynamic + .setSampler(Sampler.traceIdRatioBased(this.sampleRate)) .addSpanProcessor(createSpanProcessor(endpoint, token)) .build() ); @@ -295,6 +302,16 @@ private Context getParentSpanContext(OpenTelemetry openTelemetry) { return null; } + @Override + public void setTraceParent(String traceparent) { + // traceparent and tracestate should match the keys used by W3CTraceContextPropagator + // TODO tracestate? 
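+ // A traceparent value follows the W3C Trace Context format
+ // "00-<32 hex trace id>-<16 hex parent id>-<2 hex flags>", for example
+ // "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01" (55 characters in total).
+ // Extracting it via the propagator and making the resulting context current links spans
+ // started on this thread to the upstream caller's trace.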
+ services.openTelemetry.getPropagators() + .getTextMapPropagator() + .extract(Context.current(), Map.of(Task.TRACE_PARENT_HTTP_HEADER, traceparent), new MapKeyGetter()) + .makeCurrent(); + } + @Override public Map getSpanHeadersById(String id) { var services = this.services; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java index cc67e727aa6e1..42586464bbd74 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java @@ -52,7 +52,7 @@ public class RestTermsEnumActionTests extends ESTestCase { private static NodeClient client = new NodeClient(Settings.EMPTY, threadPool); private static UsageService usageService = new UsageService(); - private static RestController controller = new RestController(emptySet(), null, client, new NoneCircuitBreakerService(), usageService); + private static RestController controller = new RestController(emptySet(), null, client, new NoneCircuitBreakerService(), usageService, tracers); private static RestTermsEnumAction action = new RestTermsEnumAction(); /** diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 79aeb4e9b58e9..a57c9fd982acb 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -704,8 +704,8 @@ public void testSecurityRestHandlerWrapperCanBeInstalled() throws IllegalAccessE null, null, usageService, - null - ); + null, + null); actionModule.initRestHandlers(null); appender.assertAllExpectationsMatched(); From 2dec2582eef29743d941799f957e4cd680dbe341 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Tue, 15 Mar 2022 14:26:53 +0000 Subject: [PATCH 24/90] WIP - trying to get REST tracing working --- .../netty4/Netty4HttpServerTransport.java | 8 ++- .../transport/netty4/Netty4Plugin.java | 8 ++- .../http/netty4/Netty4BadRequestTests.java | 4 +- .../Netty4HttpServerPipeliningTests.java | 4 +- .../Netty4HttpServerTransportTests.java | 22 ++++-- .../http/nio/NioHttpServerTransport.java | 7 +- .../transport/nio/NioTransportPlugin.java | 24 ++++--- .../http/nio/NioHttpServerTransportTests.java | 22 ++++-- .../common/network/NetworkModule.java | 7 +- .../http/AbstractHttpServerTransport.java | 15 ++-- .../http/DefaultRestChannel.java | 27 ++++--- .../org/elasticsearch/http/HttpTracer.java | 70 +++++++++++++------ .../java/org/elasticsearch/node/Node.java | 8 +-- .../elasticsearch/plugins/NetworkPlugin.java | 5 +- .../org/elasticsearch/rest/RestChannel.java | 25 ++++++- .../org/elasticsearch/tracing/Tracer.java | 2 + .../common/network/NetworkModuleTests.java | 61 ++++++++-------- .../AbstractHttpServerTransportTests.java | 24 +++---- .../elasticsearch/xpack/apm/APMTracer.java | 27 ++++--- .../core/LocalStateCompositeXPackPlugin.java | 25 +++---- .../xpack/security/Security.java | 11 +-- .../SecurityNetty4HttpServerTransport.java | 8 ++- .../nio/SecurityNioHttpServerTransport.java | 8 ++- ...ecurityNetty4HttpServerTransportTests.java | 29 ++++---- .../SecurityNioHttpServerTransportTests.java | 22 ++++-- 25 files changed, 
295 insertions(+), 178 deletions(-) diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 2a359304ab247..e87654184fd43 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -49,6 +49,7 @@ import org.elasticsearch.http.HttpReadTimeoutException; import org.elasticsearch.http.HttpServerChannel; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.netty4.Netty4Utils; import org.elasticsearch.transport.netty4.NettyAllocator; import org.elasticsearch.transport.netty4.NettyByteBufSizer; @@ -57,6 +58,7 @@ import java.net.InetSocketAddress; import java.net.SocketOption; +import java.util.List; import java.util.concurrent.TimeUnit; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_CHUNK_SIZE; @@ -146,9 +148,9 @@ public Netty4HttpServerTransport( NamedXContentRegistry xContentRegistry, Dispatcher dispatcher, ClusterSettings clusterSettings, - SharedGroupFactory sharedGroupFactory - ) { - super(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher, clusterSettings); + SharedGroupFactory sharedGroupFactory, + List tracers) { + super(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher, clusterSettings, tracers); Netty4Utils.setAvailableProcessors(EsExecutors.NODE_PROCESSORS_SETTING.get(settings)); NettyAllocator.logAllocatorDescriptionIfNeeded(); this.sharedGroupFactory = sharedGroupFactory; diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Plugin.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Plugin.java index 8662e6a6e9404..3375d8f79a55f 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Plugin.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Plugin.java @@ -24,6 +24,7 @@ import org.elasticsearch.plugins.NetworkPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.Transport; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -98,8 +99,8 @@ public Map> getHttpTransports( NamedXContentRegistry xContentRegistry, NetworkService networkService, HttpServerTransport.Dispatcher dispatcher, - ClusterSettings clusterSettings - ) { + ClusterSettings clusterSettings, + List tracers) { return Collections.singletonMap( NETTY_HTTP_TRANSPORT_NAME, () -> new Netty4HttpServerTransport( @@ -110,7 +111,8 @@ public Map> getHttpTransports( xContentRegistry, dispatcher, clusterSettings, - getSharedGroupFactory(settings) + getSharedGroupFactory(settings), + tracers ) ); } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4BadRequestTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4BadRequestTests.java index bbb21b3f3d864..67b48ecd40678 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4BadRequestTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4BadRequestTests.java @@ -88,8 +88,8 @@ public void 
dispatchBadRequest(RestChannel channel, ThreadContext threadContext, xContentRegistry(), dispatcher, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), - new SharedGroupFactory(Settings.EMPTY) - ) + new SharedGroupFactory(Settings.EMPTY), + tracers) ) { httpServerTransport.start(); final TransportAddress transportAddress = randomFrom(httpServerTransport.boundAddress().boundAddresses()); diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java index 4902f5842ed1b..09d9f11527516 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java @@ -111,8 +111,8 @@ class CustomNettyHttpServerTransport extends Netty4HttpServerTransport { xContentRegistry(), new NullDispatcher(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), - new SharedGroupFactory(settings) - ); + new SharedGroupFactory(settings), + tracers); } @Override diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java index c01a33f1749da..d706a4f1631aa 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java @@ -69,6 +69,7 @@ import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.Collections; +import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; @@ -174,7 +175,8 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, xContentRegistry(), dispatcher, clusterSettings, - new SharedGroupFactory(settings) + new SharedGroupFactory(settings), + List.of() ) ) { transport.start(); @@ -223,7 +225,8 @@ public void testBindUnavailableAddress() { xContentRegistry(), new NullDispatcher(), clusterSettings, - new SharedGroupFactory(Settings.EMPTY) + new SharedGroupFactory(Settings.EMPTY), + List.of() ) ) { transport.start(); @@ -241,7 +244,8 @@ public void testBindUnavailableAddress() { xContentRegistry(), new NullDispatcher(), clusterSettings, - new SharedGroupFactory(settings) + new SharedGroupFactory(settings), + List.of() ) ) { BindHttpException bindHttpException = expectThrows(BindHttpException.class, otherTransport::start); @@ -293,7 +297,8 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th xContentRegistry(), dispatcher, clusterSettings, - new SharedGroupFactory(settings) + new SharedGroupFactory(settings), + List.of() ) ) { transport.start(); @@ -355,7 +360,8 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th xContentRegistry(), dispatcher, clusterSettings, - new SharedGroupFactory(Settings.EMPTY) + new SharedGroupFactory(Settings.EMPTY), + List.of() ) ) { transport.start(); @@ -424,7 +430,8 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th xContentRegistry(), dispatcher, randomClusterSettings(), - new SharedGroupFactory(settings) + new 
SharedGroupFactory(settings), + List.of() ) ) { transport.start(); @@ -497,7 +504,8 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th xContentRegistry(), dispatcher, randomClusterSettings(), - new SharedGroupFactory(settings) + new SharedGroupFactory(settings), + List.of() ) ) { transport.start(); diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java index 5dedd3705a93a..340a72e3ffbc7 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java @@ -32,6 +32,7 @@ import org.elasticsearch.nio.ServerChannelContext; import org.elasticsearch.nio.SocketChannelContext; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.nio.NioGroupFactory; import org.elasticsearch.transport.nio.PageAllocator; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -40,6 +41,7 @@ import java.net.InetSocketAddress; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; +import java.util.List; import java.util.function.Consumer; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_CHUNK_SIZE; @@ -82,9 +84,10 @@ public NioHttpServerTransport( NamedXContentRegistry xContentRegistry, Dispatcher dispatcher, NioGroupFactory nioGroupFactory, - ClusterSettings clusterSettings + ClusterSettings clusterSettings, + List tracers ) { - super(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher, clusterSettings); + super(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher, clusterSettings, tracers); this.pageAllocator = new PageAllocator(pageCacheRecycler); this.nioGroupFactory = nioGroupFactory; diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransportPlugin.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransportPlugin.java index 4ff1dc60d9c65..07f0ada763cc3 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransportPlugin.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransportPlugin.java @@ -26,6 +26,7 @@ import org.elasticsearch.plugins.NetworkPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.Transport; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -85,16 +86,16 @@ public Map> getTransports( @Override public Map> getHttpTransports( - Settings settings, - ThreadPool threadPool, - BigArrays bigArrays, - PageCacheRecycler pageCacheRecycler, - CircuitBreakerService circuitBreakerService, - NamedXContentRegistry xContentRegistry, - NetworkService networkService, - HttpServerTransport.Dispatcher dispatcher, - ClusterSettings clusterSettings - ) { + Settings settings, + ThreadPool threadPool, + BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, + CircuitBreakerService circuitBreakerService, + NamedXContentRegistry xContentRegistry, + NetworkService networkService, + HttpServerTransport.Dispatcher dispatcher, + ClusterSettings clusterSettings, + List tracers) { return Collections.singletonMap( NIO_HTTP_TRANSPORT_NAME, () -> new NioHttpServerTransport( @@ -106,7 
+107,8 @@ public Map> getHttpTransports( xContentRegistry, dispatcher, getNioGroupFactory(settings), - clusterSettings + clusterSettings, + tracers ) ); } diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java index 74ff306d8d964..bfea8914d2420 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java @@ -56,6 +56,7 @@ import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.Collections; +import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; @@ -161,7 +162,8 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, xContentRegistry(), dispatcher, new NioGroupFactory(settings, logger), - randomClusterSettings() + randomClusterSettings(), + List.of() ) ) { transport.start(); @@ -211,7 +213,8 @@ public void testBindUnavailableAddress() { xContentRegistry(), new NullDispatcher(), new NioGroupFactory(Settings.EMPTY, logger), - randomClusterSettings() + randomClusterSettings(), + List.of() ) ) { transport.start(); @@ -230,7 +233,8 @@ public void testBindUnavailableAddress() { xContentRegistry(), new NullDispatcher(), new NioGroupFactory(Settings.EMPTY, logger), - randomClusterSettings() + randomClusterSettings(), + List.of() ) ) { BindHttpException bindHttpException = expectThrows(BindHttpException.class, () -> otherTransport.start()); @@ -273,7 +277,8 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th xContentRegistry(), dispatcher, new NioGroupFactory(settings, logger), - randomClusterSettings() + randomClusterSettings(), + List.of() ) ) { transport.start(); @@ -347,7 +352,8 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th xContentRegistry(), dispatcher, new NioGroupFactory(Settings.EMPTY, logger), - randomClusterSettings() + randomClusterSettings(), + List.of() ) ) { transport.start(); @@ -413,7 +419,8 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th xContentRegistry(), dispatcher, new NioGroupFactory(settings, logger), - randomClusterSettings() + randomClusterSettings(), + List.of() ) ) { transport.start(); @@ -475,7 +482,8 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th xContentRegistry(), dispatcher, new NioGroupFactory(settings, logger), - randomClusterSettings() + randomClusterSettings(), + List.of() ) ) { transport.start(); diff --git a/server/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/server/src/main/java/org/elasticsearch/common/network/NetworkModule.java index 7a8dd042cce85..a36a76bbe324e 100644 --- a/server/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/server/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -31,6 +31,7 @@ import org.elasticsearch.tasks.RawTaskStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportInterceptor; import org.elasticsearch.transport.TransportRequest; @@ -121,7 +122,8 @@ public NetworkModule( NamedXContentRegistry 
xContentRegistry, NetworkService networkService, HttpServerTransport.Dispatcher dispatcher, - ClusterSettings clusterSettings + ClusterSettings clusterSettings, + List tracers ) { this.settings = settings; for (NetworkPlugin plugin : plugins) { @@ -134,7 +136,8 @@ public NetworkModule( xContentRegistry, networkService, dispatcher, - clusterSettings + clusterSettings, + tracers ); for (Map.Entry> entry : httpTransportFactory.entrySet()) { registerHttpTransport(entry.getKey(), entry.getValue()); diff --git a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java index 49b73b1e7f368..7896580de3767 100644 --- a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java +++ b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java @@ -39,6 +39,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.BindTransportException; import org.elasticsearch.transport.TransportSettings; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -100,8 +101,8 @@ protected AbstractHttpServerTransport( ThreadPool threadPool, NamedXContentRegistry xContentRegistry, Dispatcher dispatcher, - ClusterSettings clusterSettings - ) { + ClusterSettings clusterSettings, + List tracers) { this.settings = settings; this.networkService = networkService; this.bigArrays = bigArrays; @@ -125,7 +126,7 @@ protected AbstractHttpServerTransport( this.port = SETTING_HTTP_PORT.get(settings); this.maxContentLength = SETTING_HTTP_MAX_CONTENT_LENGTH.get(settings); - this.tracer = new HttpTracer(settings, clusterSettings); + this.tracer = new HttpTracer(settings, clusterSettings, tracers); clusterSettings.addSettingsUpdateConsumer( TransportSettings.SLOW_OPERATION_THRESHOLD_SETTING, slowLogThreshold -> this.slowLogThresholdMs = slowLogThreshold.getMillis() @@ -421,7 +422,7 @@ private void handleIncomingRequest(final HttpRequest httpRequest, final HttpChan restRequest = innerRestRequest; } - final HttpTracer trace = tracer.maybeTraceRequest(restRequest, exception); + tracer.maybeLogRequest(restRequest, exception); /* * We now want to create a channel used to send the response on. 
However, creating this channel can fail if there are invalid @@ -442,7 +443,7 @@ private void handleIncomingRequest(final HttpRequest httpRequest, final HttpChan handlingSettings, threadContext, corsHandler, - trace + tracer ); } catch (final IllegalArgumentException e) { badRequestCause = ExceptionsHelper.useOrSuppress(badRequestCause, e); @@ -455,12 +456,14 @@ private void handleIncomingRequest(final HttpRequest httpRequest, final HttpChan handlingSettings, threadContext, corsHandler, - trace + tracer ); } channel = innerChannel; } + tracer.onTraceStarted(channel); + dispatchRequest(restRequest, channel, badRequestCause); } diff --git a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java index ff07f0cdbdb3e..9eb00917b0e56 100644 --- a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java +++ b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.rest.AbstractRestChannel; @@ -28,6 +27,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.tasks.Task.X_OPAQUE_ID_HTTP_HEADER; @@ -50,9 +50,7 @@ public class DefaultRestChannel extends AbstractRestChannel implements RestChann private final ThreadContext threadContext; private final HttpChannel httpChannel; private final CorsHandler corsHandler; - - @Nullable - private final HttpTracer tracerLog; + private final HttpTracer tracer; DefaultRestChannel( HttpChannel httpChannel, @@ -62,7 +60,7 @@ public class DefaultRestChannel extends AbstractRestChannel implements RestChann HttpHandlingSettings settings, ThreadContext threadContext, CorsHandler corsHandler, - @Nullable HttpTracer tracerLog + HttpTracer tracer ) { super(request, settings.getDetailedErrorsEnabled()); this.httpChannel = httpChannel; @@ -71,7 +69,7 @@ public class DefaultRestChannel extends AbstractRestChannel implements RestChann this.settings = settings; this.threadContext = threadContext; this.corsHandler = corsHandler; - this.tracerLog = tracerLog; + this.tracer = tracer; } @Override @@ -92,6 +90,14 @@ public void sendResponse(RestResponse restResponse) { boolean success = false; String opaque = null; String contentLength = null; + final AtomicBoolean traceStopped = new AtomicBoolean(false); + final Runnable onFinish = () -> { + Releasables.close(toClose); + if (traceStopped.compareAndSet(false, true)) { + tracer.onTraceStopped(this); + } + }; + try { final BytesReference content = restResponse.content(); if (content instanceof Releasable) { @@ -130,16 +136,15 @@ public void sendResponse(RestResponse restResponse) { addCookies(httpResponse); - ActionListener listener = ActionListener.wrap(() -> Releasables.close(toClose)); + ActionListener listener = ActionListener.wrap(onFinish); + tracer.onTraceEvent(this, "startResponse"); httpChannel.sendResponse(httpResponse, listener); success = true; } finally { if (success == false) { - Releasables.close(toClose); - } - if (tracerLog != null) { - tracerLog.traceResponse(restResponse, httpChannel, contentLength, opaque, request.getRequestId(), success); + onFinish.run(); } + 
tracer.maybeLogResponse(httpRequest.uri(), restResponse, httpChannel, contentLength, opaque, request.getRequestId(), success); } } diff --git a/server/src/main/java/org/elasticsearch/http/HttpTracer.java b/server/src/main/java/org/elasticsearch/http/HttpTracer.java index 23fca1ca7fdd3..bb891b31c9541 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpTracer.java +++ b/server/src/main/java/org/elasticsearch/http/HttpTracer.java @@ -15,24 +15,28 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; +import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.tasks.Task; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.TransportService; import java.util.List; /** - * Http request trace logger. See {@link #maybeTraceRequest(RestRequest, Exception)} for details. + * Http request trace logger. See {@link #maybeLogRequest(RestRequest, Exception)} for details. */ class HttpTracer { private final Logger logger = LogManager.getLogger(HttpTracer.class); + private final List tracers; private volatile String[] tracerLogInclude; private volatile String[] tracerLogExclude; - HttpTracer(Settings settings, ClusterSettings clusterSettings) { + HttpTracer(Settings settings, ClusterSettings clusterSettings, List tracers) { + this.tracers = tracers; setTracerLogInclude(HttpTransportSettings.SETTING_HTTP_TRACE_LOG_INCLUDE.get(settings)); setTracerLogExclude(HttpTransportSettings.SETTING_HTTP_TRACE_LOG_EXCLUDE.get(settings)); @@ -41,18 +45,36 @@ class HttpTracer { clusterSettings.addSettingsUpdateConsumer(HttpTransportSettings.SETTING_HTTP_TRACE_LOG_EXCLUDE, this::setTracerLogExclude); } + void onTraceStarted(RestChannel channel) { + final String header = channel.request().header(Task.TRACE_PARENT_HTTP_HEADER); + this.tracers.forEach(t -> { + if (header != null) { + t.setTraceParent(header); + } + t.onTraceStarted(channel); + }); + } + + void onTraceStopped(RestChannel channel) { + this.tracers.forEach(t -> { + t.onTraceStopped(channel); + }); + } + + void onTraceEvent(RestChannel channel, String eventName) { + this.tracers.forEach(t -> { + t.addEvent(channel, eventName); + }); + } + /** * Logs the given request if request tracing is enabled and the request uri matches the current include and exclude patterns defined * in {@link HttpTransportSettings#SETTING_HTTP_TRACE_LOG_INCLUDE} and {@link HttpTransportSettings#SETTING_HTTP_TRACE_LOG_EXCLUDE}. - * If the request was logged returns a logger to log sending the response with or {@code null} otherwise. 
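
The AtomicBoolean/Runnable pairing introduced in DefaultRestChannel above exists so that releasing resources and reporting the end of the trace happen exactly once, whether the response listener fires or the failure path in the finally block runs. A minimal, self-contained sketch of that pattern follows; the names are illustrative only and not taken from the patch (in the patch, stopTrace() corresponds to tracer.onTraceStopped(this) and releaseResources() to Releasables.close(toClose)).

    import java.util.concurrent.atomic.AtomicBoolean;

    // "Complete exactly once": both the success path (the response listener)
    // and the failure path (the finally block) call the same Runnable, and the
    // AtomicBoolean guarantees the trace-stop callback runs only a single time.
    public class RunOnceCompletion {
        public static void main(String[] args) {
            final AtomicBoolean stopped = new AtomicBoolean(false);
            final Runnable onFinish = () -> {
                releaseResources();
                if (stopped.compareAndSet(false, true)) {
                    stopTrace();
                }
            };

            boolean success = false;
            try {
                sendResponse(onFinish); // success path: the listener invokes onFinish
                success = true;
            } finally {
                if (success == false) {
                    onFinish.run(); // failure path: same Runnable, still runs only once
                }
            }
        }

        private static void releaseResources() { System.out.println("released"); }
        private static void stopTrace() { System.out.println("trace stopped"); }
        private static void sendResponse(Runnable listener) { listener.run(); }
    }
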
* * @param restRequest Rest request to trace * @param e Exception when handling the request or {@code null} if none - * @return This instance to use for logging the response via {@link #traceResponse} to this request if it was logged or - * {@code null} if the request wasn't logged */ - @Nullable - HttpTracer maybeTraceRequest(RestRequest restRequest, @Nullable Exception e) { + void maybeLogRequest(RestRequest restRequest, @Nullable Exception e) { if (logger.isTraceEnabled() && TransportService.shouldTraceAction(restRequest.uri(), tracerLogInclude, tracerLogExclude)) { logger.trace( new ParameterizedMessage( @@ -65,14 +87,13 @@ HttpTracer maybeTraceRequest(RestRequest restRequest, @Nullable Exception e) { ), e ); - return this; } - return null; } /** - * Logs the response to a request that was logged by {@link #maybeTraceRequest(RestRequest, Exception)}. + * Logs the response to a request that was logged by {@link #maybeLogRequest(RestRequest, Exception)}. * + * @param uri * @param restResponse RestResponse * @param httpChannel HttpChannel the response was sent on * @param contentLength Value of the response content length header @@ -80,7 +101,8 @@ HttpTracer maybeTraceRequest(RestRequest restRequest, @Nullable Exception e) { * @param requestId Request id as returned by {@link RestRequest#getRequestId()} * @param success Whether the response was successfully sent */ - void traceResponse( + void maybeLogResponse( + String uri, RestResponse restResponse, HttpChannel httpChannel, String contentLength, @@ -88,18 +110,20 @@ void traceResponse( long requestId, boolean success ) { - logger.trace( - new ParameterizedMessage( - "[{}][{}][{}][{}][{}] sent response to [{}] success [{}]", - requestId, - opaqueHeader, - restResponse.status(), - restResponse.contentType(), - contentLength, - httpChannel, - success - ) - ); + if (logger.isTraceEnabled() && TransportService.shouldTraceAction(uri, tracerLogInclude, tracerLogExclude)) { + logger.trace( + new ParameterizedMessage( + "[{}][{}][{}][{}][{}] sent response to [{}] success [{}]", + requestId, + opaqueHeader, + restResponse.status(), + restResponse.contentType(), + contentLength, + httpChannel, + success + ) + ); + } } private void setTracerLogInclude(List tracerLogInclude) { diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 930c50758d1d7..91f5cfe58a0f3 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -701,9 +701,8 @@ protected Node( .collect(Collectors.toList()); final List tracers = pluginComponents.stream() - .map(c -> c instanceof Tracer ? (Tracer) c : null) - .filter(Objects::nonNull) - .collect(Collectors.toUnmodifiableList()); + .map(c -> c instanceof Tracer t ? 
t : null) + .filter(Objects::nonNull).toList(); ActionModule actionModule = new ActionModule( settings, @@ -733,7 +732,8 @@ protected Node( xContentRegistry, networkService, restController, - clusterService.getClusterSettings() + clusterService.getClusterSettings(), + tracers ); Collection>> indexTemplateMetadataUpgraders = pluginsService.filterPlugins( Plugin.class diff --git a/server/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java b/server/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java index 132f64d1bb61c..607327842d311 100644 --- a/server/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java @@ -17,6 +17,7 @@ import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportInterceptor; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -74,8 +75,8 @@ default Map> getHttpTransports( NamedXContentRegistry xContentRegistry, NetworkService networkService, HttpServerTransport.Dispatcher dispatcher, - ClusterSettings clusterSettings - ) { + ClusterSettings clusterSettings, + List tracers) { return Collections.emptyMap(); } } diff --git a/server/src/main/java/org/elasticsearch/rest/RestChannel.java b/server/src/main/java/org/elasticsearch/rest/RestChannel.java index 708a96ca0c48d..1000455896615 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestChannel.java +++ b/server/src/main/java/org/elasticsearch/rest/RestChannel.java @@ -10,15 +10,17 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.core.Nullable; +import org.elasticsearch.tracing.Traceable; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.util.Map; /** * A channel used to construct bytes / builder based outputs, and send responses. 
*/ -public interface RestChannel { +public interface RestChannel extends Traceable { XContentBuilder newBuilder() throws IOException; @@ -39,4 +41,25 @@ XContentBuilder newBuilder(@Nullable XContentType xContentType, @Nullable XConte boolean detailedErrorsEnabled(); void sendResponse(RestResponse response); + + @Override + default String getSpanId() { + return "rest-" + this.request().getRequestId(); + } + + @Override + default String getSpanName() { + return this.request().path(); + } + + @Override + default Map getAttributes() { + var req = this.request(); + return Map.of( + "http.method", + req.method().name(), + "http.url", + req.uri() + ); + } } diff --git a/server/src/main/java/org/elasticsearch/tracing/Tracer.java b/server/src/main/java/org/elasticsearch/tracing/Tracer.java index dadf22bb0e387..2705e3ee960f6 100644 --- a/server/src/main/java/org/elasticsearch/tracing/Tracer.java +++ b/server/src/main/java/org/elasticsearch/tracing/Tracer.java @@ -31,4 +31,6 @@ public interface Tracer { Map getSpanHeadersById(String id); void setTraceParent(String traceId); + + void addEvent(Traceable traceable, String name); } diff --git a/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java b/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java index 78e83c019be25..4b7d76406cefe 100644 --- a/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java +++ b/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportInterceptor; import org.elasticsearch.transport.TransportRequest; @@ -111,16 +112,16 @@ public void testRegisterHttpTransport() { NetworkModule module = newNetworkModule(settings, new NetworkPlugin() { @Override public Map> getHttpTransports( - Settings settings, - ThreadPool threadPool, - BigArrays bigArrays, - PageCacheRecycler pageCacheRecycler, - CircuitBreakerService circuitBreakerService, - NamedXContentRegistry xContentRegistry, - NetworkService networkService, - HttpServerTransport.Dispatcher requestDispatcher, - ClusterSettings clusterSettings - ) { + Settings settings, + ThreadPool threadPool, + BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, + CircuitBreakerService circuitBreakerService, + NamedXContentRegistry xContentRegistry, + NetworkService networkService, + HttpServerTransport.Dispatcher requestDispatcher, + ClusterSettings clusterSettings, + List tracers) { return Collections.singletonMap("custom", custom); } }); @@ -156,16 +157,16 @@ public Map> getTransports( @Override public Map> getHttpTransports( - Settings settings, - ThreadPool threadPool, - BigArrays bigArrays, - PageCacheRecycler pageCacheRecycler, - CircuitBreakerService circuitBreakerService, - NamedXContentRegistry xContentRegistry, - NetworkService networkService, - HttpServerTransport.Dispatcher requestDispatcher, - ClusterSettings clusterSettings - ) { + Settings settings, + ThreadPool threadPool, + BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, + CircuitBreakerService circuitBreakerService, + NamedXContentRegistry xContentRegistry, + NetworkService networkService, + HttpServerTransport.Dispatcher requestDispatcher, + ClusterSettings clusterSettings, + List tracers) { Map> supplierMap = new HashMap<>(); 
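
The two interface changes above -- RestChannel picking up Traceable's default getSpanId/getSpanName/getAttributes methods, and Tracer gaining addEvent -- define the contract a tracing plugin has to satisfy at this point in the series. The following is only a rough sketch of a trivial implementation, assuming the Tracer interface holds exactly the methods visible in these diffs and that getSpanHeadersById returns a Map<String, String>; the type parameters are not spelled out above, so treat them as inferred.

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    import org.elasticsearch.tracing.Traceable;
    import org.elasticsearch.tracing.Tracer;

    // Illustrative logging tracer; a real implementation would delegate to an
    // APM/OpenTelemetry client, as APMTracer does later in this series.
    public class LoggingTracer implements Tracer {

        private final Map<String, Long> startTimes = new ConcurrentHashMap<>();

        @Override
        public void onTraceStarted(Traceable traceable) {
            // getSpanId()/getSpanName()/getAttributes() come from the RestChannel defaults above
            startTimes.put(traceable.getSpanId(), System.nanoTime());
            System.out.println("start " + traceable.getSpanName() + " " + traceable.getAttributes());
        }

        @Override
        public void onTraceStopped(Traceable traceable) {
            Long started = startTimes.remove(traceable.getSpanId());
            if (started != null) {
                System.out.println("stop " + traceable.getSpanId() + " after " + (System.nanoTime() - started) + "ns");
            }
        }

        @Override
        public void addEvent(Traceable traceable, String name) {
            System.out.println("event [" + name + "] on " + traceable.getSpanId());
        }

        @Override
        public Map<String, String> getSpanHeadersById(String id) {
            return Map.of(); // no context propagation in this sketch
        }

        @Override
        public void setTraceParent(String traceparent) {
            // a real tracer would adopt the incoming W3C traceparent here
        }
    }
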
supplierMap.put("custom", custom); supplierMap.put("default_custom", def); @@ -199,16 +200,16 @@ public Map> getTransports( @Override public Map> getHttpTransports( - Settings settings, - ThreadPool threadPool, - BigArrays bigArrays, - PageCacheRecycler pageCacheRecycler, - CircuitBreakerService circuitBreakerService, - NamedXContentRegistry xContentRegistry, - NetworkService networkService, - HttpServerTransport.Dispatcher requestDispatcher, - ClusterSettings clusterSettings - ) { + Settings settings, + ThreadPool threadPool, + BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, + CircuitBreakerService circuitBreakerService, + NamedXContentRegistry xContentRegistry, + NetworkService networkService, + HttpServerTransport.Dispatcher requestDispatcher, + ClusterSettings clusterSettings, + List tracers) { Map> supplierMap = new HashMap<>(); supplierMap.put("custom", custom); supplierMap.put("default_custom", def); diff --git a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java index 2d6ce3e0b5f02..18129d0b86aac 100644 --- a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java +++ b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java @@ -161,8 +161,8 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th threadPool, xContentRegistry(), dispatcher, - new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) - ) { + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), + tracers) { @Override protected HttpServerChannel bind(InetSocketAddress hostAddress) { @@ -275,8 +275,8 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, assertThat(mediaTypeHeaderException.getMessage(), equalTo("Invalid media-type value on headers " + failedHeaderNames)); } }, - clusterSettings - ) { + clusterSettings, + tracers) { @Override protected HttpServerChannel bind(InetSocketAddress hostAddress) { return null; @@ -325,8 +325,8 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, channel.sendResponse(emptyResponse(RestStatus.BAD_REQUEST)); } }, - clusterSettings - ) { + clusterSettings, + tracers) { @Override protected HttpServerChannel bind(InetSocketAddress hostAddress) { return null; @@ -478,8 +478,8 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, channel.sendResponse(emptyResponse(RestStatus.BAD_REQUEST)); } }, - clusterSettings - ) { + clusterSettings, + tracers) { @Override protected HttpServerChannel bind(InetSocketAddress hostAddress) { return null; @@ -534,8 +534,8 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, channel.sendResponse(emptyResponse(RestStatus.BAD_REQUEST)); } }, - new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) - ) { + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), + tracers) { @Override protected HttpServerChannel bind(InetSocketAddress hostAddress) { @@ -609,8 +609,8 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, channel.sendResponse(emptyResponse(RestStatus.BAD_REQUEST)); } }, - clusterSettings - ) { + clusterSettings, + tracers) { @Override protected HttpServerChannel bind(InetSocketAddress hostAddress) { diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java 
b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index a40721de154ac..e2f44fd91374e 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -43,6 +43,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.core.Tuple; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.tracing.Traceable; @@ -85,7 +86,7 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic ); private final Semaphore shutdownPermits = new Semaphore(Integer.MAX_VALUE); - private final Map spans = ConcurrentCollections.newConcurrentMap(); + private final Map> spans = ConcurrentCollections.newConcurrentMap(); private final ThreadPool threadPool; private final ClusterService clusterService; private final SecureString endpoint; @@ -265,13 +266,14 @@ public void onTraceStarted(Traceable traceable) { // but providing them as span attributes allow easier mapping through labels as otel attributes are stored as-is only in // 7.16. spanBuilder.setAttribute(Traceable.AttributeKeys.NODE_NAME, clusterService.getNodeName()); - spanBuilder.setAttribute(Traceable.AttributeKeys.CLUSTER_NAME, clusterService.getClusterName().toString()); + spanBuilder.setAttribute(Traceable.AttributeKeys.CLUSTER_NAME, clusterService.getClusterName().value()); final String xOpaqueId = threadPool.getThreadContext().getHeader(Task.X_OPAQUE_ID_HTTP_HEADER); if (xOpaqueId != null) { spanBuilder.setAttribute("es.x-opaque-id", xOpaqueId); } - return spanBuilder.startSpan(); + final Span span = spanBuilder.startSpan(); + return Tuple.tuple(span, span.makeCurrent()); }); } @@ -312,6 +314,14 @@ public void setTraceParent(String traceparent) { .makeCurrent(); } + @Override + public void addEvent(Traceable traceable, String name) { + final Tuple tuple = spans.get(traceable.getSpanId()); + if (tuple != null) { + tuple.v1().addEvent(name); + } + } + @Override public Map getSpanHeadersById(String id) { var services = this.services; @@ -319,19 +329,20 @@ public Map getSpanHeadersById(String id) { if (span == null || services == null) { return null; } - try (Scope ignore = span.makeCurrent()) { +// try (Scope ignore = span.makeCurrent()) { Map spanHeaders = new HashMap<>(); services.openTelemetry.getPropagators().getTextMapPropagator().inject(Context.current(), spanHeaders, Map::put); spanHeaders.keySet().removeIf(k -> isSupportedContextKey(k) == false); return spanHeaders; - } +// } } @Override public void onTraceStopped(Traceable traceable) { - final Span span = spans.remove(traceable.getSpanId()); - if (span != null) { - span.end(); + final Tuple tuple = spans.remove(traceable.getSpanId()); + if (tuple != null) { + tuple.v2().close(); + tuple.v1().end(); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java index 7e37ebfc4d22e..c1c8cfb18cbad 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java @@ -87,6 +87,7 @@ import 
org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportInterceptor; import org.elasticsearch.watcher.ResourceWatcherService; @@ -392,16 +393,16 @@ public Map> getTransports( @Override public Map> getHttpTransports( - Settings settings, - ThreadPool threadPool, - BigArrays bigArrays, - PageCacheRecycler pageCacheRecycler, - CircuitBreakerService circuitBreakerService, - NamedXContentRegistry xContentRegistry, - NetworkService networkService, - HttpServerTransport.Dispatcher dispatcher, - ClusterSettings clusterSettings - ) { + Settings settings, + ThreadPool threadPool, + BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, + CircuitBreakerService circuitBreakerService, + NamedXContentRegistry xContentRegistry, + NetworkService networkService, + HttpServerTransport.Dispatcher dispatcher, + ClusterSettings clusterSettings, + List tracers) { Map> transports = new HashMap<>(); filterPlugins(NetworkPlugin.class).stream() .forEach( @@ -415,8 +416,8 @@ public Map> getHttpTransports( xContentRegistry, networkService, dispatcher, - clusterSettings - ) + clusterSettings, + null) ) ); return transports; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 77efab96a6184..4809091762225 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -1517,8 +1517,8 @@ public Map> getHttpTransports( NamedXContentRegistry xContentRegistry, NetworkService networkService, HttpServerTransport.Dispatcher dispatcher, - ClusterSettings clusterSettings - ) { + ClusterSettings clusterSettings, + List tracers) { if (enabled == false) { // don't register anything if we are not enabled return Collections.emptyMap(); } @@ -1536,8 +1536,8 @@ public Map> getHttpTransports( xContentRegistry, dispatcher, clusterSettings, - getNettySharedGroupFactory(settings) - ) + getNettySharedGroupFactory(settings), + tracers) ); httpTransports.put( SecurityField.NIO, @@ -1552,7 +1552,8 @@ public Map> getHttpTransports( ipFilter.get(), getSslService(), getNioGroupFactory(settings), - clusterSettings + clusterSettings, + tracers ) ); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java index 5875f43a5d741..539321060fb9b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java @@ -20,6 +20,7 @@ import org.elasticsearch.http.HttpChannel; import org.elasticsearch.http.netty4.Netty4HttpServerTransport; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.netty4.SharedGroupFactory; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ssl.SSLService; @@ -28,6 +29,8 @@ import javax.net.ssl.SSLEngine; +import java.util.List; + import 
static org.elasticsearch.xpack.core.XPackSettings.HTTP_SSL_ENABLED; public class SecurityNetty4HttpServerTransport extends Netty4HttpServerTransport { @@ -48,9 +51,10 @@ public SecurityNetty4HttpServerTransport( NamedXContentRegistry xContentRegistry, Dispatcher dispatcher, ClusterSettings clusterSettings, - SharedGroupFactory sharedGroupFactory + SharedGroupFactory sharedGroupFactory, + List tracers ) { - super(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher, clusterSettings, sharedGroupFactory); + super(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher, clusterSettings, sharedGroupFactory, tracers); this.securityExceptionHandler = new SecurityHttpExceptionHandler(logger, lifecycle, (c, e) -> super.onException(c, e)); this.ipFilter = ipFilter; final boolean ssl = HTTP_SSL_ENABLED.get(settings); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java index a1578f103e624..7a4bd78626093 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java @@ -28,6 +28,7 @@ import org.elasticsearch.nio.ServerChannelContext; import org.elasticsearch.nio.SocketChannelContext; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.nio.NioGroupFactory; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ssl.SSLService; @@ -38,6 +39,7 @@ import java.net.InetSocketAddress; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; +import java.util.List; import java.util.function.Consumer; import javax.net.ssl.SSLEngine; @@ -64,7 +66,8 @@ public SecurityNioHttpServerTransport( IPFilter ipFilter, SSLService sslService, NioGroupFactory nioGroupFactory, - ClusterSettings clusterSettings + ClusterSettings clusterSettings, + List tracers ) { super( settings, @@ -75,7 +78,8 @@ public SecurityNioHttpServerTransport( xContentRegistry, dispatcher, nioGroupFactory, - clusterSettings + clusterSettings, + tracers ); this.securityExceptionHandler = new SecurityHttpExceptionHandler(logger, lifecycle, (c, e) -> super.onException(c, e)); this.ipFilter = ipFilter; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java index 7fe9c51e2f56e..743aa4e59f8ec 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java @@ -29,6 +29,7 @@ import java.nio.file.Path; import java.util.Collections; +import java.util.List; import javax.net.ssl.SSLEngine; @@ -76,8 +77,8 @@ public void testDefaultClientAuth() throws Exception { xContentRegistry(), new NullDispatcher(), randomClusterSettings(), - new SharedGroupFactory(settings) - ); + new SharedGroupFactory(settings), + List.of()); ChannelHandler handler = 
transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false)); @@ -102,8 +103,8 @@ public void testOptionalClientAuth() throws Exception { xContentRegistry(), new NullDispatcher(), randomClusterSettings(), - new SharedGroupFactory(settings) - ); + new SharedGroupFactory(settings), + List.of()); ChannelHandler handler = transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false)); @@ -128,8 +129,8 @@ public void testRequiredClientAuth() throws Exception { xContentRegistry(), new NullDispatcher(), randomClusterSettings(), - new SharedGroupFactory(settings) - ); + new SharedGroupFactory(settings), + List.of()); ChannelHandler handler = transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(true)); @@ -154,8 +155,8 @@ public void testNoClientAuth() throws Exception { xContentRegistry(), new NullDispatcher(), randomClusterSettings(), - new SharedGroupFactory(settings) - ); + new SharedGroupFactory(settings), + List.of()); ChannelHandler handler = transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false)); @@ -175,8 +176,8 @@ public void testCustomSSLConfiguration() throws Exception { xContentRegistry(), new NullDispatcher(), randomClusterSettings(), - new SharedGroupFactory(settings) - ); + new SharedGroupFactory(settings), + List.of()); ChannelHandler handler = transport.configureServerChannelHandler(); EmbeddedChannel ch = new EmbeddedChannel(handler); SSLEngine defaultEngine = ch.pipeline().get(SslHandler.class).engine(); @@ -197,8 +198,8 @@ public void testCustomSSLConfiguration() throws Exception { xContentRegistry(), new NullDispatcher(), randomClusterSettings(), - new SharedGroupFactory(settings) - ); + new SharedGroupFactory(settings), + List.of()); handler = transport.configureServerChannelHandler(); ch = new EmbeddedChannel(handler); SSLEngine customEngine = ch.pipeline().get(SslHandler.class).engine(); @@ -228,8 +229,8 @@ public void testNoExceptionWhenConfiguredWithoutSslKeySSLDisabled() throws Excep xContentRegistry(), new NullDispatcher(), randomClusterSettings(), - new SharedGroupFactory(settings) - ); + new SharedGroupFactory(settings), + List.of()); assertNotNull(transport.configureServerChannelHandler()); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransportTests.java index bf6e272c6e949..b6a7a04d7b866 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransportTests.java @@ -34,6 +34,7 @@ import java.nio.channels.SocketChannel; import java.nio.file.Path; import java.util.Collections; +import java.util.List; import javax.net.ssl.SSLEngine; @@ -83,7 +84,8 @@ public void testDefaultClientAuth() throws IOException { mock(IPFilter.class), sslService, nioGroupFactory, - 
randomClusterSettings() + randomClusterSettings(), + List.of() ); SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory(); SocketChannel socketChannel = mock(SocketChannel.class); @@ -115,7 +117,8 @@ public void testOptionalClientAuth() throws IOException { mock(IPFilter.class), sslService, nioGroupFactory, - randomClusterSettings() + randomClusterSettings(), + List.of() ); SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory(); @@ -147,7 +150,8 @@ public void testRequiredClientAuth() throws IOException { mock(IPFilter.class), sslService, nioGroupFactory, - randomClusterSettings() + randomClusterSettings(), + List.of() ); SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory(); @@ -179,7 +183,8 @@ public void testNoClientAuth() throws IOException { mock(IPFilter.class), sslService, nioGroupFactory, - randomClusterSettings() + randomClusterSettings(), + List.of() ); SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory(); @@ -206,7 +211,8 @@ public void testCustomSSLConfiguration() throws IOException { mock(IPFilter.class), sslService, nioGroupFactory, - randomClusterSettings() + randomClusterSettings(), + List.of() ); SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory(); SocketChannel socketChannel = mock(SocketChannel.class); @@ -232,7 +238,8 @@ public void testCustomSSLConfiguration() throws IOException { mock(IPFilter.class), sslService, nioGroupFactory, - randomClusterSettings() + randomClusterSettings(), + List.of() ); factory = transport.channelFactory(); channel = factory.createChannel(mock(NioSelector.class), socketChannel, mock(Config.Socket.class)); @@ -267,7 +274,8 @@ public void testNoExceptionWhenConfiguredWithoutSslKeySSLDisabled() { mock(IPFilter.class), sslService, nioGroupFactory, - randomClusterSettings() + randomClusterSettings(), + List.of() ); } } From 4e7f9dcab9f4dd5faf8acc344288467bc3f60552 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 16 Mar 2022 09:24:23 +0000 Subject: [PATCH 25/90] WIP - more messing around --- .../elasticsearch/action/ActionModule.java | 2 +- .../org/elasticsearch/http/HttpTracer.java | 15 +++-- .../elasticsearch/rest/RestController.java | 7 +-- .../org/elasticsearch/tracing/Tracer.java | 8 ++- .../rest/RestControllerTests.java | 26 ++++---- .../rest/RestHttpResponseHeadersTests.java | 2 +- .../indices/RestValidateQueryActionTests.java | 2 +- .../test/rest/RestActionTestCase.java | 2 +- .../elasticsearch/xpack/apm/APMTracer.java | 59 +++++++++---------- .../action/RestTermsEnumActionTests.java | 2 +- 10 files changed, 60 insertions(+), 65 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 0877da0f07e46..79f7709053c19 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -506,7 +506,7 @@ public ActionModule( actionPlugins.stream().flatMap(p -> p.indicesAliasesRequestValidators().stream()).collect(Collectors.toList()) ); - restController = new RestController(headers, restWrapper, nodeClient, circuitBreakerService, usageService, tracers); + restController = new RestController(headers, restWrapper, nodeClient, circuitBreakerService, usageService); } public Map> getActions() { diff --git 
a/server/src/main/java/org/elasticsearch/http/HttpTracer.java b/server/src/main/java/org/elasticsearch/http/HttpTracer.java index bb891b31c9541..4a8f5665e843f 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpTracer.java +++ b/server/src/main/java/org/elasticsearch/http/HttpTracer.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; @@ -46,13 +47,15 @@ class HttpTracer { } void onTraceStarted(RestChannel channel) { - final String header = channel.request().header(Task.TRACE_PARENT_HTTP_HEADER); - this.tracers.forEach(t -> { - if (header != null) { - t.setTraceParent(header); + final List headerValues = channel.request().getAllHeaderValues(Task.TRACE_PARENT_HTTP_HEADER); + if (headerValues != null && headerValues.size() == 1) { + String traceparent = headerValues.get(0); + if (traceparent.length() >= 55) { + this.tracers.forEach(t -> t.onTraceStarted(channel, traceparent)); } - t.onTraceStarted(channel); - }); + } else { + this.tracers.forEach(t -> t.onTraceStarted(channel)); + } } void onTraceStopped(RestChannel channel) { diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 564769c408814..079433dc4ecab 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -29,7 +29,6 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.rest.RestHandler.Route; import org.elasticsearch.tasks.Task; -import org.elasticsearch.tracing.Tracer; import org.elasticsearch.usage.UsageService; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; @@ -92,15 +91,13 @@ public class RestController implements HttpServerTransport.Dispatcher { /** Rest headers that are copied to internal requests made during a rest request. 
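
The length check in HttpTracer.onTraceStarted above leans on the W3C trace-context layout: a version-00 traceparent is "00-<32 hex trace id>-<16 hex parent id>-<2 hex flags>", exactly 2 + 1 + 32 + 1 + 16 + 1 + 2 = 55 characters, and the trace id occupies characters 3 through 34 -- which is also why RestController's copyRestHeaders (below) takes substring(3, 35). A small standalone sketch of that layout; the header value is a made-up example.

    // Parse the four dash-separated fields of a version-00 W3C traceparent header.
    public class TraceParent {
        public static void main(String[] args) {
            String traceparent = "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01";

            if (traceparent.length() >= 55) {             // anything shorter cannot be valid
                String version  = traceparent.substring(0, 2);    // "00"
                String traceId  = traceparent.substring(3, 35);   // 32 hex chars
                String parentId = traceparent.substring(36, 52);  // 16 hex chars
                String flags    = traceparent.substring(53, 55);  // "01" = sampled
                System.out.println(version + " " + traceId + " " + parentId + " " + flags);
            }
        }
    }
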
*/ private final Set headersToCopy; private final UsageService usageService; - private final List tracers; public RestController( Set headersToCopy, UnaryOperator handlerWrapper, NodeClient client, CircuitBreakerService circuitBreakerService, - UsageService usageService, - List tracers) { + UsageService usageService) { this.headersToCopy = headersToCopy; this.usageService = usageService; if (handlerWrapper == null) { @@ -109,7 +106,6 @@ public RestController( this.handlerWrapper = handlerWrapper; this.client = client; this.circuitBreakerService = circuitBreakerService; - this.tracers = tracers; registerHandlerNoWrap( RestRequest.Method.GET, "/favicon.ico", @@ -504,7 +500,6 @@ private void copyRestHeaders(RestRequest request, ThreadContext threadContext) t final String traceId = traceparent.substring(3, 35); threadContext.putHeader(Task.TRACE_ID, traceId); threadContext.putHeader(Task.TRACE_PARENT_HTTP_HEADER, traceparent); - tracers.forEach(t -> t.setTraceParent(traceparent)); } } else { threadContext.putHeader(name, String.join(",", distinctHeaderValues)); diff --git a/server/src/main/java/org/elasticsearch/tracing/Tracer.java b/server/src/main/java/org/elasticsearch/tracing/Tracer.java index 2705e3ee960f6..bcd24a758c910 100644 --- a/server/src/main/java/org/elasticsearch/tracing/Tracer.java +++ b/server/src/main/java/org/elasticsearch/tracing/Tracer.java @@ -18,7 +18,11 @@ public interface Tracer { /** * Called when the {@link Traceable} activity starts. */ - void onTraceStarted(Traceable traceable); + default void onTraceStarted(Traceable traceable) { + this.onTraceStarted(traceable, null); + } + + void onTraceStarted(Traceable traceable, String traceparent); /** * Called when the {@link Traceable} activity ends. @@ -30,7 +34,5 @@ public interface Tracer { */ Map getSpanHeadersById(String id); - void setTraceParent(String traceId); - void addEvent(Traceable traceable, String name); } diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java index e60a01de2f4be..542f34f69585f 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -94,7 +94,7 @@ public void setup() { HttpServerTransport httpServerTransport = new TestHttpServerTransport(); client = new NoOpNodeClient(this.getTestName()); - restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService, tracers); + restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService); restController.registerHandler( new Route(GET, "/"), (request, channel, client) -> channel.sendResponse( @@ -118,7 +118,7 @@ public void testApplyRelevantHeaders() throws Exception { Set headers = new HashSet<>( Arrays.asList(new RestHeaderDefinition("header.1", true), new RestHeaderDefinition("header.2", true)) ); - final RestController restController = new RestController(headers, null, null, circuitBreakerService, usageService, tracers); + final RestController restController = new RestController(headers, null, null, circuitBreakerService, usageService); Map> restHeaders = new HashMap<>(); restHeaders.put("header.1", Collections.singletonList("true")); restHeaders.put("header.2", Collections.singletonList("true")); @@ -160,7 +160,7 @@ public void testRequestWithDisallowedMultiValuedHeader() { Set headers = new HashSet<>( Arrays.asList(new RestHeaderDefinition("header.1", 
true), new RestHeaderDefinition("header.2", false)) ); - final RestController restController = new RestController(headers, null, null, circuitBreakerService, usageService, tracers); + final RestController restController = new RestController(headers, null, null, circuitBreakerService, usageService); Map> restHeaders = new HashMap<>(); restHeaders.put("header.1", Collections.singletonList("boo")); restHeaders.put("header.2", List.of("foo", "bar")); @@ -173,7 +173,7 @@ public void testRequestWithDisallowedMultiValuedHeader() { public void testTraceParentAndTraceId() throws Exception { final ThreadContext threadContext = client.threadPool().getThreadContext(); Set headers = new HashSet<>(Arrays.asList(new RestHeaderDefinition(Task.TRACE_PARENT_HTTP_HEADER, false))); - final RestController restController = new RestController(headers, null, null, circuitBreakerService, usageService, tracers); + final RestController restController = new RestController(headers, null, null, circuitBreakerService, usageService); Map> restHeaders = new HashMap<>(); restHeaders.put( Task.TRACE_PARENT_HTTP_HEADER, @@ -208,7 +208,7 @@ public void testRequestWithDisallowedMultiValuedHeaderButSameValues() { Set headers = new HashSet<>( Arrays.asList(new RestHeaderDefinition("header.1", true), new RestHeaderDefinition("header.2", false)) ); - final RestController restController = new RestController(headers, null, client, circuitBreakerService, usageService, tracers); + final RestController restController = new RestController(headers, null, client, circuitBreakerService, usageService); Map> restHeaders = new HashMap<>(); restHeaders.put("header.1", Collections.singletonList("boo")); restHeaders.put("header.2", List.of("foo", "foo")); @@ -279,7 +279,7 @@ public void testRegisterAsReplacedHandler() { } public void testRegisterSecondMethodWithDifferentNamedWildcard() { - final RestController restController = new RestController(null, null, null, circuitBreakerService, usageService, tracers); + final RestController restController = new RestController(null, null, null, circuitBreakerService, usageService); RestRequest.Method firstMethod = randomFrom(RestRequest.Method.values()); RestRequest.Method secondMethod = randomFrom( @@ -308,7 +308,7 @@ public void testRestHandlerWrapper() throws Exception { final RestController restController = new RestController(Collections.emptySet(), h -> { assertSame(handler, h); return (RestRequest request, RestChannel channel, NodeClient client) -> wrapperCalled.set(true); - }, client, circuitBreakerService, usageService, tracers); + }, client, circuitBreakerService, usageService); restController.registerHandler(new Route(GET, "/wrapped"), handler); RestRequest request = testRestRequest("/wrapped", "{}", XContentType.JSON); AssertingChannel channel = new AssertingChannel(request, true, RestStatus.BAD_REQUEST); @@ -371,7 +371,7 @@ public void testDispatchRequiresContentTypeForRequestsWithContent() { String content = randomAlphaOfLength((int) Math.round(BREAKER_LIMIT.getBytes() / inFlightRequestsBreaker.getOverhead())); RestRequest request = testRestRequest("/", content, null); AssertingChannel channel = new AssertingChannel(request, true, RestStatus.NOT_ACCEPTABLE); - restController = new RestController(Collections.emptySet(), null, null, circuitBreakerService, usageService, tracers); + restController = new RestController(Collections.emptySet(), null, null, circuitBreakerService, usageService); restController.registerHandler( new Route(GET, "/"), (r, c, client) -> c.sendResponse(new 
BytesRestResponse(RestStatus.OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY)) @@ -691,7 +691,7 @@ public Exception getInboundException() { public void testDispatchCompatibleHandler() { - RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService, tracers); + RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService); final RestApiVersion version = RestApiVersion.minimumSupported(); @@ -715,7 +715,7 @@ public void testDispatchCompatibleHandler() { public void testDispatchCompatibleRequestToNewlyAddedHandler() { - RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService, tracers); + RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService); final RestApiVersion version = RestApiVersion.minimumSupported(); @@ -750,7 +750,7 @@ private FakeRestRequest requestWithContent(String mediaType) { } public void testCurrentVersionVNDMediaTypeIsNotUsingCompatibility() { - RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService, tracers); + RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService); final RestApiVersion version = RestApiVersion.current(); @@ -775,7 +775,7 @@ public void testCurrentVersionVNDMediaTypeIsNotUsingCompatibility() { } public void testCustomMediaTypeValidation() { - RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService, tracers); + RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService); final String mediaType = "application/x-protobuf"; FakeRestRequest fakeRestRequest = requestWithContent(mediaType); @@ -801,7 +801,7 @@ public void handleRequest(RestRequest request, RestChannel channel, NodeClient c } public void testBrowserSafelistedContentTypesAreRejected() { - RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService, tracers); + RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService); final String mediaType = randomFrom(RestController.SAFELISTED_MEDIA_TYPES); FakeRestRequest fakeRestRequest = requestWithContent(mediaType); diff --git a/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java b/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java index 6b3004bb5ee5a..2c18994462eab 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java @@ -80,7 +80,7 @@ public void testUnsupportedMethodResponseHttpHeader() throws Exception { final Settings settings = Settings.EMPTY; UsageService usageService = new UsageService(); - RestController restController = new RestController(Collections.emptySet(), null, null, circuitBreakerService, usageService, tracers); + RestController restController = new RestController(Collections.emptySet(), null, null, circuitBreakerService, usageService); // A basic RestHandler handles requests to the endpoint RestHandler restHandler = (request, channel, client) -> channel.sendResponse(new TestResponse()); diff --git 
a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java index 7ad1b72858244..e2612383ce04c 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java @@ -54,7 +54,7 @@ public class RestValidateQueryActionTests extends AbstractSearchTestCase { private NodeClient client = new NodeClient(Settings.EMPTY, threadPool); private UsageService usageService = new UsageService(); - private RestController controller = new RestController(emptySet(), null, client, new NoneCircuitBreakerService(), usageService, tracers); + private RestController controller = new RestController(emptySet(), null, client, new NoneCircuitBreakerService(), usageService); private RestValidateQueryAction action = new RestValidateQueryAction(); /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java index 4d50aacd65cb0..b539338337ca9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java @@ -40,7 +40,7 @@ public abstract class RestActionTestCase extends ESTestCase { @Before public void setUpController() { verifyingClient = new VerifyingClient(this.getTestName()); - controller = new RestController(Collections.emptySet(), null, verifyingClient, new NoneCircuitBreakerService(), new UsageService(), tracers); + controller = new RestController(Collections.emptySet(), null, verifyingClient, new NoneCircuitBreakerService(), new UsageService()); } @After diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index e2f44fd91374e..e680957f298f7 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -11,6 +11,7 @@ import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.SpanBuilder; +import io.opentelemetry.api.trace.SpanKind; import io.opentelemetry.api.trace.Tracer; import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; import io.opentelemetry.context.Context; @@ -43,7 +44,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.core.Tuple; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.tracing.Traceable; @@ -86,7 +86,7 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic ); private final Semaphore shutdownPermits = new Semaphore(Integer.MAX_VALUE); - private final Map> spans = ConcurrentCollections.newConcurrentMap(); + private final Map spans = ConcurrentCollections.newConcurrentMap(); private final ThreadPool threadPool; private final ClusterService clusterService; private final SecureString endpoint; @@ -216,7 +216,7 @@ private void destroyApmServices() { } @Override - public void onTraceStarted(Traceable 
traceable) { + public void onTraceStarted(Traceable traceable, String traceparent) { var services = this.services; if (services == null) { return; @@ -229,7 +229,7 @@ public void onTraceStarted(Traceable traceable) { spans.computeIfAbsent(traceable.getSpanId(), spanId -> { // services might be in shutdown state by this point, but this is handled by the open telemetry internally final SpanBuilder spanBuilder = services.tracer.spanBuilder(traceable.getSpanName()); - Context parentContext = getParentSpanContext(services.openTelemetry); + Context parentContext = getParentSpanContext(services.openTelemetry, traceparent); if (parentContext != null) { spanBuilder.setParent(parentContext); } @@ -272,8 +272,7 @@ public void onTraceStarted(Traceable traceable) { if (xOpaqueId != null) { spanBuilder.setAttribute("es.x-opaque-id", xOpaqueId); } - final Span span = spanBuilder.startSpan(); - return Tuple.tuple(span, span.makeCurrent()); + return spanBuilder.startSpan(); }); } @@ -283,42 +282,39 @@ private boolean isSpanNameIncluded(String name) { return includeNames.isEmpty() || Regex.simpleMatch(includeNames, name); } - private Context getParentSpanContext(OpenTelemetry openTelemetry) { + private Context getParentSpanContext(OpenTelemetry openTelemetry, String traceParent) { +// if (traceParent != null) { +// // traceparent and tracestate should match the keys used by W3CTraceContextPropagator +// return openTelemetry.getPropagators() +// .getTextMapPropagator() +// .extract(Context.current(), Map.of(Task.TRACE_PARENT_HTTP_HEADER, traceParent), new MapKeyGetter()); +// } + // If we already have a non-root span context that should be the parent if (Context.current() != Context.root()) { return Context.current(); } // If not let us check for a parent context in the thread context - String traceParent = threadPool.getThreadContext().getHeader(Task.TRACE_PARENT_HTTP_HEADER); - String traceState = threadPool.getThreadContext().getHeader(Task.TRACE_STATE); - if (traceParent != null) { + String traceParentHeader = threadPool.getThreadContext().getHeader(Task.TRACE_PARENT_HTTP_HEADER); + String traceStateHeader = threadPool.getThreadContext().getHeader(Task.TRACE_STATE); + if (traceParentHeader != null) { Map traceContextMap = new HashMap<>(); // traceparent and tracestate should match the keys used by W3CTraceContextPropagator - traceContextMap.put(Task.TRACE_PARENT_HTTP_HEADER, traceParent); - if (traceState != null) { - traceContextMap.put(Task.TRACE_STATE, traceState); + traceContextMap.put(Task.TRACE_PARENT_HTTP_HEADER, traceParentHeader); + if (traceStateHeader != null) { + traceContextMap.put(Task.TRACE_STATE, traceStateHeader); } return openTelemetry.getPropagators().getTextMapPropagator().extract(Context.current(), traceContextMap, new MapKeyGetter()); } return null; } - @Override - public void setTraceParent(String traceparent) { - // traceparent and tracestate should match the keys used by W3CTraceContextPropagator - // TODO tracestate? 
- services.openTelemetry.getPropagators() - .getTextMapPropagator() - .extract(Context.current(), Map.of(Task.TRACE_PARENT_HTTP_HEADER, traceparent), new MapKeyGetter()) - .makeCurrent(); - } - @Override public void addEvent(Traceable traceable, String name) { - final Tuple tuple = spans.get(traceable.getSpanId()); - if (tuple != null) { - tuple.v1().addEvent(name); + final Span span = spans.get(traceable.getSpanId()); + if (span != null) { + span.addEvent(name); } } @@ -329,20 +325,19 @@ public Map getSpanHeadersById(String id) { if (span == null || services == null) { return null; } -// try (Scope ignore = span.makeCurrent()) { + try (Scope ignore = span.makeCurrent()) { Map spanHeaders = new HashMap<>(); services.openTelemetry.getPropagators().getTextMapPropagator().inject(Context.current(), spanHeaders, Map::put); spanHeaders.keySet().removeIf(k -> isSupportedContextKey(k) == false); return spanHeaders; -// } + } } @Override public void onTraceStopped(Traceable traceable) { - final Tuple tuple = spans.remove(traceable.getSpanId()); - if (tuple != null) { - tuple.v2().close(); - tuple.v1().end(); + final Span span = spans.remove(traceable.getSpanId()); + if (span != null) { + span.end(); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java index 42586464bbd74..cc67e727aa6e1 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java @@ -52,7 +52,7 @@ public class RestTermsEnumActionTests extends ESTestCase { private static NodeClient client = new NodeClient(Settings.EMPTY, threadPool); private static UsageService usageService = new UsageService(); - private static RestController controller = new RestController(emptySet(), null, client, new NoneCircuitBreakerService(), usageService, tracers); + private static RestController controller = new RestController(emptySet(), null, client, new NoneCircuitBreakerService(), usageService); private static RestTermsEnumAction action = new RestTermsEnumAction(); /** From 11dc5e13a9723977783184b5ec16fa4416d1d739 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 16 Mar 2022 12:58:14 +0000 Subject: [PATCH 26/90] HACK HACK HACK --- .../http/AbstractHttpServerTransport.java | 8 ++- .../http/DefaultRestChannel.java | 10 +++ .../org/elasticsearch/http/HttpTracer.java | 14 +--- .../org/elasticsearch/rest/RestChannel.java | 5 ++ .../elasticsearch/rest/RestController.java | 12 +++- .../org/elasticsearch/tasks/TaskManager.java | 4 +- .../org/elasticsearch/tasks/TaskTracer.java | 5 +- .../org/elasticsearch/tracing/Tracer.java | 15 +---- .../org/elasticsearch/xpack/apm/ApmIT.java | 2 +- .../java/org/elasticsearch/xpack/apm/APM.java | 58 ----------------- .../elasticsearch/xpack/apm/APMTracer.java | 65 ++++++++----------- .../xpack/security/AuthorizationTracer.java | 2 +- 12 files changed, 73 insertions(+), 127 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java index 7896580de3767..7cb8b227f5b7a 100644 --- a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java +++ b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java @@ -430,10 
+430,10 @@ private void handleIncomingRequest(final HttpRequest httpRequest, final HttpChan * IllegalArgumentException from the channel constructor and then attempt to create a new channel that bypasses parsing of these * parameter values. */ + final ThreadContext threadContext = threadPool.getThreadContext(); final RestChannel channel; { RestChannel innerChannel; - ThreadContext threadContext = threadPool.getThreadContext(); try { innerChannel = new DefaultRestChannel( httpChannel, @@ -462,11 +462,15 @@ private void handleIncomingRequest(final HttpRequest httpRequest, final HttpChan channel = innerChannel; } - tracer.onTraceStarted(channel); + onTraceStarted(threadContext, channel); dispatchRequest(restRequest, channel, badRequestCause); } + protected void onTraceStarted(ThreadContext threadContext, RestChannel restChannel) { + tracer.onTraceStarted(threadContext, restChannel); + } + private RestRequest requestWithoutFailedHeader( HttpRequest httpRequest, HttpChannel httpChannel, diff --git a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java index 9eb00917b0e56..531661d5afab7 100644 --- a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java +++ b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java @@ -178,4 +178,14 @@ private void addCookies(HttpResponse response) { } } } + + @Override + public void startTrace(ThreadContext threadContext) { + this.tracer.onTraceStarted(threadContext, this); + } + + @Override + public void stopTrace() { + this.tracer.onTraceStopped(this); + } } diff --git a/server/src/main/java/org/elasticsearch/http/HttpTracer.java b/server/src/main/java/org/elasticsearch/http/HttpTracer.java index 4a8f5665e843f..14c550c9bd64c 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpTracer.java +++ b/server/src/main/java/org/elasticsearch/http/HttpTracer.java @@ -46,16 +46,8 @@ class HttpTracer { clusterSettings.addSettingsUpdateConsumer(HttpTransportSettings.SETTING_HTTP_TRACE_LOG_EXCLUDE, this::setTracerLogExclude); } - void onTraceStarted(RestChannel channel) { - final List headerValues = channel.request().getAllHeaderValues(Task.TRACE_PARENT_HTTP_HEADER); - if (headerValues != null && headerValues.size() == 1) { - String traceparent = headerValues.get(0); - if (traceparent.length() >= 55) { - this.tracers.forEach(t -> t.onTraceStarted(channel, traceparent)); - } - } else { - this.tracers.forEach(t -> t.onTraceStarted(channel)); - } + void onTraceStarted(ThreadContext threadContext, RestChannel channel) { + this.tracers.forEach(t -> t.onTraceStarted(threadContext, channel)); } void onTraceStopped(RestChannel channel) { @@ -66,7 +58,7 @@ void onTraceStopped(RestChannel channel) { void onTraceEvent(RestChannel channel, String eventName) { this.tracers.forEach(t -> { - t.addEvent(channel, eventName); + t.onTraceEvent(channel, eventName); }); } diff --git a/server/src/main/java/org/elasticsearch/rest/RestChannel.java b/server/src/main/java/org/elasticsearch/rest/RestChannel.java index 1000455896615..ae5af237cda6d 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestChannel.java +++ b/server/src/main/java/org/elasticsearch/rest/RestChannel.java @@ -9,6 +9,7 @@ package org.elasticsearch.rest; import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; import org.elasticsearch.tracing.Traceable; import 
org.elasticsearch.xcontent.XContentBuilder; @@ -62,4 +63,8 @@ default Map getAttributes() { req.uri() ); } + + void startTrace(ThreadContext threadContext); + + void stopTrace(); } diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 079433dc4ecab..521f1c4dc67a0 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -302,6 +302,8 @@ public void registerHandler(final RestHandler handler) { public void dispatchRequest(RestRequest request, RestChannel channel, ThreadContext threadContext) { threadContext.addResponseHeader(ELASTIC_PRODUCT_HTTP_HEADER, ELASTIC_PRODUCT_HTTP_HEADER_VALUE); try { + copyRestHeaders(request, threadContext); + channel.startTrace(threadContext); tryAllHandlers(request, channel, threadContext); } catch (Exception e) { try { @@ -436,7 +438,6 @@ private void sendContentTypeErrorMessage(@Nullable List contentTypeHeade private void tryAllHandlers(final RestRequest request, final RestChannel channel, final ThreadContext threadContext) throws Exception { try { - copyRestHeaders(request, threadContext); validateErrorTrace(request, channel); } catch (IllegalArgumentException e) { channel.sendResponse(BytesRestResponse.createSimpleErrorResponse(channel, BAD_REQUEST, e.getMessage())); @@ -674,6 +675,15 @@ private void close() { inFlightRequestsBreaker(circuitBreakerService).addWithoutBreaking(-contentLength); } + @Override + public void startTrace(ThreadContext threadContext) { + delegate.startTrace(threadContext); + } + + @Override + public void stopTrace() { + delegate.stopTrace(); + } } private static CircuitBreaker inFlightRequestsBreaker(CircuitBreakerService circuitBreakerService) { diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskManager.java b/server/src/main/java/org/elasticsearch/tasks/TaskManager.java index 8c9cd25ff75b3..d20e45162cfdd 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskManager.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskManager.java @@ -143,7 +143,7 @@ public Task register(String type, String action, TaskAwareRequest request) { } else { Task previousTask = tasks.put(task.getId(), task); assert previousTask == null; - taskTracer.onTaskRegistered(task); + taskTracer.onTaskRegistered(threadContext, task); } return task; } @@ -196,7 +196,7 @@ private void registerCancellableTask(Task task) { CancellableTask cancellableTask = (CancellableTask) task; CancellableTaskHolder holder = new CancellableTaskHolder(cancellableTask); cancellableTasks.put(task, holder); - taskTracer.onTaskRegistered(task); + taskTracer.onTaskRegistered(threadPool.getThreadContext(), task); // Check if this task was banned before we start it. The empty check is used to avoid // computing the hash code of the parent taskId as most of the time bannedParents is empty. 
if (task.getParentTaskId().isSet() && bannedParents.isEmpty() == false) { diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java b/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java index f12f94c57485c..c0b2091a1fa9f 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.tracing.Tracer; import java.util.List; @@ -28,10 +29,10 @@ public void addTracer(Tracer tracer) { } } - public void onTaskRegistered(Task task) { + public void onTaskRegistered(ThreadContext threadContext, Task task) { for (Tracer tracer : tracers) { try { - tracer.onTraceStarted(task); + tracer.onTraceStarted(threadContext, task); } catch (Exception e) { assert false : e; logger.warn( diff --git a/server/src/main/java/org/elasticsearch/tracing/Tracer.java b/server/src/main/java/org/elasticsearch/tracing/Tracer.java index bcd24a758c910..e8c4e5e49d4a6 100644 --- a/server/src/main/java/org/elasticsearch/tracing/Tracer.java +++ b/server/src/main/java/org/elasticsearch/tracing/Tracer.java @@ -8,7 +8,7 @@ package org.elasticsearch.tracing; -import java.util.Map; +import org.elasticsearch.common.util.concurrent.ThreadContext; /** * Represents a distributed tracing system that keeps track of the start and end of various activities in the cluster. @@ -18,21 +18,12 @@ public interface Tracer { /** * Called when the {@link Traceable} activity starts. */ - default void onTraceStarted(Traceable traceable) { - this.onTraceStarted(traceable, null); - } - - void onTraceStarted(Traceable traceable, String traceparent); + void onTraceStarted(ThreadContext threadContext, Traceable traceable); /** * Called when the {@link Traceable} activity ends. */ void onTraceStopped(Traceable traceable); - /** - * Retrieve context related headers for the span of the given id. 
- */ - Map getSpanHeadersById(String id); - - void addEvent(Traceable traceable, String name); + void onTraceEvent(Traceable traceable, String eventName); } diff --git a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java index e7546795d3504..08f1090de8edd 100644 --- a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java +++ b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -100,7 +100,7 @@ public void testModule() { APMTracer.CAPTURING_SPAN_EXPORTER.clear(); - taskTracer.onTaskRegistered(testTask); + taskTracer.onTaskRegistered(transportService.getThreadPool().getThreadContext(), testTask); taskTracer.onTaskUnregistered(testTask); final List capturedSpans = APMTracer.CAPTURING_SPAN_EXPORTER.getCapturedSpans(); diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java index dd57efe9f596c..1b992ffaf9227 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java @@ -14,8 +14,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.core.Releasable; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.plugins.NetworkPlugin; @@ -24,12 +22,6 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.transport.TransportInterceptor; -import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.transport.TransportRequestOptions; -import org.elasticsearch.transport.TransportResponse; -import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -76,54 +68,4 @@ public List> getSettings() { APMTracer.APM_SAMPLE_RATE_SETTING ); } - - public List getTransportInterceptors(NamedWriteableRegistry namedWriteableRegistry, ThreadContext threadContext) { - return List.of(new TransportInterceptor() { - @Override - public AsyncSender interceptSender(AsyncSender sender) { - return new ApmTransportInterceptor(sender, threadContext); - } - }); - } - - private class ApmTransportInterceptor implements TransportInterceptor.AsyncSender { - - private final TransportInterceptor.AsyncSender sender; - private final ThreadContext threadContext; - - ApmTransportInterceptor(TransportInterceptor.AsyncSender sender, ThreadContext threadContext) { - this.sender = sender; - this.threadContext = threadContext; - } - - @Override - public void sendRequest( - Transport.Connection connection, - String action, - TransportRequest request, - TransportRequestOptions options, - TransportResponseHandler handler - ) { - try (var ignored = withParentContext(String.valueOf(request.getParentTask().getId()))) { - sender.sendRequest(connection, action, request, options, handler); - } - } - - private Releasable 
withParentContext(String parentTaskId) { - var aTracer = tracer.get(); - if (aTracer == null) { - return null; - } - if (aTracer.isEnabled() == false) { - return null; - } - var headers = aTracer.getSpanHeadersById(parentTaskId); - if (headers == null) { - return null; - } - final Releasable releasable = threadContext.removeRequestHeaders(TRACE_HEADERS); - threadContext.putHeader(headers); - return releasable; - } - } } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index e680957f298f7..7dce3ad1a4109 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -11,7 +11,6 @@ import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.SpanBuilder; -import io.opentelemetry.api.trace.SpanKind; import io.opentelemetry.api.trace.Tracer; import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; import io.opentelemetry.context.Context; @@ -44,6 +43,9 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Tuple; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.tracing.Traceable; @@ -66,6 +68,7 @@ import static org.elasticsearch.common.settings.Setting.Property.Dynamic; import static org.elasticsearch.common.settings.Setting.Property.NodeScope; +import static org.elasticsearch.xpack.apm.APM.TRACE_HEADERS; public class APMTracer extends AbstractLifecycleComponent implements org.elasticsearch.tracing.Tracer { @@ -86,7 +89,7 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic ); private final Semaphore shutdownPermits = new Semaphore(Integer.MAX_VALUE); - private final Map spans = ConcurrentCollections.newConcurrentMap(); + private final Map> spans = ConcurrentCollections.newConcurrentMap(); private final ThreadPool threadPool; private final ClusterService clusterService; private final SecureString endpoint; @@ -216,7 +219,7 @@ private void destroyApmServices() { } @Override - public void onTraceStarted(Traceable traceable, String traceparent) { + public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { var services = this.services; if (services == null) { return; @@ -229,7 +232,7 @@ public void onTraceStarted(Traceable traceable, String traceparent) { spans.computeIfAbsent(traceable.getSpanId(), spanId -> { // services might be in shutdown state by this point, but this is handled by the open telemetry internally final SpanBuilder spanBuilder = services.tracer.spanBuilder(traceable.getSpanName()); - Context parentContext = getParentSpanContext(services.openTelemetry, traceparent); + Context parentContext = getParentSpanContext(services.openTelemetry); if (parentContext != null) { spanBuilder.setParent(parentContext); } @@ -272,7 +275,16 @@ public void onTraceStarted(Traceable traceable, String traceparent) { if (xOpaqueId != null) { spanBuilder.setAttribute("es.x-opaque-id", xOpaqueId); } - return spanBuilder.startSpan(); + final Span span = spanBuilder.startSpan(); + Releasable 
previousContext; + try (Scope ignore = span.makeCurrent()) { + Map spanHeaders = new HashMap<>(); + services.openTelemetry.getPropagators().getTextMapPropagator().inject(Context.current(), spanHeaders, Map::put); + spanHeaders.keySet().removeIf(k -> isSupportedContextKey(k) == false); + previousContext = threadContext.removeRequestHeaders(TRACE_HEADERS); + threadContext.putHeader(spanHeaders); + } + return Tuple.tuple(span, previousContext::close); }); } @@ -282,14 +294,7 @@ private boolean isSpanNameIncluded(String name) { return includeNames.isEmpty() || Regex.simpleMatch(includeNames, name); } - private Context getParentSpanContext(OpenTelemetry openTelemetry, String traceParent) { -// if (traceParent != null) { -// // traceparent and tracestate should match the keys used by W3CTraceContextPropagator -// return openTelemetry.getPropagators() -// .getTextMapPropagator() -// .extract(Context.current(), Map.of(Task.TRACE_PARENT_HTTP_HEADER, traceParent), new MapKeyGetter()); -// } - + private Context getParentSpanContext(OpenTelemetry openTelemetry) { // If we already have a non-root span context that should be the parent if (Context.current() != Context.root()) { return Context.current(); @@ -311,33 +316,19 @@ private Context getParentSpanContext(OpenTelemetry openTelemetry, String tracePa } @Override - public void addEvent(Traceable traceable, String name) { - final Span span = spans.get(traceable.getSpanId()); - if (span != null) { - span.addEvent(name); - } - } - - @Override - public Map getSpanHeadersById(String id) { - var services = this.services; - var span = spans.get(id); - if (span == null || services == null) { - return null; - } - try (Scope ignore = span.makeCurrent()) { - Map spanHeaders = new HashMap<>(); - services.openTelemetry.getPropagators().getTextMapPropagator().inject(Context.current(), spanHeaders, Map::put); - spanHeaders.keySet().removeIf(k -> isSupportedContextKey(k) == false); - return spanHeaders; + public void onTraceStopped(Traceable traceable) { + final var spanTuple = spans.remove(traceable.getSpanId()); + if (spanTuple != null) { + spanTuple.v1().end(); + spanTuple.v2().run(); } } @Override - public void onTraceStopped(Traceable traceable) { - final Span span = spans.remove(traceable.getSpanId()); - if (span != null) { - span.end(); + public void onTraceEvent(Traceable traceable, String eventName) { + final var spanTuple = spans.get(traceable.getSpanId()); + if (spanTuple != null) { + spanTuple.v1().addEvent(eventName); } } @@ -422,6 +413,6 @@ public String get(Map carrier, String key) { } private static boolean isSupportedContextKey(String key) { - return APM.TRACE_HEADERS.contains(key); + return TRACE_HEADERS.contains(key); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/AuthorizationTracer.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/AuthorizationTracer.java index b1fffbc0986c2..9d1b9c8cf4ba3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/AuthorizationTracer.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/AuthorizationTracer.java @@ -37,7 +37,7 @@ public void addTracer(Tracer tracer) { public Runnable startTracing(Traceable traceable) { for (Tracer tracer : tracers) { try { - tracer.onTraceStarted(traceable); + tracer.onTraceStarted(threadContext, traceable); } catch (Exception e) { assert false : e; logger.warn( From e7cca58562146f750a5986f37f6e49808b535590 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 16 
Mar 2022 14:58:29 +0000 Subject: [PATCH 27/90] OMG I think it's working --- .../elasticsearch/action/ActionModule.java | 1 + .../rest/AbstractRestChannel.java | 11 +++++ .../org/elasticsearch/rest/RestChannel.java | 2 +- .../elasticsearch/xpack/apm/APMTracer.java | 40 ++++++++++--------- 4 files changed, 34 insertions(+), 20 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 79f7709053c19..c9cf41c11275a 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -475,6 +475,7 @@ public ActionModule( actionPlugins.stream().flatMap(p -> p.getRestHeaders().stream()), Stream.of( new RestHeaderDefinition(Task.X_OPAQUE_ID_HTTP_HEADER, false), + new RestHeaderDefinition(Task.TRACE_STATE, false), new RestHeaderDefinition(Task.TRACE_PARENT_HTTP_HEADER, false), new RestHeaderDefinition(Task.X_ELASTIC_PRODUCT_ORIGIN_HTTP_HEADER, false) ) diff --git a/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java b/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java index fdcf0c92bd993..58dc097d7bcc9 100644 --- a/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java +++ b/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ParsedMediaType; import org.elasticsearch.xcontent.XContentBuilder; @@ -191,4 +192,14 @@ public RestRequest request() { public boolean detailedErrorsEnabled() { return detailedErrorsEnabled; } + + @Override + public void startTrace(ThreadContext threadContext) { + // no op + } + + @Override + public void stopTrace() { + // no op + } } diff --git a/server/src/main/java/org/elasticsearch/rest/RestChannel.java b/server/src/main/java/org/elasticsearch/rest/RestChannel.java index ae5af237cda6d..0996616a060f2 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestChannel.java +++ b/server/src/main/java/org/elasticsearch/rest/RestChannel.java @@ -50,7 +50,7 @@ default String getSpanId() { @Override default String getSpanName() { - return this.request().path(); + return this.request().method() + " " + this.request().path(); } @Override diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 7dce3ad1a4109..6abab668c8be2 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -14,7 +14,6 @@ import io.opentelemetry.api.trace.Tracer; import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; import io.opentelemetry.context.Context; -import io.opentelemetry.context.Scope; import io.opentelemetry.context.propagation.ContextPropagators; import io.opentelemetry.context.propagation.TextMapGetter; import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; @@ -89,7 +88,7 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic ); private final Semaphore shutdownPermits = new Semaphore(Integer.MAX_VALUE); - private final Map> 
spans = ConcurrentCollections.newConcurrentMap(); + private final Map> spans = ConcurrentCollections.newConcurrentMap(); private final ThreadPool threadPool; private final ClusterService clusterService; private final SecureString endpoint; @@ -232,8 +231,13 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { spans.computeIfAbsent(traceable.getSpanId(), spanId -> { // services might be in shutdown state by this point, but this is handled by the open telemetry internally final SpanBuilder spanBuilder = services.tracer.spanBuilder(traceable.getSpanName()); - Context parentContext = getParentSpanContext(services.openTelemetry); + Context parentContext = getParentSpanContext(); if (parentContext != null) { + if (traceable.getSpanName().startsWith("GET") || traceable.getSpanName().startsWith("POST")) { + String traceParentHeader = threadPool.getThreadContext().getHeader(Task.TRACE_PARENT_HTTP_HEADER); + String traceStateHeader = threadPool.getThreadContext().getHeader(Task.TRACE_STATE); + LOGGER.warn("BADGER {} {}", traceParentHeader, traceStateHeader); + } spanBuilder.setParent(parentContext); } @@ -276,14 +280,15 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { spanBuilder.setAttribute("es.x-opaque-id", xOpaqueId); } final Span span = spanBuilder.startSpan(); - Releasable previousContext; - try (Scope ignore = span.makeCurrent()) { - Map spanHeaders = new HashMap<>(); - services.openTelemetry.getPropagators().getTextMapPropagator().inject(Context.current(), spanHeaders, Map::put); - spanHeaders.keySet().removeIf(k -> isSupportedContextKey(k) == false); - previousContext = threadContext.removeRequestHeaders(TRACE_HEADERS); - threadContext.putHeader(spanHeaders); - } + + final Map spanHeaders = new HashMap<>(); + final Context contextForNewSpan = Context.current().with(span); + services.openTelemetry.getPropagators().getTextMapPropagator().inject(contextForNewSpan, spanHeaders, Map::put); + spanHeaders.keySet().removeIf(k -> isSupportedContextKey(k) == false); + + Releasable previousContext = threadContext.removeRequestHeaders(TRACE_HEADERS); + threadContext.putHeader(spanHeaders); + return Tuple.tuple(span, previousContext::close); }); } @@ -294,13 +299,8 @@ private boolean isSpanNameIncluded(String name) { return includeNames.isEmpty() || Regex.simpleMatch(includeNames, name); } - private Context getParentSpanContext(OpenTelemetry openTelemetry) { - // If we already have a non-root span context that should be the parent - if (Context.current() != Context.root()) { - return Context.current(); - } - - // If not let us check for a parent context in the thread context + private Context getParentSpanContext() { + // Check for a parent context in the thread context String traceParentHeader = threadPool.getThreadContext().getHeader(Task.TRACE_PARENT_HTTP_HEADER); String traceStateHeader = threadPool.getThreadContext().getHeader(Task.TRACE_STATE); if (traceParentHeader != null) { @@ -310,7 +310,9 @@ private Context getParentSpanContext(OpenTelemetry openTelemetry) { if (traceStateHeader != null) { traceContextMap.put(Task.TRACE_STATE, traceStateHeader); } - return openTelemetry.getPropagators().getTextMapPropagator().extract(Context.current(), traceContextMap, new MapKeyGetter()); + return services.openTelemetry.getPropagators() + .getTextMapPropagator() + .extract(Context.current(), traceContextMap, new MapKeyGetter()); } return null; } From 461226ba9a14c07fe21691d27abef06fac1484f9 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Thu, 17 
Mar 2022 11:18:58 +0000 Subject: [PATCH 28/90] Seems to be working now :tada: --- .../http/AbstractHttpServerTransport.java | 2 -- .../org/elasticsearch/http/HttpTracer.java | 8 ++--- .../elasticsearch/rest/RestController.java | 4 +-- .../elasticsearch/xpack/apm/APMTracer.java | 32 +++++++++---------- 4 files changed, 19 insertions(+), 27 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java index 7cb8b227f5b7a..886033723a48d 100644 --- a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java +++ b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java @@ -462,8 +462,6 @@ private void handleIncomingRequest(final HttpRequest httpRequest, final HttpChan channel = innerChannel; } - onTraceStarted(threadContext, channel); - dispatchRequest(restRequest, channel, badRequestCause); } diff --git a/server/src/main/java/org/elasticsearch/http/HttpTracer.java b/server/src/main/java/org/elasticsearch/http/HttpTracer.java index 14c550c9bd64c..8a048927b7fcd 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpTracer.java +++ b/server/src/main/java/org/elasticsearch/http/HttpTracer.java @@ -51,15 +51,11 @@ void onTraceStarted(ThreadContext threadContext, RestChannel channel) { } void onTraceStopped(RestChannel channel) { - this.tracers.forEach(t -> { - t.onTraceStopped(channel); - }); + this.tracers.forEach(t -> t.onTraceStopped(channel)); } void onTraceEvent(RestChannel channel, String eventName) { - this.tracers.forEach(t -> { - t.onTraceEvent(channel, eventName); - }); + this.tracers.forEach(t -> t.onTraceEvent(channel, eventName)); } /** diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 521f1c4dc67a0..12f60aa416c04 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -302,8 +302,6 @@ public void registerHandler(final RestHandler handler) { public void dispatchRequest(RestRequest request, RestChannel channel, ThreadContext threadContext) { threadContext.addResponseHeader(ELASTIC_PRODUCT_HTTP_HEADER, ELASTIC_PRODUCT_HTTP_HEADER_VALUE); try { - copyRestHeaders(request, threadContext); - channel.startTrace(threadContext); tryAllHandlers(request, channel, threadContext); } catch (Exception e) { try { @@ -438,6 +436,8 @@ private void sendContentTypeErrorMessage(@Nullable List contentTypeHeade private void tryAllHandlers(final RestRequest request, final RestChannel channel, final ThreadContext threadContext) throws Exception { try { + copyRestHeaders(request, threadContext); + channel.startTrace(threadContext); validateErrorTrace(request, channel); } catch (IllegalArgumentException e) { channel.sendResponse(BytesRestResponse.createSimpleErrorResponse(channel, BAD_REQUEST, e.getMessage())); diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 6abab668c8be2..d56d59de67440 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -43,8 +43,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; 
import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.Tuple; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.tracing.Traceable; @@ -88,7 +86,7 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic ); private final Semaphore shutdownPermits = new Semaphore(Integer.MAX_VALUE); - private final Map> spans = ConcurrentCollections.newConcurrentMap(); + private final Map spans = ConcurrentCollections.newConcurrentMap(); private final ThreadPool threadPool; private final ClusterService clusterService; private final SecureString endpoint; @@ -233,11 +231,6 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { final SpanBuilder spanBuilder = services.tracer.spanBuilder(traceable.getSpanName()); Context parentContext = getParentSpanContext(); if (parentContext != null) { - if (traceable.getSpanName().startsWith("GET") || traceable.getSpanName().startsWith("POST")) { - String traceParentHeader = threadPool.getThreadContext().getHeader(Task.TRACE_PARENT_HTTP_HEADER); - String traceStateHeader = threadPool.getThreadContext().getHeader(Task.TRACE_STATE); - LOGGER.warn("BADGER {} {}", traceParentHeader, traceStateHeader); - } spanBuilder.setParent(parentContext); } @@ -286,10 +279,11 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { services.openTelemetry.getPropagators().getTextMapPropagator().inject(contextForNewSpan, spanHeaders, Map::put); spanHeaders.keySet().removeIf(k -> isSupportedContextKey(k) == false); - Releasable previousContext = threadContext.removeRequestHeaders(TRACE_HEADERS); + // Ignore the result here, we don't need to restore the threadContext + threadContext.removeRequestHeaders(TRACE_HEADERS); threadContext.putHeader(spanHeaders); - return Tuple.tuple(span, previousContext::close); + return span; }); } @@ -319,18 +313,22 @@ private Context getParentSpanContext() { @Override public void onTraceStopped(Traceable traceable) { - final var spanTuple = spans.remove(traceable.getSpanId()); - if (spanTuple != null) { - spanTuple.v1().end(); - spanTuple.v2().run(); + final var span = spans.remove(traceable.getSpanId()); + if (span != null) { + span.end(); } + // TODO: geoip-downloader[c] isn't getting stopped? 
+// LOGGER.warn( +// "Active spans after stopped trace: {}", +// spans.values().stream().map(Tuple::v1).map(span -> ((ReadWriteSpan) span).getName()).toList() +// ); } @Override public void onTraceEvent(Traceable traceable, String eventName) { - final var spanTuple = spans.get(traceable.getSpanId()); - if (spanTuple != null) { - spanTuple.v1().addEvent(eventName); + final var span = spans.get(traceable.getSpanId()); + if (span != null) { + span.addEvent(eventName); } } From b646a698a477ff6c69145889c38ed2e42e2ce3bf Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Thu, 17 Mar 2022 19:28:55 +0000 Subject: [PATCH 29/90] Hacks to try to use the APM Java agent --- distribution/src/bin/elasticsearch | 5 + run.sh | 38 +++++ server/build.gradle | 1 - .../java/org/elasticsearch/node/Node.java | 6 +- x-pack/plugin/apm-integration/build.gradle | 70 ++++---- .../org/elasticsearch/xpack/apm/ApmIT.java | 16 +- .../java/org/elasticsearch/xpack/apm/APM.java | 8 +- .../elasticsearch/xpack/apm/APMTracer.java | 159 +----------------- .../plugin-metadata/plugin-security.policy | 7 + 9 files changed, 102 insertions(+), 208 deletions(-) create mode 100755 run.sh diff --git a/distribution/src/bin/elasticsearch b/distribution/src/bin/elasticsearch index 06279f90f5271..ec144c87b5fd6 100755 --- a/distribution/src/bin/elasticsearch +++ b/distribution/src/bin/elasticsearch @@ -107,6 +107,11 @@ for i in "${!ARG_LIST[@]}"; do fi done +# HACK HACK HACK +# This is here to avoid running Java commands before this point with +# unwanted arguments +ES_JAVA_OPTS="$ES_JAVA_OPTS $ES_SERVER_OPTS" + # manual parsing to find out, if process should be detached if [[ $DAEMONIZE = false ]]; then exec \ diff --git a/run.sh b/run.sh new file mode 100755 index 0000000000000..6a070158e15bd --- /dev/null +++ b/run.sh @@ -0,0 +1,38 @@ +#!/bin/bash + +set -eo pipefail + +cd build/distribution/local/elasticsearch-8.2.0-SNAPSHOT + +SERVER_URL="" +SECRET_TOKEN="" + +export JAVA_HOME='' + +if [[ ! 
-f config/elasticsearch.keystore ]]; then + ./bin/elasticsearch-keystore create +# echo "$SERVER_URL" | ./bin/elasticsearch-keystore add -x -f -v 'xpack.apm.endpoint' +# echo "$SECRET_TOKEN" | ./bin/elasticsearch-keystore add -x -f -v 'xpack.apm.token' + echo "password" | ./bin/elasticsearch-keystore add -x 'bootstrap.password' +fi + +# AGENT="$PWD/modules/apm-integration/elastic-apm-agent-1.29.0.jar" +AGENT="$PWD/modules/apm-integration/elastic-apm-agent-1.29.0.jar" + +AGENT_OPTS="" +AGENT_OPTS="$AGENT_OPTS -Delastic.apm.service_name=elasticsearch" +AGENT_OPTS="$AGENT_OPTS -Delastic.apm.instrument=false" +AGENT_OPTS="$AGENT_OPTS -Delastic.apm.server_url=$SERVER_URL" +AGENT_OPTS="$AGENT_OPTS -Delastic.apm.secret_token=$SECRET_TOKEN" +AGENT_OPTS="$AGENT_OPTS -Delastic.apm.service_version=8.2.0-SNAPSHOT" +AGENT_OPTS="$AGENT_OPTS -Delastic.apm.environment=dev" +AGENT_OPTS="$AGENT_OPTS -Delastic.apm.log_level=trace" +AGENT_OPTS="$AGENT_OPTS -Delastic.apm.log_file=$PWD/apm.log" +AGENT_OPTS="$AGENT_OPTS -Delastic.apm.enable_experimental_instrumentations=true" + +# SUSPEND_JVM="n" + +# export ES_SERVER_OPTS="-agentlib:jdwp=transport=dt_socket,server=n,suspend=n,address=*:5005 -ea -javaagent:$AGENT $AGENT_OPTS" +export ES_SERVER_OPTS="-ea -javaagent:$AGENT $AGENT_OPTS" + +exec ./bin/elasticsearch -Expack.apm.tracing.enabled=true diff --git a/server/build.gradle b/server/build.gradle index c5e28793efa48..8669b1fa62f55 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -72,7 +72,6 @@ dependencies { internalClusterTestImplementation(project(":test:framework")) { exclude group: 'org.elasticsearch', module: 'server' } - } tasks.named("forbiddenPatterns").configure { diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 91f5cfe58a0f3..9a777ab4ffab3 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -214,7 +214,6 @@ import java.util.function.UnaryOperator; import java.util.stream.Collectors; import java.util.stream.Stream; - import javax.net.ssl.SNIHostName; import static java.util.stream.Collectors.toList; @@ -698,11 +697,12 @@ protected Node( repositoriesServiceReference::get ).stream() ) - .collect(Collectors.toList()); + .toList(); final List tracers = pluginComponents.stream() .map(c -> c instanceof Tracer t ? 
t : null) - .filter(Objects::nonNull).toList(); + .filter(Objects::nonNull) + .toList(); ActionModule actionModule = new ActionModule( settings, diff --git a/x-pack/plugin/apm-integration/build.gradle b/x-pack/plugin/apm-integration/build.gradle index a9e4b567dfb9e..1cb90baaa94c0 100644 --- a/x-pack/plugin/apm-integration/build.gradle +++ b/x-pack/plugin/apm-integration/build.gradle @@ -17,48 +17,50 @@ esplugin { dependencies { implementation "io.opentelemetry:opentelemetry-api:${versions.opentelemetry}" - implementation "io.opentelemetry:opentelemetry-api-metrics:${versions.opentelemetry}-alpha" + // implementation "io.opentelemetry:opentelemetry-api-metrics:${versions.opentelemetry}-alpha" implementation "io.opentelemetry:opentelemetry-context:${versions.opentelemetry}" - implementation "io.opentelemetry:opentelemetry-sdk:${versions.opentelemetry}" - implementation "io.opentelemetry:opentelemetry-sdk-trace:${versions.opentelemetry}" - implementation "io.opentelemetry:opentelemetry-sdk-common:${versions.opentelemetry}" - implementation "io.opentelemetry:opentelemetry-sdk-metrics:${versions.opentelemetry}-alpha" + // implementation "io.opentelemetry:opentelemetry-sdk:${versions.opentelemetry}" + // implementation "io.opentelemetry:opentelemetry-sdk-trace:${versions.opentelemetry}" + // implementation "io.opentelemetry:opentelemetry-sdk-common:${versions.opentelemetry}" + // implementation "io.opentelemetry:opentelemetry-sdk-metrics:${versions.opentelemetry}-alpha" implementation "io.opentelemetry:opentelemetry-semconv:${versions.opentelemetry}-alpha" - implementation "io.opentelemetry:opentelemetry-exporter-logging:${versions.opentelemetry}" - // required to use OTLP (to get OtlpGrpcSpanExporter) - implementation "io.opentelemetry:opentelemetry-exporter-otlp-trace:${versions.opentelemetry}" - // required by OTLP (to get GrpcExporter, Marshaller etc) - implementation "io.opentelemetry:opentelemetry-exporter-otlp-common:${versions.opentelemetry}" - // required by OTLP common & trace - implementation "io.grpc:grpc-api:1.42.1" - implementation "io.grpc:grpc-stub:1.42.1" - implementation "io.grpc:grpc-core:1.42.1" - implementation "io.grpc:grpc-context:1.42.1" - // netty HTTP client is used for gRPC calls to Elastic's APM server - implementation "io.grpc:grpc-netty:1.42.1" - // okio and okhttp are required by GrpcExporter as the default implementation, but we don't use it - implementation "com.squareup.okhttp3:okhttp:3.14.9" - implementation "com.squareup.okio:okio:1.17.2" - // required by io.grpc - implementation 'io.perfmark:perfmark-api:0.24.0' - implementation 'io.perfmark:perfmark-impl:0.24.0' - runtimeOnly 'com.google.guava:failureaccess:1.0.1' - // required by grpc-netty - api "io.netty:netty-buffer:${versions.netty}" - api "io.netty:netty-transport:${versions.netty}" - api "io.netty:netty-common:${versions.netty}" - api "io.netty:netty-codec:${versions.netty}" - api "io.netty:netty-codec-http:${versions.netty}" - api "io.netty:netty-codec-http2:${versions.netty}" - api "io.netty:netty-handler:${versions.netty}" - api "io.netty:netty-resolver:${versions.netty}" - runtimeOnly 'com.google.guava:guava:31.0.1-jre' + // implementation "io.opentelemetry:opentelemetry-exporter-logging:${versions.opentelemetry}" + // // required to use OTLP (to get OtlpGrpcSpanExporter) + // implementation "io.opentelemetry:opentelemetry-exporter-otlp-trace:${versions.opentelemetry}" + // // required by OTLP (to get GrpcExporter, Marshaller etc) + // implementation 
"io.opentelemetry:opentelemetry-exporter-otlp-common:${versions.opentelemetry}" + // // required by OTLP common & trace + // implementation "io.grpc:grpc-api:1.42.1" + // implementation "io.grpc:grpc-stub:1.42.1" + // implementation "io.grpc:grpc-core:1.42.1" + // implementation "io.grpc:grpc-context:1.42.1" + // // netty HTTP client is used for gRPC calls to Elastic's APM server + // implementation "io.grpc:grpc-netty:1.42.1" + // // okio and okhttp are required by GrpcExporter as the default implementation, but we don't use it + // implementation "com.squareup.okhttp3:okhttp:3.14.9" + // implementation "com.squareup.okio:okio:1.17.2" + // // required by io.grpc + // implementation 'io.perfmark:perfmark-api:0.24.0' + // implementation 'io.perfmark:perfmark-impl:0.24.0' + // runtimeOnly 'com.google.guava:failureaccess:1.0.1' + // // required by grpc-netty + // api "io.netty:netty-buffer:${versions.netty}" + // api "io.netty:netty-transport:${versions.netty}" + // api "io.netty:netty-common:${versions.netty}" + // api "io.netty:netty-codec:${versions.netty}" + // api "io.netty:netty-codec-http:${versions.netty}" + // api "io.netty:netty-codec-http2:${versions.netty}" + // api "io.netty:netty-handler:${versions.netty}" + // api "io.netty:netty-resolver:${versions.netty}" + // runtimeOnly 'com.google.guava:guava:31.0.1-jre' compileOnly project(path: xpackModule('core')) internalClusterTestImplementation(testArtifact(project(xpackModule('core')))) internalClusterTestImplementation(testArtifact(project(xpackModule('security')))) { exclude group: 'com.google.guava', module: 'guava' } + + runtimeOnly 'co.elastic.apm:elastic-apm-agent:1.29.0' } // no unit-test for now diff --git a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java index 08f1090de8edd..d5e9bb981a746 100644 --- a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java +++ b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -66,14 +66,14 @@ protected Collection> nodePlugins() { @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)); - ((MockSecureSettings) builder.getSecureSettings()).setString( - APMTracer.APM_ENDPOINT_SETTING.getKey(), - System.getProperty("tests.apm.endpoint", "") - ); - ((MockSecureSettings) builder.getSecureSettings()).setString( - APMTracer.APM_TOKEN_SETTING.getKey(), - System.getProperty("tests.apm.token", "") - ); +// ((MockSecureSettings) builder.getSecureSettings()).setString( +// APMTracer.APM_ENDPOINT_SETTING.getKey(), +// System.getProperty("tests.apm.endpoint", "") +// ); +// ((MockSecureSettings) builder.getSecureSettings()).setString( +// APMTracer.APM_TOKEN_SETTING.getKey(), +// System.getProperty("tests.apm.token", "") +// ); builder.put(APMTracer.APM_ENABLED_SETTING.getKey(), true).put("xpack.security.authz.tracing", true); return builder.build(); } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java index 1b992ffaf9227..f5ad73d1dabf7 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java @@ 
-60,12 +60,6 @@ public Collection createComponents( @Override public List> getSettings() { - return List.of( - APMTracer.APM_ENABLED_SETTING, - APMTracer.APM_ENDPOINT_SETTING, - APMTracer.APM_TOKEN_SETTING, - APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING, - APMTracer.APM_SAMPLE_RATE_SETTING - ); + return List.of(APMTracer.APM_ENABLED_SETTING, APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING); } } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index d56d59de67440..305d4054c087b 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -7,38 +7,19 @@ package org.elasticsearch.xpack.apm; +import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.OpenTelemetry; -import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.SpanBuilder; import io.opentelemetry.api.trace.Tracer; -import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; import io.opentelemetry.context.Context; -import io.opentelemetry.context.propagation.ContextPropagators; import io.opentelemetry.context.propagation.TextMapGetter; -import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; -import io.opentelemetry.sdk.OpenTelemetrySdk; -import io.opentelemetry.sdk.common.CompletableResultCode; -import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.trace.SdkTracerProvider; -import io.opentelemetry.sdk.trace.SpanProcessor; -import io.opentelemetry.sdk.trace.data.SpanData; -import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; -import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; -import io.opentelemetry.sdk.trace.export.SpanExporter; -import io.opentelemetry.sdk.trace.samplers.Sampler; -import io.opentelemetry.semconv.resource.attributes.ResourceAttributes; import io.opentelemetry.semconv.trace.attributes.SemanticAttributes; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.settings.SecureSetting; -import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -47,21 +28,15 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.tracing.Traceable; -import java.security.AccessController; -import java.security.PrivilegedAction; -import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Queue; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.function.Function; -import java.util.function.Predicate; import java.util.stream.Collectors; -import java.util.stream.Stream; import static org.elasticsearch.common.settings.Setting.Property.Dynamic; import static org.elasticsearch.common.settings.Setting.Property.NodeScope; @@ -69,14 +44,7 @@ public class APMTracer extends 
AbstractLifecycleComponent implements org.elasticsearch.tracing.Tracer { - private static final Logger LOGGER = LogManager.getLogger(APMTracer.class); - - public static final CapturingSpanExporter CAPTURING_SPAN_EXPORTER = new CapturingSpanExporter(); - static final Setting APM_ENABLED_SETTING = Setting.boolSetting("xpack.apm.tracing.enabled", false, Dynamic, NodeScope); - static final Setting APM_ENDPOINT_SETTING = SecureSetting.secureString("xpack.apm.endpoint", null); - static final Setting APM_TOKEN_SETTING = SecureSetting.secureString("xpack.apm.token", null); - static final Setting APM_SAMPLE_RATE_SETTING = Setting.floatSetting("xpack.apm.tracing.sample_rate", 1.0f, Dynamic, NodeScope); static final Setting> APM_TRACING_NAMES_INCLUDE_SETTING = Setting.listSetting( "xpack.apm.tracing.names.include", Collections.emptyList(), @@ -89,35 +57,24 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic private final Map spans = ConcurrentCollections.newConcurrentMap(); private final ThreadPool threadPool; private final ClusterService clusterService; - private final SecureString endpoint; - private final SecureString token; private volatile boolean enabled; - private volatile float sampleRate; private volatile APMServices services; private List includeNames; - public void setSampleRate(float sampleRate) { - this.sampleRate = sampleRate; - } - /** * This class is required to make all open telemetry services visible at once */ - private record APMServices(SdkTracerProvider provider, Tracer tracer, OpenTelemetry openTelemetry) {} + private record APMServices(Tracer tracer, OpenTelemetry openTelemetry) {} public APMTracer(Settings settings, ThreadPool threadPool, ClusterService clusterService) { this.threadPool = Objects.requireNonNull(threadPool); this.clusterService = Objects.requireNonNull(clusterService); - this.endpoint = APM_ENDPOINT_SETTING.get(settings); - this.token = APM_TOKEN_SETTING.get(settings); this.enabled = APM_ENABLED_SETTING.get(settings); this.includeNames = APM_TRACING_NAMES_INCLUDE_SETTING.get(settings); - this.sampleRate = APM_SAMPLE_RATE_SETTING.get(settings); clusterService.getClusterSettings().addSettingsUpdateConsumer(APM_ENABLED_SETTING, this::setEnabled); clusterService.getClusterSettings().addSettingsUpdateConsumer(APM_TRACING_NAMES_INCLUDE_SETTING, this::setIncludeNames); - clusterService.getClusterSettings().addSettingsUpdateConsumer(APM_SAMPLE_RATE_SETTING, this::setSampleRate); } public boolean isEnabled() { @@ -163,46 +120,11 @@ protected void doClose() { private void createApmServices() { assert enabled; - var acquired = shutdownPermits.tryAcquire(); - if (acquired == false) { - return;// doStop() is already executed - } - - final String endpoint = this.endpoint.toString(); - final String token = this.token.toString(); - - var provider = AccessController.doPrivileged( - (PrivilegedAction) () -> SdkTracerProvider.builder() - .setResource( - Resource.create( - Attributes.of( - ResourceAttributes.SERVICE_NAME, - "elasticsearch", - ResourceAttributes.SERVICE_NAMESPACE, - clusterService.getClusterName().value(), - ResourceAttributes.SERVICE_INSTANCE_ID, - clusterService.getNodeName(), - ResourceAttributes.SERVICE_VERSION, - Version.CURRENT.toString(), - ResourceAttributes.DEPLOYMENT_ENVIRONMENT, - "dev" - ) - ) - ) - // TODO make dynamic - .setSampler(Sampler.traceIdRatioBased(this.sampleRate)) - .addSpanProcessor(createSpanProcessor(endpoint, token)) - .build() - ); - - var openTelemetry = OpenTelemetrySdk.builder() - 
.setTracerProvider(provider) - .setPropagators(ContextPropagators.create(W3CTraceContextPropagator.getInstance())) - .build(); + var openTelemetry = GlobalOpenTelemetry.get(); var tracer = openTelemetry.getTracer("elasticsearch", Version.CURRENT.toString()); assert this.services == null; - this.services = new APMServices(provider, tracer, openTelemetry); + this.services = new APMServices(tracer, openTelemetry); } private void destroyApmServices() { @@ -212,7 +134,6 @@ private void destroyApmServices() { return; } spans.clear();// discard in-flight spans - services.provider.shutdown().whenComplete(shutdownPermits::release); } @Override @@ -317,11 +238,6 @@ public void onTraceStopped(Traceable traceable) { if (span != null) { span.end(); } - // TODO: geoip-downloader[c] isn't getting stopped? -// LOGGER.warn( -// "Active spans after stopped trace: {}", -// spans.values().stream().map(Tuple::v1).map(span -> ((ReadWriteSpan) span).getName()).toList() -// ); } @Override @@ -332,73 +248,6 @@ public void onTraceEvent(Traceable traceable, String eventName) { } } - private static SpanProcessor createSpanProcessor(String endpoint, String token) { - SpanProcessor processor = SimpleSpanProcessor.create(CAPTURING_SPAN_EXPORTER); - if (Strings.hasLength(endpoint) == false || Strings.hasLength(token) == false) { - return processor; - } - - final OtlpGrpcSpanExporter exporter = AccessController.doPrivileged( - (PrivilegedAction) () -> OtlpGrpcSpanExporter.builder() - .setEndpoint(endpoint) - .addHeader("Authorization", "Bearer " + token) - .build() - ); - return SpanProcessor.composite( - processor, - AccessController.doPrivileged( - (PrivilegedAction) () -> BatchSpanProcessor.builder(exporter) - .setScheduleDelay(100, TimeUnit.MILLISECONDS) - .build() - ) - ); - } - - public static class CapturingSpanExporter implements SpanExporter { - - private final Queue capturedSpans = ConcurrentCollections.newQueue(); - - public void clear() { - capturedSpans.clear(); - } - - public List getCapturedSpans() { - return List.copyOf(capturedSpans); - } - - public Stream findSpan(Predicate predicate) { - return getCapturedSpans().stream().filter(predicate); - } - - public Stream findSpanByName(String name) { - return findSpan(span -> Objects.equals(span.getName(), name)); - } - - public Stream findSpanBySpanId(String spanId) { - return findSpan(span -> Objects.equals(span.getSpanId(), spanId)); - } - - public Stream findSpanByParentSpanId(String parentSpanId) { - return findSpan(span -> Objects.equals(span.getParentSpanId(), parentSpanId)); - } - - @Override - public CompletableResultCode export(Collection spans) { - capturedSpans.addAll(spans); - return CompletableResultCode.ofSuccess(); - } - - @Override - public CompletableResultCode flush() { - return CompletableResultCode.ofSuccess(); - } - - @Override - public CompletableResultCode shutdown() { - return CompletableResultCode.ofSuccess(); - } - } - private static class MapKeyGetter implements TextMapGetter> { @Override diff --git a/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy b/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy index adb23c68ac7a6..db2a5fd74951a 100644 --- a/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy +++ b/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy @@ -15,3 +15,10 @@ grant { permission java.lang.RuntimePermission "getClassLoader"; permission java.net.SocketPermission "*", "connect,resolve"; }; + +grant codeBase 
"${codebase.elastic-apm-agent}" { + permission java.lang.RuntimePermission "setFactory"; + permission java.lang.RuntimePermission "getClassLoader"; + permission java.lang.RuntimePermission "setContextClassLoader"; + permission java.net.SocketPermission "*", "connect,resolve"; +}; From 64adb49a1ecd51eed25a8df58068ea0b6786ebfc Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Fri, 18 Mar 2022 14:51:42 +0000 Subject: [PATCH 30/90] Formatting --- .../netty4/Netty4HttpServerTransport.java | 3 +- .../transport/netty4/Netty4Plugin.java | 3 +- .../http/netty4/Netty4BadRequestTests.java | 3 +- .../Netty4HttpServerPipeliningTests.java | 3 +- .../transport/nio/NioTransportPlugin.java | 21 ++++--- .../elasticsearch/action/ActionModule.java | 3 +- .../http/AbstractHttpServerTransport.java | 3 +- .../java/org/elasticsearch/node/Node.java | 3 +- .../elasticsearch/plugins/NetworkPlugin.java | 3 +- .../org/elasticsearch/rest/RestChannel.java | 7 +-- .../elasticsearch/rest/RestController.java | 3 +- .../action/ActionModuleTests.java | 12 ++-- .../common/network/NetworkModuleTests.java | 63 ++++++++++--------- .../AbstractHttpServerTransportTests.java | 18 ++++-- .../elasticsearch/xpack/apm/APMTracer.java | 8 +-- .../core/LocalStateCompositeXPackPlugin.java | 24 +++---- .../xpack/security/Security.java | 6 +- .../SecurityNetty4HttpServerTransport.java | 4 +- .../xpack/security/SecurityTests.java | 3 +- .../authz/AuthorizationServiceTests.java | 6 +- ...ecurityNetty4HttpServerTransportTests.java | 21 ++++--- 21 files changed, 125 insertions(+), 95 deletions(-) diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index a6e22376575aa..3574ca16e144e 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -150,7 +150,8 @@ public Netty4HttpServerTransport( Dispatcher dispatcher, ClusterSettings clusterSettings, SharedGroupFactory sharedGroupFactory, - List tracers) { + List tracers + ) { super(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher, clusterSettings, tracers); Netty4Utils.setAvailableProcessors(EsExecutors.NODE_PROCESSORS_SETTING.get(settings)); NettyAllocator.logAllocatorDescriptionIfNeeded(); diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Plugin.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Plugin.java index 3375d8f79a55f..1359263606e3b 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Plugin.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Plugin.java @@ -100,7 +100,8 @@ public Map> getHttpTransports( NetworkService networkService, HttpServerTransport.Dispatcher dispatcher, ClusterSettings clusterSettings, - List tracers) { + List tracers + ) { return Collections.singletonMap( NETTY_HTTP_TRANSPORT_NAME, () -> new Netty4HttpServerTransport( diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4BadRequestTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4BadRequestTests.java index 67b48ecd40678..21f0309b502c8 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4BadRequestTests.java 
+++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4BadRequestTests.java @@ -89,7 +89,8 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, dispatcher, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), new SharedGroupFactory(Settings.EMPTY), - tracers) + tracers + ) ) { httpServerTransport.start(); final TransportAddress transportAddress = randomFrom(httpServerTransport.boundAddress().boundAddresses()); diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java index 09d9f11527516..db20a228f1c78 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java @@ -112,7 +112,8 @@ class CustomNettyHttpServerTransport extends Netty4HttpServerTransport { new NullDispatcher(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), new SharedGroupFactory(settings), - tracers); + tracers + ); } @Override diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransportPlugin.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransportPlugin.java index 07f0ada763cc3..b8b19adc187bd 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransportPlugin.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransportPlugin.java @@ -86,16 +86,17 @@ public Map> getTransports( @Override public Map> getHttpTransports( - Settings settings, - ThreadPool threadPool, - BigArrays bigArrays, - PageCacheRecycler pageCacheRecycler, - CircuitBreakerService circuitBreakerService, - NamedXContentRegistry xContentRegistry, - NetworkService networkService, - HttpServerTransport.Dispatcher dispatcher, - ClusterSettings clusterSettings, - List tracers) { + Settings settings, + ThreadPool threadPool, + BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, + CircuitBreakerService circuitBreakerService, + NamedXContentRegistry xContentRegistry, + NetworkService networkService, + HttpServerTransport.Dispatcher dispatcher, + ClusterSettings clusterSettings, + List tracers + ) { return Collections.singletonMap( NIO_HTTP_TRANSPORT_NAME, () -> new NioHttpServerTransport( diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index bfd751c17c35a..451ad22592e5e 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -457,7 +457,8 @@ public ActionModule( CircuitBreakerService circuitBreakerService, UsageService usageService, SystemIndices systemIndices, - List tracers) { + List tracers + ) { this.settings = settings; this.indexNameExpressionResolver = indexNameExpressionResolver; this.indexScopedSettings = indexScopedSettings; diff --git a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java index 886033723a48d..4d9370a5b8964 100644 --- a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java +++ 
b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java @@ -102,7 +102,8 @@ protected AbstractHttpServerTransport( NamedXContentRegistry xContentRegistry, Dispatcher dispatcher, ClusterSettings clusterSettings, - List tracers) { + List tracers + ) { this.settings = settings; this.networkService = networkService; this.bigArrays = bigArrays; diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 3ac2923598089..c8478cf5bcc0e 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -702,7 +702,8 @@ protected Node( final List tracers = pluginComponents.stream() .map(c -> c instanceof Tracer t ? t : null) - .filter(Objects::nonNull).toList(); + .filter(Objects::nonNull) + .toList(); ActionModule actionModule = new ActionModule( settings, diff --git a/server/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java b/server/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java index 607327842d311..460a0100a8172 100644 --- a/server/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java @@ -76,7 +76,8 @@ default Map> getHttpTransports( NetworkService networkService, HttpServerTransport.Dispatcher dispatcher, ClusterSettings clusterSettings, - List tracers) { + List tracers + ) { return Collections.emptyMap(); } } diff --git a/server/src/main/java/org/elasticsearch/rest/RestChannel.java b/server/src/main/java/org/elasticsearch/rest/RestChannel.java index 0996616a060f2..2b5030657f8ef 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestChannel.java +++ b/server/src/main/java/org/elasticsearch/rest/RestChannel.java @@ -56,12 +56,7 @@ default String getSpanName() { @Override default Map getAttributes() { var req = this.request(); - return Map.of( - "http.method", - req.method().name(), - "http.url", - req.uri() - ); + return Map.of("http.method", req.method().name(), "http.url", req.uri()); } void startTrace(ThreadContext threadContext); diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 3d5752b11fef4..2fd860957eec5 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -96,7 +96,8 @@ public RestController( UnaryOperator handlerWrapper, NodeClient client, CircuitBreakerService circuitBreakerService, - UsageService usageService) { + UsageService usageService + ) { this.headersToCopy = headersToCopy; this.usageService = usageService; if (handlerWrapper == null) { diff --git a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java index dd7fd47b4d06d..7142adc106b74 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java @@ -116,7 +116,8 @@ public void testSetupRestHandlerContainsKnownBuiltin() { null, usageService, null, - null); + null + ); actionModule.initRestHandlers(null); // At this point the easiest way to confirm that a handler is loaded is to try to register another one on top of it and to fail Exception e = expectThrows( @@ -172,7 +173,8 @@ public String getName() { null, usageService, null, - null); + null + ); Exception e = 
expectThrows(IllegalArgumentException.class, () -> actionModule.initRestHandlers(null)); assertThat(e.getMessage(), startsWith("Cannot replace existing handler for [/] for method: GET")); } finally { @@ -221,7 +223,8 @@ public List getRestHandlers( null, usageService, null, - null); + null + ); actionModule.initRestHandlers(null); // At this point the easiest way to confirm that a handler is loaded is to try to register another one on top of it and to fail Exception e = expectThrows( @@ -265,7 +268,8 @@ public void test3rdPartyHandlerIsNotInstalled() { null, usageService, null, - null) + null + ) ); assertThat( e.getMessage(), diff --git a/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java b/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java index 4b7d76406cefe..9556cab19e04b 100644 --- a/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java +++ b/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java @@ -112,16 +112,17 @@ public void testRegisterHttpTransport() { NetworkModule module = newNetworkModule(settings, new NetworkPlugin() { @Override public Map> getHttpTransports( - Settings settings, - ThreadPool threadPool, - BigArrays bigArrays, - PageCacheRecycler pageCacheRecycler, - CircuitBreakerService circuitBreakerService, - NamedXContentRegistry xContentRegistry, - NetworkService networkService, - HttpServerTransport.Dispatcher requestDispatcher, - ClusterSettings clusterSettings, - List tracers) { + Settings settings, + ThreadPool threadPool, + BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, + CircuitBreakerService circuitBreakerService, + NamedXContentRegistry xContentRegistry, + NetworkService networkService, + HttpServerTransport.Dispatcher requestDispatcher, + ClusterSettings clusterSettings, + List tracers + ) { return Collections.singletonMap("custom", custom); } }); @@ -157,16 +158,17 @@ public Map> getTransports( @Override public Map> getHttpTransports( - Settings settings, - ThreadPool threadPool, - BigArrays bigArrays, - PageCacheRecycler pageCacheRecycler, - CircuitBreakerService circuitBreakerService, - NamedXContentRegistry xContentRegistry, - NetworkService networkService, - HttpServerTransport.Dispatcher requestDispatcher, - ClusterSettings clusterSettings, - List tracers) { + Settings settings, + ThreadPool threadPool, + BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, + CircuitBreakerService circuitBreakerService, + NamedXContentRegistry xContentRegistry, + NetworkService networkService, + HttpServerTransport.Dispatcher requestDispatcher, + ClusterSettings clusterSettings, + List tracers + ) { Map> supplierMap = new HashMap<>(); supplierMap.put("custom", custom); supplierMap.put("default_custom", def); @@ -200,16 +202,17 @@ public Map> getTransports( @Override public Map> getHttpTransports( - Settings settings, - ThreadPool threadPool, - BigArrays bigArrays, - PageCacheRecycler pageCacheRecycler, - CircuitBreakerService circuitBreakerService, - NamedXContentRegistry xContentRegistry, - NetworkService networkService, - HttpServerTransport.Dispatcher requestDispatcher, - ClusterSettings clusterSettings, - List tracers) { + Settings settings, + ThreadPool threadPool, + BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, + CircuitBreakerService circuitBreakerService, + NamedXContentRegistry xContentRegistry, + NetworkService networkService, + HttpServerTransport.Dispatcher requestDispatcher, + ClusterSettings clusterSettings, + List tracers 
+ ) { Map> supplierMap = new HashMap<>(); supplierMap.put("custom", custom); supplierMap.put("default_custom", def); diff --git a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java index 18129d0b86aac..022b527c5de9d 100644 --- a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java +++ b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java @@ -162,7 +162,8 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th xContentRegistry(), dispatcher, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), - tracers) { + tracers + ) { @Override protected HttpServerChannel bind(InetSocketAddress hostAddress) { @@ -276,7 +277,8 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, } }, clusterSettings, - tracers) { + tracers + ) { @Override protected HttpServerChannel bind(InetSocketAddress hostAddress) { return null; @@ -326,7 +328,8 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, } }, clusterSettings, - tracers) { + tracers + ) { @Override protected HttpServerChannel bind(InetSocketAddress hostAddress) { return null; @@ -479,7 +482,8 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, } }, clusterSettings, - tracers) { + tracers + ) { @Override protected HttpServerChannel bind(InetSocketAddress hostAddress) { return null; @@ -535,7 +539,8 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, } }, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), - tracers) { + tracers + ) { @Override protected HttpServerChannel bind(InetSocketAddress hostAddress) { @@ -610,7 +615,8 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, } }, clusterSettings, - tracers) { + tracers + ) { @Override protected HttpServerChannel bind(InetSocketAddress hostAddress) { diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index d56d59de67440..c485af500d6b1 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -318,10 +318,10 @@ public void onTraceStopped(Traceable traceable) { span.end(); } // TODO: geoip-downloader[c] isn't getting stopped? 
-// LOGGER.warn( -// "Active spans after stopped trace: {}", -// spans.values().stream().map(Tuple::v1).map(span -> ((ReadWriteSpan) span).getName()).toList() -// ); + // LOGGER.warn( + // "Active spans after stopped trace: {}", + // spans.values().stream().map(Tuple::v1).map(span -> ((ReadWriteSpan) span).getName()).toList() + // ); } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java index a832e33ff32eb..40caa05cbba4d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java @@ -393,16 +393,17 @@ public Map> getTransports( @Override public Map> getHttpTransports( - Settings settings, - ThreadPool threadPool, - BigArrays bigArrays, - PageCacheRecycler pageCacheRecycler, - CircuitBreakerService circuitBreakerService, - NamedXContentRegistry xContentRegistry, - NetworkService networkService, - HttpServerTransport.Dispatcher dispatcher, - ClusterSettings clusterSettings, - List tracers) { + Settings settings, + ThreadPool threadPool, + BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, + CircuitBreakerService circuitBreakerService, + NamedXContentRegistry xContentRegistry, + NetworkService networkService, + HttpServerTransport.Dispatcher dispatcher, + ClusterSettings clusterSettings, + List tracers + ) { Map> transports = new HashMap<>(); filterPlugins(NetworkPlugin.class).stream() .forEach( @@ -417,7 +418,8 @@ public Map> getHttpTransports( networkService, dispatcher, clusterSettings, - null) + null + ) ) ); return transports; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 78c4995bedb58..16aac894ce8ef 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -1522,7 +1522,8 @@ public Map> getHttpTransports( NetworkService networkService, HttpServerTransport.Dispatcher dispatcher, ClusterSettings clusterSettings, - List tracers) { + List tracers + ) { if (enabled == false) { // don't register anything if we are not enabled return Collections.emptyMap(); } @@ -1541,7 +1542,8 @@ public Map> getHttpTransports( dispatcher, clusterSettings, getNettySharedGroupFactory(settings), - tracers) + tracers + ) ); httpTransports.put( SecurityField.NIO, diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java index 539321060fb9b..ad18bcd35d052 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java @@ -27,10 +27,10 @@ import org.elasticsearch.xpack.security.transport.SecurityHttpExceptionHandler; import org.elasticsearch.xpack.security.transport.filter.IPFilter; -import javax.net.ssl.SSLEngine; - import java.util.List; +import javax.net.ssl.SSLEngine; + import static 
org.elasticsearch.xpack.core.XPackSettings.HTTP_SSL_ENABLED; public class SecurityNetty4HttpServerTransport extends Netty4HttpServerTransport { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index e573fc96e0437..99d33fbc72f4a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -705,7 +705,8 @@ public void testSecurityRestHandlerWrapperCanBeInstalled() throws IllegalAccessE null, usageService, null, - null); + null + ); actionModule.initRestHandlers(null); appender.assertAllExpectationsMatched(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 02f7a7e25a5cc..3050c465d0475 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -1570,7 +1570,7 @@ public void testDenialForAnonymousUser() throws IOException { new XPackLicenseState(() -> 0), TestIndexNameExpressionResolver.newInstance(), operatorPrivilegesService, - RESTRICTED_INDICES + RESTRICTED_INDICES, new AuthorizationTracer(threadContext) ); @@ -1618,7 +1618,7 @@ public void testDenialForAnonymousUserAuthorizationExceptionDisabled() throws IO new XPackLicenseState(() -> 0), TestIndexNameExpressionResolver.newInstance(), operatorPrivilegesService, - RESTRICTED_INDICES + RESTRICTED_INDICES, new AuthorizationTracer(threadContext) ); @@ -2767,7 +2767,7 @@ public void getUserPrivileges( licenseState, TestIndexNameExpressionResolver.newInstance(), operatorPrivilegesService, - RESTRICTED_INDICES + RESTRICTED_INDICES, new AuthorizationTracer(threadContext) ); Authentication authentication; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java index 743aa4e59f8ec..4f5aa6e93983b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java @@ -78,7 +78,8 @@ public void testDefaultClientAuth() throws Exception { new NullDispatcher(), randomClusterSettings(), new SharedGroupFactory(settings), - List.of()); + List.of() + ); ChannelHandler handler = transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false)); @@ -104,7 +105,8 @@ public void testOptionalClientAuth() throws Exception { new NullDispatcher(), randomClusterSettings(), new SharedGroupFactory(settings), - List.of()); + List.of() + ); ChannelHandler handler = transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false)); @@ -130,7 +132,8 @@ public void 
testRequiredClientAuth() throws Exception { new NullDispatcher(), randomClusterSettings(), new SharedGroupFactory(settings), - List.of()); + List.of() + ); ChannelHandler handler = transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(true)); @@ -156,7 +159,8 @@ public void testNoClientAuth() throws Exception { new NullDispatcher(), randomClusterSettings(), new SharedGroupFactory(settings), - List.of()); + List.of() + ); ChannelHandler handler = transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false)); @@ -177,7 +181,8 @@ public void testCustomSSLConfiguration() throws Exception { new NullDispatcher(), randomClusterSettings(), new SharedGroupFactory(settings), - List.of()); + List.of() + ); ChannelHandler handler = transport.configureServerChannelHandler(); EmbeddedChannel ch = new EmbeddedChannel(handler); SSLEngine defaultEngine = ch.pipeline().get(SslHandler.class).engine(); @@ -199,7 +204,8 @@ public void testCustomSSLConfiguration() throws Exception { new NullDispatcher(), randomClusterSettings(), new SharedGroupFactory(settings), - List.of()); + List.of() + ); handler = transport.configureServerChannelHandler(); ch = new EmbeddedChannel(handler); SSLEngine customEngine = ch.pipeline().get(SslHandler.class).engine(); @@ -230,7 +236,8 @@ public void testNoExceptionWhenConfiguredWithoutSslKeySSLDisabled() throws Excep new NullDispatcher(), randomClusterSettings(), new SharedGroupFactory(settings), - List.of()); + List.of() + ); assertNotNull(transport.configureServerChannelHandler()); } } From cd6f1efeddc15ccda6fd11607bb9220a9a699dbd Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Mon, 21 Mar 2022 15:36:45 +0000 Subject: [PATCH 31/90] Improve REST tracing --- .../http/DefaultRestChannel.java | 11 ++- .../org/elasticsearch/http/HttpTracer.java | 21 ++++ .../rest/AbstractRestChannel.java | 10 +- .../elasticsearch/rest/MethodHandlers.java | 4 + .../org/elasticsearch/rest/RestChannel.java | 27 +++-- .../elasticsearch/rest/RestController.java | 25 ++++- .../org/elasticsearch/tracing/Tracer.java | 10 ++ .../elasticsearch/xpack/apm/APMTracer.java | 98 +++++++++++++++++-- 8 files changed, 185 insertions(+), 21 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java index 531661d5afab7..54b8ed1b8205d 100644 --- a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java +++ b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java @@ -136,6 +136,10 @@ public void sendResponse(RestResponse restResponse) { addCookies(httpResponse); + tracer.setAttribute(this, "http.status_code", restResponse.status().getStatus()); + restResponse.getHeaders() + .forEach((key, values) -> tracer.setAttribute(this, "http.response.headers." 
+ key, String.join("; ", values))); + ActionListener listener = ActionListener.wrap(onFinish); tracer.onTraceEvent(this, "startResponse"); httpChannel.sendResponse(httpResponse, listener); @@ -180,7 +184,7 @@ private void addCookies(HttpResponse response) { } @Override - public void startTrace(ThreadContext threadContext) { + public void startTrace() { this.tracer.onTraceStarted(threadContext, this); } @@ -188,4 +192,9 @@ public void startTrace(ThreadContext threadContext) { public void stopTrace() { this.tracer.onTraceStopped(this); } + + @Override + public void recordException(Throwable throwable) { + this.tracer.onTraceException(this, throwable); + } } diff --git a/server/src/main/java/org/elasticsearch/http/HttpTracer.java b/server/src/main/java/org/elasticsearch/http/HttpTracer.java index 8a048927b7fcd..bb854bba82f58 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpTracer.java +++ b/server/src/main/java/org/elasticsearch/http/HttpTracer.java @@ -20,6 +20,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.tasks.Task; +import org.elasticsearch.tracing.Traceable; import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.TransportService; @@ -58,6 +59,26 @@ void onTraceEvent(RestChannel channel, String eventName) { this.tracers.forEach(t -> t.onTraceEvent(channel, eventName)); } + public void onTraceException(RestChannel channel, Throwable throwable) { + this.tracers.forEach(t -> t.onTraceException(channel, throwable)); + } + + void setAttribute(Traceable traceable, String key, boolean value) { + this.tracers.forEach(t -> t.setAttribute(traceable, key, value)); + } + + void setAttribute(Traceable traceable, String key, double value) { + this.tracers.forEach(t -> t.setAttribute(traceable, key, value)); + } + + void setAttribute(Traceable traceable, String key, long value) { + this.tracers.forEach(t -> t.setAttribute(traceable, key, value)); + } + + void setAttribute(Traceable traceable, String key, String value) { + this.tracers.forEach(t -> t.setAttribute(traceable, key, value)); + } + /** * Logs the given request if request tracing is enabled and the request uri matches the current include and exclude patterns defined * in {@link HttpTransportSettings#SETTING_HTTP_TRACE_LOG_INCLUDE} and {@link HttpTransportSettings#SETTING_HTTP_TRACE_LOG_EXCLUDE}. diff --git a/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java b/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java index 58dc097d7bcc9..29e6e7b957b3e 100644 --- a/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java +++ b/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java @@ -45,6 +45,8 @@ public abstract class AbstractRestChannel implements RestChannel { private BytesStreamOutput bytesOut; + private String tracePath; + /** * Construct a channel for handling the request. 
* @@ -194,12 +196,12 @@ public boolean detailedErrorsEnabled() { } @Override - public void startTrace(ThreadContext threadContext) { - // no op + public String getTracePath() { + return tracePath; } @Override - public void stopTrace() { - // no op + public void setTracePath(String tracePath) { + this.tracePath = tracePath; } } diff --git a/server/src/main/java/org/elasticsearch/rest/MethodHandlers.java b/server/src/main/java/org/elasticsearch/rest/MethodHandlers.java index 509960c511c42..1070b61108e48 100644 --- a/server/src/main/java/org/elasticsearch/rest/MethodHandlers.java +++ b/server/src/main/java/org/elasticsearch/rest/MethodHandlers.java @@ -32,6 +32,10 @@ final class MethodHandlers { this.methodHandlers = new HashMap<>(2, 1); } + public String getPath() { + return path; + } + /** * Add a handler for an additional array of methods. Note that {@code MethodHandlers} * does not allow replacing the handler for an already existing method. diff --git a/server/src/main/java/org/elasticsearch/rest/RestChannel.java b/server/src/main/java/org/elasticsearch/rest/RestChannel.java index 2b5030657f8ef..35feb508fb424 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestChannel.java +++ b/server/src/main/java/org/elasticsearch/rest/RestChannel.java @@ -9,13 +9,13 @@ package org.elasticsearch.rest; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; import org.elasticsearch.tracing.Traceable; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.util.HashMap; import java.util.Map; /** @@ -50,16 +50,31 @@ default String getSpanId() { @Override default String getSpanName() { - return this.request().method() + " " + this.request().path(); + final String tracePath = this.getTracePath(); + return this.request().method() + " " + (tracePath != null ? tracePath : this.request().path()); } @Override default Map getAttributes() { - var req = this.request(); - return Map.of("http.method", req.method().name(), "http.url", req.uri()); + final RestRequest req = this.request(); + Map attributes = new HashMap<>(); + req.getHeaders().forEach((key, values) -> attributes.put("http.request.headers." 
+ key, String.join("; ", values))); + attributes.put("http.method", req.method().name()); + attributes.put("http.url", req.uri()); + switch (req.getHttpRequest().protocolVersion()) { + case HTTP_1_0 -> attributes.put("http.flavour", "1.0"); + case HTTP_1_1 -> attributes.put("http.flavour", "1.1"); + } + return attributes; } - void startTrace(ThreadContext threadContext); + void setTracePath(String path); - void stopTrace(); + String getTracePath(); + + default void startTrace() {} + + default void stopTrace() {} + + default void recordException(Throwable throwable) {} } diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 2fd860957eec5..561ccdb389d13 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -437,7 +437,6 @@ private void sendContentTypeErrorMessage(@Nullable List contentTypeHeade private void tryAllHandlers(final RestRequest request, final RestChannel channel, final ThreadContext threadContext) throws Exception { try { copyRestHeaders(request, threadContext); - channel.startTrace(threadContext); validateErrorTrace(request, channel); } catch (IllegalArgumentException e) { channel.sendResponse(BytesRestResponse.createSimpleErrorResponse(channel, BAD_REQUEST, e.getMessage())); @@ -467,15 +466,20 @@ private void tryAllHandlers(final RestRequest request, final RestChannel channel return; } } else { + channel.setTracePath(handlers.getPath()); + channel.startTrace(); dispatchRequest(request, channel, handler, threadContext); return; } } } catch (final IllegalArgumentException e) { + channel.startTrace(); + channel.recordException(e); handleUnsupportedHttpMethod(uri, null, channel, getValidHandlerMethodSet(rawPath), e); return; } // If request has not been handled, fallback to a bad request error. 
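// Illustrative sketch (not part of this patch): the call order that the RestChannel tracing hooks
// shown above are designed for. `TraceLifecycleSketch`, `routePath` and `dispatch` are hypothetical
// names used only for illustration; setTracePath/startTrace/recordException/stopTrace come from the
// patch, and in the real code RestController and DefaultRestChannel make these calls themselves.
import org.elasticsearch.rest.RestChannel;

final class TraceLifecycleSketch {
    static void traceDispatch(RestChannel channel, String routePath, Runnable dispatch) {
        channel.setTracePath(routePath); // span name becomes "<METHOD> <registered route>" via getSpanName()
        channel.startTrace();            // DefaultRestChannel forwards this to the HttpTracer's tracers
        try {
            dispatch.run();              // request handling
        } catch (RuntimeException e) {
            channel.recordException(e);  // forwarded to each Tracer's onTraceException
            throw e;
        } finally {
            channel.stopTrace();         // end the span; the real code does this when the response is sent
        }
    }
}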
+ channel.startTrace(); handleBadRequest(uri, requestMethod, channel); } @@ -676,14 +680,29 @@ private void close() { } @Override - public void startTrace(ThreadContext threadContext) { - delegate.startTrace(threadContext); + public void startTrace() { + delegate.startTrace(); } @Override public void stopTrace() { delegate.stopTrace(); } + + @Override + public void recordException(Throwable throwable) { + delegate.recordException(throwable); + } + + @Override + public void setTracePath(String path) { + delegate.setTracePath(path); + } + + @Override + public String getTracePath() { + return delegate.getTracePath(); + } } private static CircuitBreaker inFlightRequestsBreaker(CircuitBreakerService circuitBreakerService) { diff --git a/server/src/main/java/org/elasticsearch/tracing/Tracer.java b/server/src/main/java/org/elasticsearch/tracing/Tracer.java index e8c4e5e49d4a6..37dad1e7a29d5 100644 --- a/server/src/main/java/org/elasticsearch/tracing/Tracer.java +++ b/server/src/main/java/org/elasticsearch/tracing/Tracer.java @@ -26,4 +26,14 @@ public interface Tracer { void onTraceStopped(Traceable traceable); void onTraceEvent(Traceable traceable, String eventName); + + void onTraceException(Traceable traceable, Throwable throwable); + + void setAttribute(Traceable traceable, String key, boolean value); + + void setAttribute(Traceable traceable, String key, double value); + + void setAttribute(Traceable traceable, String key, long value); + + void setAttribute(Traceable traceable, String key, String value); } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index c485af500d6b1..73c0ed6f2a25b 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -20,6 +20,7 @@ import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.trace.ReadableSpan; import io.opentelemetry.sdk.trace.SdkTracerProvider; import io.opentelemetry.sdk.trace.SpanProcessor; import io.opentelemetry.sdk.trace.data.SpanData; @@ -49,13 +50,16 @@ import java.security.AccessController; import java.security.PrivilegedAction; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Queue; +import java.util.Set; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.function.Function; @@ -253,14 +257,20 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { } } - // hack transactions to avoid the 'custom' type - // this one is not part of OTel semantic attributes - spanBuilder.setAttribute("type", "elasticsearch"); + // These attributes don't apply to HTTP spans. The APM server can infer a number of things + // when "http." 
attributes are present + if (traceable.getAttributes().keySet().stream().anyMatch(key -> key.startsWith("http.")) == false) { + // hack transactions to avoid the 'custom' transaction type + // this one is not part of OTel semantic attributes + spanBuilder.setAttribute("type", "elasticsearch"); + // hack spans to avoid the 'app' span.type, will make it use external/elasticsearch + // also allows to set destination resource name in map + spanBuilder.setAttribute(SemanticAttributes.MESSAGING_SYSTEM, "elasticsearch"); + spanBuilder.setAttribute(SemanticAttributes.MESSAGING_DESTINATION, clusterService.getNodeName()); + } - // hack spans to avoid the 'app' span.type, will make it use external/elasticsearch - // also allows to set destination resource name in map - spanBuilder.setAttribute(SemanticAttributes.MESSAGING_SYSTEM, "elasticsearch"); - spanBuilder.setAttribute(SemanticAttributes.MESSAGING_DESTINATION, clusterService.getNodeName()); +// spanBuilder.setAttribute(SemanticAttributes.DB_SYSTEM, "elasticsearch"); +// spanBuilder.setAttribute(SemanticAttributes.DB_NAME, clusterService.getNodeName()); // this will duplicate the "resource attributes" that are defined globally // but providing them as span attributes allow easier mapping through labels as otel attributes are stored as-is only in @@ -283,10 +293,54 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { threadContext.removeRequestHeaders(TRACE_HEADERS); threadContext.putHeader(spanHeaders); + logGraphviz(span); + return span; }); } + private static final Set CACHE = new HashSet<>(); + + @Override + public void onTraceException(Traceable traceable, Throwable throwable) { + final var span = spans.get(traceable.getSpanId()); + if (span != null) { + span.recordException(throwable); + } + } + + @Override + public void setAttribute(Traceable traceable, String key, boolean value) { + final var span = spans.get(traceable.getSpanId()); + if (span != null) { + span.setAttribute(key, value); + } + } + + @Override + public void setAttribute(Traceable traceable, String key, double value) { + final var span = spans.get(traceable.getSpanId()); + if (span != null) { + span.setAttribute(key, value); + } + } + + @Override + public void setAttribute(Traceable traceable, String key, long value) { + final var span = spans.get(traceable.getSpanId()); + if (span != null) { + span.setAttribute(key, value); + } + } + + @Override + public void setAttribute(Traceable traceable, String key, String value) { + final var span = spans.get(traceable.getSpanId()); + if (span != null) { + span.setAttribute(key, value); + } + } + private boolean isSpanNameIncluded(String name) { // Alternatively we could use automata here but it is much more complex // and it needs wrapping like done for use in the security plugin. 
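// Illustrative sketch (not part of this patch): a minimal Traceable and the calls that exercise the
// APMTracer methods added above. `SnapshotStepSketch`, the attribute keys and the example values are
// assumptions made for illustration only; the Traceable/Tracer method names themselves come from the
// patch, and the Map<String, Object> signature is inferred from the surrounding code.
import java.util.Map;

import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.tracing.Traceable;
import org.elasticsearch.tracing.Tracer;

final class SnapshotStepSketch implements Traceable {
    private final String id;

    SnapshotStepSketch(String id) {
        this.id = id;
    }

    @Override
    public String getSpanId() {
        return "snapshot-" + id;   // keys the entry in APMTracer's `spans` map
    }

    @Override
    public String getSpanName() {
        return "snapshot " + id;   // subject to the xpack.apm.tracing.names.include filter
    }

    @Override
    public Map<String, Object> getAttributes() {
        // A non-"http." key, so onTraceStarted still applies the messaging-system span-type hack.
        return Map.of("es.snapshot.id", id);
    }

    static void example(Tracer tracer, ThreadContext threadContext, SnapshotStepSketch step) {
        tracer.onTraceStarted(threadContext, step);           // opens the span and propagates trace headers
        tracer.setAttribute(step, "es.snapshot.shards", 42L); // silently ignored if no span is registered
        tracer.onTraceStopped(step);                          // ends the span
    }
}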
@@ -415,4 +469,34 @@ public String get(Map carrier, String key) { private static boolean isSupportedContextKey(String key) { return TRACE_HEADERS.contains(key); } + + private static void logGraphviz(Span span) { + final String spanStr = span.toString(); + + int i = spanStr.indexOf("spanId="); + int j = spanStr.indexOf(",", i); + String spanId = spanStr.substring(i + 7, j); + + String parentSpanId = null; + i = spanStr.indexOf("spanId=", j); + if (i > -1) { + j = spanStr.indexOf(",", i); + parentSpanId = spanStr.substring(i + 7, j); + } + + i = spanStr.indexOf("name=", j); + j = spanStr.indexOf(",", i); + String spanName = spanStr.substring(i + 5, j); + +// LOGGER.warn("BADGER: {}", span); + + if (CACHE.add(spanId)) { + LOGGER.warn("BADGER: __{} [label=\"{}\"]", spanId, spanName); + } + + if (parentSpanId != null) { + LOGGER.warn("BADGER: __{} -> __{}", spanId, parentSpanId); + } + + } } From f04c6ffcba01571c2b828774f5dde879218b34bc Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Tue, 22 Mar 2022 13:57:07 +0000 Subject: [PATCH 32/90] Update to latest APM agent --- .../elasticsearch/bootstrap/PolicyUtil.java | 3 +- .../elasticsearch/bootstrap/security.policy | 5 + x-pack/plugin/apm-integration/build.gradle | 266 ++++------ .../elastic-apm-agent-1.30.0.jar.sha1 | 1 + ...ENSE.txt => elastic-apm-agent-LICENSE.txt} | 2 +- .../licenses/elastic-apm-agent-NOTICE.txt | 465 ++++++++++++++++++ .../licenses/failureaccess-1.0.1.jar.sha1 | 1 - .../licenses/failureaccess-LICENSE.txt | 201 -------- .../licenses/failureaccess-NOTICE.txt | 0 .../apm-integration/licenses/grpc-LICENSE.txt | 202 -------- .../apm-integration/licenses/grpc-NOTICE.txt | 62 --- .../licenses/grpc-api-1.42.1.jar.sha1 | 1 - .../licenses/grpc-context-1.42.1.jar.sha1 | 1 - .../licenses/grpc-core-1.42.1.jar.sha1 | 1 - .../licenses/grpc-netty-1.42.1.jar.sha1 | 1 - .../licenses/grpc-stub-1.42.1.jar.sha1 | 1 - .../licenses/guava-31.0.1-jre.jar.sha1 | 1 - .../licenses/guava-LICENSE.txt | 202 -------- .../apm-integration/licenses/guava-NOTICE.txt | 0 .../licenses/netty-LICENSE.txt | 202 -------- .../apm-integration/licenses/netty-NOTICE.txt | 116 ----- .../netty-buffer-4.1.74.Final.jar.sha1 | 1 - .../netty-codec-4.1.74.Final.jar.sha1 | 1 - .../netty-codec-http-4.1.74.Final.jar.sha1 | 1 - .../netty-codec-http2-4.1.74.Final.jar.sha1 | 1 - .../netty-common-4.1.74.Final.jar.sha1 | 1 - .../netty-handler-4.1.74.Final.jar.sha1 | 1 - .../netty-resolver-4.1.74.Final.jar.sha1 | 1 - .../netty-transport-4.1.74.Final.jar.sha1 | 1 - .../licenses/okhttp-3.14.9.jar.sha1 | 1 - .../licenses/okhttp-NOTICE.txt | 0 .../licenses/okio-1.17.2.jar.sha1 | 1 - .../apm-integration/licenses/okio-LICENSE.txt | 201 -------- .../apm-integration/licenses/okio-NOTICE.txt | 0 ...telemetry-api-metrics-1.9.0-alpha.jar.sha1 | 1 - ...ntelemetry-exporter-logging-1.9.0.jar.sha1 | 1 - ...emetry-exporter-otlp-common-1.9.0.jar.sha1 | 1 - ...lemetry-exporter-otlp-trace-1.9.0.jar.sha1 | 1 - .../licenses/opentelemetry-sdk-1.9.0.jar.sha1 | 1 - .../opentelemetry-sdk-common-1.9.0.jar.sha1 | 1 - ...telemetry-sdk-metrics-1.9.0-alpha.jar.sha1 | 1 - .../opentelemetry-sdk-trace-1.9.0.jar.sha1 | 1 - .../licenses/perfmark-LICENSE.txt | 201 -------- .../licenses/perfmark-NOTICE.txt | 41 -- .../licenses/perfmark-api-0.24.0.jar.sha1 | 1 - .../licenses/perfmark-impl-0.24.0.jar.sha1 | 1 - .../elasticsearch/xpack/apm/APMTracer.java | 114 +++-- .../plugin-metadata/plugin-security.policy | 12 +- 48 files changed, 645 insertions(+), 1678 deletions(-) create mode 100644 
x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.30.0.jar.sha1 rename x-pack/plugin/apm-integration/licenses/{okhttp-LICENSE.txt => elastic-apm-agent-LICENSE.txt} (99%) create mode 100644 x-pack/plugin/apm-integration/licenses/elastic-apm-agent-NOTICE.txt delete mode 100644 x-pack/plugin/apm-integration/licenses/failureaccess-1.0.1.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/failureaccess-LICENSE.txt delete mode 100644 x-pack/plugin/apm-integration/licenses/failureaccess-NOTICE.txt delete mode 100644 x-pack/plugin/apm-integration/licenses/grpc-LICENSE.txt delete mode 100644 x-pack/plugin/apm-integration/licenses/grpc-NOTICE.txt delete mode 100644 x-pack/plugin/apm-integration/licenses/grpc-api-1.42.1.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/grpc-context-1.42.1.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/grpc-core-1.42.1.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/grpc-netty-1.42.1.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/grpc-stub-1.42.1.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/guava-31.0.1-jre.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/guava-LICENSE.txt delete mode 100644 x-pack/plugin/apm-integration/licenses/guava-NOTICE.txt delete mode 100644 x-pack/plugin/apm-integration/licenses/netty-LICENSE.txt delete mode 100644 x-pack/plugin/apm-integration/licenses/netty-NOTICE.txt delete mode 100644 x-pack/plugin/apm-integration/licenses/netty-buffer-4.1.74.Final.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/netty-codec-4.1.74.Final.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/netty-codec-http-4.1.74.Final.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/netty-codec-http2-4.1.74.Final.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/netty-common-4.1.74.Final.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/netty-handler-4.1.74.Final.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/netty-resolver-4.1.74.Final.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/netty-transport-4.1.74.Final.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/okhttp-3.14.9.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/okhttp-NOTICE.txt delete mode 100644 x-pack/plugin/apm-integration/licenses/okio-1.17.2.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/okio-LICENSE.txt delete mode 100644 x-pack/plugin/apm-integration/licenses/okio-NOTICE.txt delete mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-api-metrics-1.9.0-alpha.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-logging-1.9.0.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-otlp-common-1.9.0.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-otlp-trace-1.9.0.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-1.9.0.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-1.9.0.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-1.9.0-alpha.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-1.9.0.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/perfmark-LICENSE.txt delete mode 100644 
x-pack/plugin/apm-integration/licenses/perfmark-NOTICE.txt delete mode 100644 x-pack/plugin/apm-integration/licenses/perfmark-api-0.24.0.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/perfmark-impl-0.24.0.jar.sha1 diff --git a/server/src/main/java/org/elasticsearch/bootstrap/PolicyUtil.java b/server/src/main/java/org/elasticsearch/bootstrap/PolicyUtil.java index 003c9a121a575..a708bfc51b0be 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/PolicyUtil.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/PolicyUtil.java @@ -184,7 +184,8 @@ public boolean test(Permission permission) { new RuntimePermission("createClassLoader"), new RuntimePermission("getFileStoreAttributes"), new RuntimePermission("accessUserInformation"), - new AuthPermission("modifyPrivateCredentials") + new AuthPermission("modifyPrivateCredentials"), + new RuntimePermission("accessSystemModules") ); PermissionCollection modulePermissionCollection = new Permissions(); namedPermissions.forEach(modulePermissionCollection::add); diff --git a/server/src/main/resources/org/elasticsearch/bootstrap/security.policy b/server/src/main/resources/org/elasticsearch/bootstrap/security.policy index b617083e85ab2..df1946064f538 100644 --- a/server/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/server/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -157,3 +157,8 @@ grant { // system memory on Linux systems affected by JDK bug (#66629) permission java.io.FilePermission "/proc/meminfo", "read"; }; + +grant codeBase "${codebase.log4j-api}" { + permission java.lang.RuntimePermission "getClassLoader"; + permission java.lang.RuntimePermission "getProtectionDomain"; +}; diff --git a/x-pack/plugin/apm-integration/build.gradle b/x-pack/plugin/apm-integration/build.gradle index 1cb90baaa94c0..a5ae714500e33 100644 --- a/x-pack/plugin/apm-integration/build.gradle +++ b/x-pack/plugin/apm-integration/build.gradle @@ -17,50 +17,15 @@ esplugin { dependencies { implementation "io.opentelemetry:opentelemetry-api:${versions.opentelemetry}" - // implementation "io.opentelemetry:opentelemetry-api-metrics:${versions.opentelemetry}-alpha" implementation "io.opentelemetry:opentelemetry-context:${versions.opentelemetry}" - // implementation "io.opentelemetry:opentelemetry-sdk:${versions.opentelemetry}" - // implementation "io.opentelemetry:opentelemetry-sdk-trace:${versions.opentelemetry}" - // implementation "io.opentelemetry:opentelemetry-sdk-common:${versions.opentelemetry}" - // implementation "io.opentelemetry:opentelemetry-sdk-metrics:${versions.opentelemetry}-alpha" implementation "io.opentelemetry:opentelemetry-semconv:${versions.opentelemetry}-alpha" - // implementation "io.opentelemetry:opentelemetry-exporter-logging:${versions.opentelemetry}" - // // required to use OTLP (to get OtlpGrpcSpanExporter) - // implementation "io.opentelemetry:opentelemetry-exporter-otlp-trace:${versions.opentelemetry}" - // // required by OTLP (to get GrpcExporter, Marshaller etc) - // implementation "io.opentelemetry:opentelemetry-exporter-otlp-common:${versions.opentelemetry}" - // // required by OTLP common & trace - // implementation "io.grpc:grpc-api:1.42.1" - // implementation "io.grpc:grpc-stub:1.42.1" - // implementation "io.grpc:grpc-core:1.42.1" - // implementation "io.grpc:grpc-context:1.42.1" - // // netty HTTP client is used for gRPC calls to Elastic's APM server - // implementation "io.grpc:grpc-netty:1.42.1" - // // okio and okhttp are required by GrpcExporter as the default 
implementation, but we don't use it - // implementation "com.squareup.okhttp3:okhttp:3.14.9" - // implementation "com.squareup.okio:okio:1.17.2" - // // required by io.grpc - // implementation 'io.perfmark:perfmark-api:0.24.0' - // implementation 'io.perfmark:perfmark-impl:0.24.0' - // runtimeOnly 'com.google.guava:failureaccess:1.0.1' - // // required by grpc-netty - // api "io.netty:netty-buffer:${versions.netty}" - // api "io.netty:netty-transport:${versions.netty}" - // api "io.netty:netty-common:${versions.netty}" - // api "io.netty:netty-codec:${versions.netty}" - // api "io.netty:netty-codec-http:${versions.netty}" - // api "io.netty:netty-codec-http2:${versions.netty}" - // api "io.netty:netty-handler:${versions.netty}" - // api "io.netty:netty-resolver:${versions.netty}" - // runtimeOnly 'com.google.guava:guava:31.0.1-jre' + runtimeOnly 'co.elastic.apm:elastic-apm-agent:1.30.0' compileOnly project(path: xpackModule('core')) internalClusterTestImplementation(testArtifact(project(xpackModule('core')))) internalClusterTestImplementation(testArtifact(project(xpackModule('security')))) { exclude group: 'com.google.guava', module: 'guava' } - - runtimeOnly 'co.elastic.apm:elastic-apm-agent:1.29.0' } // no unit-test for now @@ -68,132 +33,107 @@ tasks.named("test").configure { enabled = false } tasks.named("dependencyLicenses").configure { mapping from: /opentelemetry-.*/, to: 'opentelemetry' - mapping from: /grpc-.*/, to: 'grpc' - mapping from: /netty-.*/, to: 'netty' - mapping from: /perfmark-.*/, to: 'perfmark' } -tasks.named("thirdPartyAudit").configure { - ignoreViolations( - 'com.google.common.cache.Striped64$1', - 'com.google.common.cache.Striped64$Cell', - 'com.google.common.cache.Striped64', - 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', - 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', - 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', - 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', - 'com.google.common.hash.Striped64$1', - 'com.google.common.hash.Striped64$Cell', - 'com.google.common.hash.Striped64', - 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', - 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', - 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', - 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', - 'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator', - 'io.netty.util.internal.PlatformDependent0$1', - 'io.netty.util.internal.PlatformDependent0$2', - 'io.netty.util.internal.PlatformDependent0$3', - 'io.netty.util.internal.PlatformDependent0$5', - 'io.netty.util.internal.PlatformDependent0', - 'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueConsumerNodeRef', - 'io.netty.util.internal.shaded.org.jctools.queues.BaseLinkedQueueProducerNodeRef', - 'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueColdProducerFields', - 'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueConsumerFields', - 'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueProducerFields', - 'io.netty.util.internal.shaded.org.jctools.queues.LinkedQueueNode', - 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueConsumerIndexField', - 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerIndexField', - 
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerLimitField', - 'io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess', - 'io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess', - 'io.opentelemetry.internal.shaded.jctools.queues.MpscArrayQueueConsumerIndexField', - 'io.opentelemetry.internal.shaded.jctools.queues.MpscArrayQueueProducerIndexField', - 'io.opentelemetry.internal.shaded.jctools.queues.MpscArrayQueueProducerLimitField', - 'io.opentelemetry.internal.shaded.jctools.util.UnsafeAccess', - 'io.opentelemetry.internal.shaded.jctools.util.UnsafeRefArrayAccess', - ) - ignoreMissingClasses( - 'android.net.ssl.SSLSockets', - 'android.os.Build$VERSION', - 'android.util.Log', - 'com.aayushatharva.brotli4j.Brotli4jLoader', - 'com.aayushatharva.brotli4j.decoder.DecoderJNI$Status', - 'com.aayushatharva.brotli4j.decoder.DecoderJNI$Wrapper', - 'com.aayushatharva.brotli4j.encoder.Encoder$Mode', - 'com.aayushatharva.brotli4j.encoder.Encoder$Parameters', - 'com.aayushatharva.brotli4j.encoder.Encoder', - 'com.github.luben.zstd.Zstd', - 'com.google.gson.stream.JsonReader', - 'com.google.gson.stream.JsonToken', - 'com.google.protobuf.ExtensionRegistry', - 'com.google.protobuf.ExtensionRegistryLite', - 'com.google.protobuf.MessageLite$Builder', - 'com.google.protobuf.MessageLite', - 'com.google.protobuf.MessageLiteOrBuilder', - 'com.google.protobuf.Parser', - 'com.google.protobuf.nano.CodedOutputByteBufferNano', - 'com.google.protobuf.nano.MessageNano', - 'com.jcraft.jzlib.Deflater', - 'com.jcraft.jzlib.Inflater', - 'com.jcraft.jzlib.JZlib$WrapperType', - 'com.jcraft.jzlib.JZlib', - 'com.ning.compress.BufferRecycler', - 'com.ning.compress.lzf.ChunkDecoder', - 'com.ning.compress.lzf.ChunkEncoder', - 'com.ning.compress.lzf.LZFChunk', - 'com.ning.compress.lzf.LZFEncoder', - 'com.ning.compress.lzf.util.ChunkDecoderFactory', - 'com.ning.compress.lzf.util.ChunkEncoderFactory', - 'io.grpc.netty.shaded.io.grpc.netty.GrpcSslContexts', - 'io.grpc.netty.shaded.io.grpc.netty.NettyChannelBuilder', - 'io.grpc.netty.shaded.io.netty.handler.ssl.SslContextBuilder', - 'io.grpc.okhttp.OkHttpChannelBuilder', - 'io.netty.internal.tcnative.AsyncSSLPrivateKeyMethod', - 'io.netty.internal.tcnative.AsyncTask', - 'io.netty.internal.tcnative.Buffer', - 'io.netty.internal.tcnative.CertificateCallback', - 'io.netty.internal.tcnative.CertificateCompressionAlgo', - 'io.netty.internal.tcnative.CertificateVerifier', - 'io.netty.internal.tcnative.Library', - 'io.netty.internal.tcnative.ResultCallback', - 'io.netty.internal.tcnative.SSL', - 'io.netty.internal.tcnative.SSLContext', - 'io.netty.internal.tcnative.SSLPrivateKeyMethod', - 'io.netty.internal.tcnative.SSLSession', - 'io.netty.internal.tcnative.SSLSessionCache', - 'io.netty.internal.tcnative.SessionTicketKey', - 'io.netty.internal.tcnative.SniHostNameMatcher', - 'io.opentelemetry.sdk.logs.data.Body', - 'io.opentelemetry.sdk.logs.data.LogData', - 'io.opentelemetry.sdk.logs.data.Severity', - 'io.opentelemetry.sdk.logs.export.LogExporter', - 'lzma.sdk.lzma.Encoder', - 'org.bouncycastle.cert.X509v3CertificateBuilder', - 'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter', - 'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder', - 'org.conscrypt.AllocatedBuffer', - 'org.conscrypt.BufferAllocator', - 'org.conscrypt.Conscrypt$ProviderBuilder', - 'org.conscrypt.Conscrypt', - 'org.conscrypt.HandshakeListener', - 'org.eclipse.jetty.alpn.ALPN$ClientProvider', - 'org.eclipse.jetty.alpn.ALPN$ServerProvider', - 
'org.eclipse.jetty.alpn.ALPN', - 'org.eclipse.jetty.npn.NextProtoNego$ClientProvider', - 'org.eclipse.jetty.npn.NextProtoNego$ServerProvider', - 'org.eclipse.jetty.npn.NextProtoNego', - 'org.jboss.marshalling.ByteInput', - 'org.jboss.marshalling.ByteOutput', - 'org.jboss.marshalling.Marshaller', - 'org.jboss.marshalling.MarshallerFactory', - 'org.jboss.marshalling.MarshallingConfiguration', - 'org.jboss.marshalling.Unmarshaller', - 'org.slf4j.Logger', - 'org.slf4j.LoggerFactory', - 'org.slf4j.helpers.FormattingTuple', - 'org.slf4j.helpers.MessageFormatter', - 'org.slf4j.spi.LocationAwareLogger', - 'reactor.blockhound.BlockHound$Builder', - 'reactor.blockhound.integration.BlockHoundIntegration', - ) -} +// tasks.named("thirdPartyAudit").configure { +// ignoreViolations( +// // 'com.google.common.cache.Striped64$1', +// // 'com.google.common.cache.Striped64$Cell', +// // 'com.google.common.cache.Striped64', +// // 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', +// // 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', +// // 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', +// // 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', +// // 'com.google.common.hash.Striped64$1', +// // 'com.google.common.hash.Striped64$Cell', +// // 'com.google.common.hash.Striped64', +// // 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', +// // 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', +// // 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', +// // 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper' +// ) +// ignoreMissingClasses( +// // 'android.net.ssl.SSLSockets', +// // 'android.os.Build$VERSION', +// // 'android.util.Log', +// // 'com.aayushatharva.brotli4j.Brotli4jLoader', +// // 'com.aayushatharva.brotli4j.decoder.DecoderJNI$Status', +// // 'com.aayushatharva.brotli4j.decoder.DecoderJNI$Wrapper', +// // 'com.aayushatharva.brotli4j.encoder.Encoder$Mode', +// // 'com.aayushatharva.brotli4j.encoder.Encoder$Parameters', +// // 'com.aayushatharva.brotli4j.encoder.Encoder', +// // 'com.github.luben.zstd.Zstd', +// // 'com.google.gson.stream.JsonReader', +// // 'com.google.gson.stream.JsonToken', +// // 'com.google.protobuf.ExtensionRegistry', +// // 'com.google.protobuf.ExtensionRegistryLite', +// // 'com.google.protobuf.MessageLite$Builder', +// // 'com.google.protobuf.MessageLite', +// // 'com.google.protobuf.MessageLiteOrBuilder', +// // 'com.google.protobuf.Parser', +// // 'com.google.protobuf.nano.CodedOutputByteBufferNano', +// // 'com.google.protobuf.nano.MessageNano', +// // 'com.jcraft.jzlib.Deflater', +// // 'com.jcraft.jzlib.Inflater', +// // 'com.jcraft.jzlib.JZlib$WrapperType', +// // 'com.jcraft.jzlib.JZlib', +// // 'com.ning.compress.BufferRecycler', +// // 'com.ning.compress.lzf.ChunkDecoder', +// // 'com.ning.compress.lzf.ChunkEncoder', +// // 'com.ning.compress.lzf.LZFChunk', +// // 'com.ning.compress.lzf.LZFEncoder', +// // 'com.ning.compress.lzf.util.ChunkDecoderFactory', +// // 'com.ning.compress.lzf.util.ChunkEncoderFactory', +// // 'io.grpc.netty.shaded.io.grpc.netty.GrpcSslContexts', +// // 'io.grpc.netty.shaded.io.grpc.netty.NettyChannelBuilder', +// // 'io.grpc.netty.shaded.io.netty.handler.ssl.SslContextBuilder', +// // 'io.grpc.okhttp.OkHttpChannelBuilder', +// // 'io.netty.internal.tcnative.AsyncSSLPrivateKeyMethod', +// // 'io.netty.internal.tcnative.AsyncTask', +// // 
'io.netty.internal.tcnative.Buffer', +// // 'io.netty.internal.tcnative.CertificateCallback', +// // 'io.netty.internal.tcnative.CertificateCompressionAlgo', +// // 'io.netty.internal.tcnative.CertificateVerifier', +// // 'io.netty.internal.tcnative.Library', +// // 'io.netty.internal.tcnative.ResultCallback', +// // 'io.netty.internal.tcnative.SSL', +// // 'io.netty.internal.tcnative.SSLContext', +// // 'io.netty.internal.tcnative.SSLPrivateKeyMethod', +// // 'io.netty.internal.tcnative.SSLSession', +// // 'io.netty.internal.tcnative.SSLSessionCache', +// // 'io.netty.internal.tcnative.SessionTicketKey', +// // 'io.netty.internal.tcnative.SniHostNameMatcher', +// // 'io.opentelemetry.sdk.logs.data.Body', +// // 'io.opentelemetry.sdk.logs.data.LogData', +// // 'io.opentelemetry.sdk.logs.data.Severity', +// // 'io.opentelemetry.sdk.logs.export.LogExporter', +// // 'lzma.sdk.lzma.Encoder', +// // 'org.bouncycastle.cert.X509v3CertificateBuilder', +// // 'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter', +// // 'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder', +// // 'org.conscrypt.AllocatedBuffer', +// // 'org.conscrypt.BufferAllocator', +// // 'org.conscrypt.Conscrypt$ProviderBuilder', +// // 'org.conscrypt.Conscrypt', +// // 'org.conscrypt.HandshakeListener', +// // 'org.eclipse.jetty.alpn.ALPN$ClientProvider', +// // 'org.eclipse.jetty.alpn.ALPN$ServerProvider', +// // 'org.eclipse.jetty.alpn.ALPN', +// // 'org.eclipse.jetty.npn.NextProtoNego$ClientProvider', +// // 'org.eclipse.jetty.npn.NextProtoNego$ServerProvider', +// // 'org.eclipse.jetty.npn.NextProtoNego', +// // 'org.jboss.marshalling.ByteInput', +// // 'org.jboss.marshalling.ByteOutput', +// // 'org.jboss.marshalling.Marshaller', +// // 'org.jboss.marshalling.MarshallerFactory', +// // 'org.jboss.marshalling.MarshallingConfiguration', +// // 'org.jboss.marshalling.Unmarshaller', +// // 'org.slf4j.Logger', +// // 'org.slf4j.LoggerFactory', +// // 'org.slf4j.helpers.FormattingTuple', +// // 'org.slf4j.helpers.MessageFormatter', +// // 'org.slf4j.spi.LocationAwareLogger', +// // 'reactor.blockhound.BlockHound$Builder', +// // 'reactor.blockhound.integration.BlockHoundIntegration', +// ) +// } diff --git a/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.30.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.30.0.jar.sha1 new file mode 100644 index 0000000000000..2ea3c2249dd42 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.30.0.jar.sha1 @@ -0,0 +1 @@ +bccb70b60db2ab5900f6bb91ac5a71f950365913 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/okhttp-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-LICENSE.txt similarity index 99% rename from x-pack/plugin/apm-integration/licenses/okhttp-LICENSE.txt rename to x-pack/plugin/apm-integration/licenses/elastic-apm-agent-LICENSE.txt index 261eeb9e9f8b2..953a6d21ca18e 100644 --- a/x-pack/plugin/apm-integration/licenses/okhttp-LICENSE.txt +++ b/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-LICENSE.txt @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright [yyyy] [name of copyright owner] + Copyright 2018 Elastic and contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
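Side note on the trimmed dependency list above: after this change the plugin keeps only the OpenTelemetry API artifacts (opentelemetry-api, opentelemetry-context, opentelemetry-semconv) on the compile classpath and pulls in the Elastic APM agent 1.30.0 as a runtimeOnly dependency. The sketch below is purely illustrative of that API surface and is not the plugin's own tracer code; the class name TracingSketch and the instrumentation scope string are made up for the example.

    // Illustrative only: minimal use of the OpenTelemetry API that the retained
    // opentelemetry-api/opentelemetry-context dependencies expose. Not the
    // plugin's actual implementation; names here are assumptions.
    import io.opentelemetry.api.GlobalOpenTelemetry;
    import io.opentelemetry.api.trace.Span;
    import io.opentelemetry.api.trace.Tracer;
    import io.opentelemetry.context.Scope;

    public class TracingSketch {
        public static void main(String[] args) {
            // Resolves to whatever telemetry implementation is installed at runtime
            // (e.g. an agent); with only the API present it falls back to a no-op.
            Tracer tracer = GlobalOpenTelemetry.getTracer("org.elasticsearch.sketch");

            Span span = tracer.spanBuilder("example-task").startSpan();
            try (Scope ignored = span.makeCurrent()) {
                // traced work would run here
            } finally {
                span.end();
            }
        }
    }

With only the API compiled against, GlobalOpenTelemetry returns a no-op implementation unless an SDK or agent registers one at runtime, which is presumably why the SDK, exporter, gRPC, and netty dependencies (and their thirdPartyAudit exclusions) could be dropped from this build file.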
diff --git a/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-NOTICE.txt new file mode 100644 index 0000000000000..1e21cb1fb60ec --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-NOTICE.txt @@ -0,0 +1,465 @@ +Elastic APM Java Agent +Copyright 2018-2022 Elasticsearch B.V. + +############################################################################### + +This product includes software licensed under the Apache License 2.0 from the +following sources: + - stagemonitor - Copyright 2014-2017 iSYS Software GmbH + - micrometer + - https://github.com/raphw/weak-lock-free + - openzipkin/brave + - LMAX Disruptor - Copyright 2011 LMAX Ltd. + - Byte Buddy (https://bytebuddy.net) - Copyright Rafael Winterhalter + - JCTools + - https://github.com/jvm-profiling-tools/async-profiler + - https://github.com/real-logic/agrona + - Apache Log4j 2 - https://logging.apache.org/log4j/2.x/license.html + +------------------------------------------------------------------------------ +stagemonitor NOTICE + +stagemonitor +Copyright 2014-2017 iSYS Software GmbH + +This product includes software developed at +iSYS Software GmbH (http://www.isys-software.de/). + +This product bundles jQuery treetable 3.2.0, which is available under the "MIT" license. For details, see +stagemonitor-web-servlet/src/main/resources/stagemonitor/static/jquery-treetable/jquery.treetable.js + +This product bundles Twitter Bootstrap 3.3.2, which is available under the "MIT" license. For details, see +stagemonitor-web-servlet/src/main/resources/stagemonitor/static/bootstrap/bootstrap.min.css + +This product bundles typeahead.js-bootstrap3.less, which is available under the "MIT" license. For details, see +stagemonitor-web-servlet/src/main/resources/stagemonitor/static/typeahead.css + +This product bundles typeahead.js, which is available under the "MIT" license. For details, see +stagemonitor-web-servlet/src/main/resources/stagemonitor/static/typeahead.jquery.min.js + +This product bundles Handlebars 1.3.0, which is available under the "MIT" license. For details, see +stagemonitor-web-servlet/src/main/resources/stagemonitor/static/handlebars.min.js + +This product bundles jQuery 1.11.1, which is available under the "MIT" license. For details, see +stagemonitor-web-servlet/src/main/resources/stagemonitor/static/jquery.1.11.1.min.js + +This product bundles jQuery serializeObject, which is available under the "BSD" license. For details, see +stagemonitor-web-servlet/src/main/resources/stagemonitor/static/jquery.serialize-object.min.js + +This product bundles Bootstrap Growl 2.0.1, which is available under the "MIT" license. For details, see +stagemonitor-web-servlet/src/main/resources/stagemonitor/static/bootstrap/bootstrap-growl.min.js + +This product bundles Animate.css, which is available under the "MIT" license. For details, see +stagemonitor-web-servlet/src/main/resources/stagemonitor/static/animate/animate.min.css + +This product native sigar bindings, which are available under the "Apache 2.0" license. For details, see +stagemonitor-os/src/main/resources/sigar + +This product bundles DataTables 1.10.3, which is available under the "MIT" license. For details, see +stagemonitor-web-servlet/src/main/resources/stagemonitor/static/datatables/jquery.dataTables.min.js + +This product bundles Flot, which is available under the "MIT" license. 
For details, see +stagemonitor-web-servlet/src/main/resources/stagemonitor/static/flot/jquery.flot.min.js + +This product bundles Flot, which is available under the "MIT" license. For details, see +stagemonitor-web-servlet/src/main/resources/stagemonitor/static/flot/jquery.flot.resize.min.js + +This product bundles Flot stack plugin, which is available under the "MIT" license. For details, see +stagemonitor-web-servlet/src/main/resources/stagemonitor/static/flot/jquery.flot.stack.original.js + +This product bundles Flot time plugin, which is available under the "MIT" license. For details, see +stagemonitor-web-servlet/src/main/resources/stagemonitor/static/flot/jquery.flot.time.min.js + +This product bundles Flot tooltip plugin, which is available under the "MIT" license. For details, see +stagemonitor-web-servlet/src/main/resources/stagemonitor/static/flot/jquery.flot.tooltip.min.js + +This product bundles weasel, which is available under the "MIT" license. For details, see +stagemonitor-web-servlet/src/main/resources/eum.debug.js + +This product includes code derived from the Metrics project, which is available under the "Apache 2.0" license. For details, see +stagemonitor-web-servlet/src/main/java/org/stagemonitor/web/metrics/StagemonitorMetricsServlet.java + +This product includes code derived from https://github.com/raphw/weak-lock-free/, which is available under the "Apache 2.0" license. For details, see +stagemonitor-core/src/main/java/org/stagemonitor/core/instrument/WeakConcurrentMap.java + +This product includes code derived from https://github.com/prometheus/client_java/blob/master/simpleclient_dropwizard/src/main/java/io/prometheus/client/dropwizard/DropwizardExports.java, which is available under the "Apache 2.0" license. For details, see +stagemonitor-core/src/main/java/org/stagemonitor/core/metrics/prometheus/StagemonitorPrometheusCollector.java + +This product includes code from https://github.com/uber/jaeger-client-java, which is available under the "MIT" license. +stagemonitor/stagemonitor-tracing/src/main/java/org/stagemonitor/tracing/utils/RateLimiter.java + +This product includes code derived from Google Guava, which is available under the "Apache 2.0" license. For details, see +stagemonitor-core/src/main/java/org/stagemonitor/core/util/InetAddresses.java +stagemonitor-core/src/main/java/org/stagemonitor/core/util/Ints.java +stagemonitor-core/src/main/java/org/stagemonitor/core/util/Assert.java + +This product includes code from Spring Framework, which is available under the "Apache 2.0" license. For details, see +stagemonitor-web-servlet/src/main/java/org/stagemonitor/web/servlet/util/AntPathMatcher.java + +------------------------------------------------------------------------------ +async-profiler NOTICE + +async-profiler +Copyright 2018 - 2020 Andrei Pangin + +This product includes software licensed under CDDL 1.0 from the +following sources: + +This product includes a specialized C++ port of the FlameGraph script, licensed under CDDL, available at +https://github.com/brendangregg/FlameGraph/blob/master/flamegraph.pl + +Copyright 2016 Netflix, Inc. +Copyright 2011 Joyent, Inc. All rights reserved. +Copyright 2011 Brendan Gregg. All rights reserved. + +CDDL HEADER START + +The contents of this file are subject to the terms of the +Common Development and Distribution License (the "License"). +You may not use this file except in compliance with the License. + +You can obtain a copy of the license at docs/cddl1.txt or +http://opensource.org/licenses/CDDL-1.0. 
+See the License for the specific language governing permissions +and limitations under the License. + +When distributing Covered Code, include this CDDL HEADER in each +file and include the License file at docs/cddl1.txt. +If applicable, add the following below this CDDL HEADER, with the +fields enclosed by brackets "[]" replaced with your own identifying +information: Portions Copyright [yyyy] [name of copyright owner] + +CDDL HEADER END + +------------------------------------------------------------------------------ +Apache Log4j NOTICE + +Apache Log4j +Copyright 1999-2021 Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + +ResolverUtil.java +Copyright 2005-2006 Tim Fennell + +Dumbster SMTP test server +Copyright 2004 Jason Paul Kitchen + +TypeUtil.java +Copyright 2002-2012 Ramnivas Laddad, Juergen Hoeller, Chris Beams + +picocli (http://picocli.info) +Copyright 2017 Remko Popma + +------------------------------------------------------------------------------ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +############################################################################### + +This product includes code from https://github.com/ngs-doo/dsl-json, +under The BSD 3-Clause License: + +Copyright (c) 2015, Nova Generacija Softvera d.o.o. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * Neither the name of Nova Generacija Softvera d.o.o. nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +############################################################################### + +This product includes code from slf4j, under MIT License. +It also includes code that is based on some slf4j interfaces. + +Copyright (c) 2004-2011 QOS.ch +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +############################################################################### + +This product includes code from HdrHistogram, dual-licensed under CC0 and BSD 2-Clause License + +The code in this repository code was Written by Gil Tene, Michael Barker, +and Matt Warren, and released to the public domain, as explained at +http://creativecommons.org/publicdomain/zero/1.0/ + +For users of this code who wish to consume it under the "BSD" license +rather than under the public domain or CC0 contribution text mentioned +above, the code found under this directory is *also* provided under the +following license (commonly referred to as the BSD 2-Clause License). This +license does not detract from the above stated release of the code into +the public domain, and simply represents an additional license granted by +the Author. + +----------------------------------------------------------------------------- +** Beginning of "BSD 2-Clause License" text. ** + + Copyright (c) 2012, 2013, 2014, 2015, 2016 Gil Tene + Copyright (c) 2014 Michael Barker + Copyright (c) 2014 Matt Warren + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. 
Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + THE POSSIBILITY OF SUCH DAMAGE. + +############################################################################### diff --git a/x-pack/plugin/apm-integration/licenses/failureaccess-1.0.1.jar.sha1 b/x-pack/plugin/apm-integration/licenses/failureaccess-1.0.1.jar.sha1 deleted file mode 100644 index 4798b37e20691..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/failureaccess-1.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1dcf1de382a0bf95a3d8b0849546c88bac1292c9 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/failureaccess-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/failureaccess-LICENSE.txt deleted file mode 100644 index 261eeb9e9f8b2..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/failureaccess-LICENSE.txt +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/x-pack/plugin/apm-integration/licenses/failureaccess-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/failureaccess-NOTICE.txt deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/x-pack/plugin/apm-integration/licenses/grpc-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/grpc-LICENSE.txt deleted file mode 100644 index d645695673349..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/grpc-LICENSE.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/x-pack/plugin/apm-integration/licenses/grpc-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/grpc-NOTICE.txt deleted file mode 100644 index f70c5620cf75a..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/grpc-NOTICE.txt +++ /dev/null @@ -1,62 +0,0 @@ -Copyright 2014 The gRPC Authors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - ------------------------------------------------------------------------ - -This product contains a modified portion of 'OkHttp', an open source -HTTP & SPDY client for Android and Java applications, which can be obtained -at: - - * LICENSE: - * okhttp/third_party/okhttp/LICENSE (Apache License 2.0) - * HOMEPAGE: - * https://github.com/square/okhttp - * LOCATION_IN_GRPC: - * okhttp/third_party/okhttp - -This product contains a modified portion of 'Envoy', an open source -cloud-native high-performance edge/middle/service proxy, which can be -obtained at: - - * LICENSE: - * xds/third_party/envoy/LICENSE (Apache License 2.0) - * NOTICE: - * xds/third_party/envoy/NOTICE - * HOMEPAGE: - * https://www.envoyproxy.io - * LOCATION_IN_GRPC: - * xds/third_party/envoy - -This product contains a modified portion of 'protoc-gen-validate (PGV)', -an open source protoc plugin to generate polyglot message validators, -which can be obtained at: - - * LICENSE: - * xds/third_party/protoc-gen-validate/LICENSE (Apache License 2.0) - * NOTICE: - * xds/third_party/protoc-gen-validate/NOTICE - * HOMEPAGE: - * https://github.com/envoyproxy/protoc-gen-validate - * LOCATION_IN_GRPC: - * xds/third_party/protoc-gen-validate - -This product contains a modified portion of 'udpa', -an open source universal data plane API, which can be obtained at: - - * LICENSE: - * xds/third_party/udpa/LICENSE (Apache License 2.0) - * HOMEPAGE: - * https://github.com/cncf/udpa - * LOCATION_IN_GRPC: - * xds/third_party/udpa diff --git a/x-pack/plugin/apm-integration/licenses/grpc-api-1.42.1.jar.sha1 b/x-pack/plugin/apm-integration/licenses/grpc-api-1.42.1.jar.sha1 deleted file mode 100644 index bab20c7d44d7e..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/grpc-api-1.42.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4a7f734f57ad5b68e4ac591481eb562cdb3d2a94 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/grpc-context-1.42.1.jar.sha1 b/x-pack/plugin/apm-integration/licenses/grpc-context-1.42.1.jar.sha1 deleted file mode 100644 index b85128c4772ec..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/grpc-context-1.42.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c0cc9e5e08ced39792908aeda77e694bff39cea1 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/grpc-core-1.42.1.jar.sha1 b/x-pack/plugin/apm-integration/licenses/grpc-core-1.42.1.jar.sha1 deleted file mode 100644 index 71d9cdc9b7f1e..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/grpc-core-1.42.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2d142647452a700189908baa488dc928233e8be9 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/grpc-netty-1.42.1.jar.sha1 
b/x-pack/plugin/apm-integration/licenses/grpc-netty-1.42.1.jar.sha1 deleted file mode 100644 index 764083f762f63..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/grpc-netty-1.42.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f2bdcaf11b237122efbd8a30e4177250fde5b458 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/grpc-stub-1.42.1.jar.sha1 b/x-pack/plugin/apm-integration/licenses/grpc-stub-1.42.1.jar.sha1 deleted file mode 100644 index d3390f4e63af9..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/grpc-stub-1.42.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6fa0c2fb4ff581c89b4aab2d47fb2b568503f630 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/guava-31.0.1-jre.jar.sha1 b/x-pack/plugin/apm-integration/licenses/guava-31.0.1-jre.jar.sha1 deleted file mode 100644 index 1906a4f95370c..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/guava-31.0.1-jre.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -119ea2b2bc205b138974d351777b20f02b92704b \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/guava-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/guava-LICENSE.txt deleted file mode 100644 index d645695673349..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/guava-LICENSE.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/x-pack/plugin/apm-integration/licenses/guava-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/guava-NOTICE.txt deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/x-pack/plugin/apm-integration/licenses/netty-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/netty-LICENSE.txt deleted file mode 100644 index d645695673349..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/netty-LICENSE.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/x-pack/plugin/apm-integration/licenses/netty-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/netty-NOTICE.txt deleted file mode 100644 index 5bbf91a14de23..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/netty-NOTICE.txt +++ /dev/null @@ -1,116 +0,0 @@ - - The Netty Project - ================= - -Please visit the Netty web site for more information: - - * http://netty.io/ - -Copyright 2011 The Netty Project - -The Netty Project licenses this file to you under the Apache License, -version 2.0 (the "License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at: - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -License for the specific language governing permissions and limitations -under the License. - -Also, please refer to each LICENSE..txt file, which is located in -the 'license' directory of the distribution file, for the license terms of the -components that this product depends on. - -------------------------------------------------------------------------------- -This product contains the extensions to Java Collections Framework which has -been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene: - - * LICENSE: - * license/LICENSE.jsr166y.txt (Public Domain) - * HOMEPAGE: - * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ - * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ - -This product contains a modified version of Robert Harder's Public Domain -Base64 Encoder and Decoder, which can be obtained at: - - * LICENSE: - * license/LICENSE.base64.txt (Public Domain) - * HOMEPAGE: - * http://iharder.sourceforge.net/current/java/base64/ - -This product contains a modified version of 'JZlib', a re-implementation of -zlib in pure Java, which can be obtained at: - - * LICENSE: - * license/LICENSE.jzlib.txt (BSD Style License) - * HOMEPAGE: - * http://www.jcraft.com/jzlib/ - -This product contains a modified version of 'Webbit', a Java event based -WebSocket and HTTP server: - - * LICENSE: - * license/LICENSE.webbit.txt (BSD License) - * HOMEPAGE: - * https://github.com/joewalnes/webbit - -This product optionally depends on 'Protocol Buffers', Google's data -interchange format, which can be obtained at: - - * LICENSE: - * license/LICENSE.protobuf.txt (New BSD License) - * HOMEPAGE: - * http://code.google.com/p/protobuf/ - -This product optionally depends on 'Bouncy Castle Crypto APIs' to generate -a temporary self-signed X.509 certificate when the JVM does not provide the -equivalent functionality. 
It can be obtained at: - - * LICENSE: - * license/LICENSE.bouncycastle.txt (MIT License) - * HOMEPAGE: - * http://www.bouncycastle.org/ - -This product optionally depends on 'SLF4J', a simple logging facade for Java, -which can be obtained at: - - * LICENSE: - * license/LICENSE.slf4j.txt (MIT License) - * HOMEPAGE: - * http://www.slf4j.org/ - -This product optionally depends on 'Apache Commons Logging', a logging -framework, which can be obtained at: - - * LICENSE: - * license/LICENSE.commons-logging.txt (Apache License 2.0) - * HOMEPAGE: - * http://commons.apache.org/logging/ - -This product optionally depends on 'Apache Log4J', a logging framework, -which can be obtained at: - - * LICENSE: - * license/LICENSE.log4j.txt (Apache License 2.0) - * HOMEPAGE: - * http://logging.apache.org/log4j/ - -This product optionally depends on 'JBoss Logging', a logging framework, -which can be obtained at: - - * LICENSE: - * license/LICENSE.jboss-logging.txt (GNU LGPL 2.1) - * HOMEPAGE: - * http://anonsvn.jboss.org/repos/common/common-logging-spi/ - -This product optionally depends on 'Apache Felix', an open source OSGi -framework implementation, which can be obtained at: - - * LICENSE: - * license/LICENSE.felix.txt (Apache License 2.0) - * HOMEPAGE: - * http://felix.apache.org/ diff --git a/x-pack/plugin/apm-integration/licenses/netty-buffer-4.1.74.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-buffer-4.1.74.Final.jar.sha1 deleted file mode 100644 index d667ddc5111f2..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/netty-buffer-4.1.74.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fd49b6a3a7aa2e5d4922cf125b52d880c1a8b7bd \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-codec-4.1.74.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-codec-4.1.74.Final.jar.sha1 deleted file mode 100644 index 057f44bbd7831..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/netty-codec-4.1.74.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cbc1d14c827a27cef5d6583f8978445c8b4445d2 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-codec-http-4.1.74.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-codec-http-4.1.74.Final.jar.sha1 deleted file mode 100644 index 82f26e527ec26..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/netty-codec-http-4.1.74.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -73c7bd6341cb59feab6f56200b1e2d908b054fd4 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-codec-http2-4.1.74.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-codec-http2-4.1.74.Final.jar.sha1 deleted file mode 100644 index e5f4d78b1ce4e..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/netty-codec-http2-4.1.74.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5eaaf9147527ec435fbecf3c57f5b8264886d126 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-common-4.1.74.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-common-4.1.74.Final.jar.sha1 deleted file mode 100644 index 9705a19ec50ad..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/netty-common-4.1.74.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -891b8ad3206469762b20c73f45d0d2e24cff3dd2 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-handler-4.1.74.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-handler-4.1.74.Final.jar.sha1 deleted file mode 100644 index 0ed1df7b49273..0000000000000 --- 
a/x-pack/plugin/apm-integration/licenses/netty-handler-4.1.74.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fc9d000dfaea5719192929f943357a89f1cbf81c \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-resolver-4.1.74.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-resolver-4.1.74.Final.jar.sha1 deleted file mode 100644 index aa8170f51cd2f..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/netty-resolver-4.1.74.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9209265687a125259fe0396b57d8ccc79697d40e \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/netty-transport-4.1.74.Final.jar.sha1 b/x-pack/plugin/apm-integration/licenses/netty-transport-4.1.74.Final.jar.sha1 deleted file mode 100644 index 825ded05e1283..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/netty-transport-4.1.74.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d760fb9f5d12c93887e171c442e30862a9898d59 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/okhttp-3.14.9.jar.sha1 b/x-pack/plugin/apm-integration/licenses/okhttp-3.14.9.jar.sha1 deleted file mode 100644 index b6f5113e444cc..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/okhttp-3.14.9.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3e6d101343c7ea687cd593e4990f73b25c878383 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/okhttp-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/okhttp-NOTICE.txt deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/x-pack/plugin/apm-integration/licenses/okio-1.17.2.jar.sha1 b/x-pack/plugin/apm-integration/licenses/okio-1.17.2.jar.sha1 deleted file mode 100644 index bf2e361cabc50..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/okio-1.17.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -78c7820b205002da4d2d137f6f312bd64b3d6049 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/okio-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/okio-LICENSE.txt deleted file mode 100644 index 261eeb9e9f8b2..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/okio-LICENSE.txt +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/x-pack/plugin/apm-integration/licenses/okio-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/okio-NOTICE.txt deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-api-metrics-1.9.0-alpha.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-api-metrics-1.9.0-alpha.jar.sha1 deleted file mode 100644 index dafb3e3dc1241..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/opentelemetry-api-metrics-1.9.0-alpha.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -db9e872c623f59e84e520f28a6af2baf1a4d2001 diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-logging-1.9.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-logging-1.9.0.jar.sha1 deleted file mode 100644 index 8b0fd579dcfde..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-logging-1.9.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3e8ff4f8da800522a6cc7c64eef0b2dc608f8c16 diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-otlp-common-1.9.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-otlp-common-1.9.0.jar.sha1 deleted file mode 100644 index 31a9e9a11a774..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-otlp-common-1.9.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ae38f65225d617d80d7b6b4abf109b6edda08112 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-otlp-trace-1.9.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-otlp-trace-1.9.0.jar.sha1 deleted file mode 100644 index cbc26ba2d7da1..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/opentelemetry-exporter-otlp-trace-1.9.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -395fbd7c26796cf5233f003afc20fad7479f9a6f \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-1.9.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-1.9.0.jar.sha1 deleted file mode 100644 index 53b409c6d62a6..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-1.9.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5fde191eb694e83a3df923544a920bc7187a15e6 diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-1.9.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-1.9.0.jar.sha1 deleted file mode 100644 index e6dde0fe6e543..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-common-1.9.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f834a3cfb30e7a80768b2205940d4e6d203b9e7a diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-1.9.0-alpha.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-1.9.0-alpha.jar.sha1 deleted file mode 100644 index 92030be5e7dce..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-metrics-1.9.0-alpha.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -247a4cbc19fc934d19e442900bd1b115914fd132 diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-1.9.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-1.9.0.jar.sha1 deleted file mode 100644 index d5f9b677edeb8..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/opentelemetry-sdk-trace-1.9.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d7994581e392bc43f4bdb529f1e19ace8625f41a diff --git a/x-pack/plugin/apm-integration/licenses/perfmark-LICENSE.txt 
b/x-pack/plugin/apm-integration/licenses/perfmark-LICENSE.txt deleted file mode 100644 index 261eeb9e9f8b2..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/perfmark-LICENSE.txt +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. 
Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/x-pack/plugin/apm-integration/licenses/perfmark-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/perfmark-NOTICE.txt deleted file mode 100644 index 63e4853415dff..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/perfmark-NOTICE.txt +++ /dev/null @@ -1,41 +0,0 @@ - -Copyright 2019 Google LLC - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - ------------------------------------------------------------------------ - -This product contains a modified portion of 'Catapult', an open source -Trace Event viewer for Chome, Linux, and Android applications, which can -be obtained at: - - * LICENSE: - * traceviewer/src/main/resources/io/perfmark/traceviewer/third_party/catapult/LICENSE (New BSD License) - * HOMEPAGE: - * https://github.com/catapult-project/catapult - -This product contains a modified portion of 'Polymer', a library for Web -Components, which can be obtained at: - * LICENSE: - * traceviewer/src/main/resources/io/perfmark/traceviewer/third_party/polymer/LICENSE (New BSD License) - * HOMEPAGE: - * https://github.com/Polymer/polymer - - -This product contains a modified portion of 'ASM', an open source -Java Bytecode library, which can be obtained at: - - * LICENSE: - * agent/src/main/resources/io/perfmark/agent/third_party/asm/LICENSE (BSD style License) - * HOMEPAGE: - * https://asm.ow2.io/ diff --git a/x-pack/plugin/apm-integration/licenses/perfmark-api-0.24.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/perfmark-api-0.24.0.jar.sha1 deleted file mode 100644 index 15b718b038ff5..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/perfmark-api-0.24.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -135f31424e015f26aa8af8f6df8add4490acac22 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/perfmark-impl-0.24.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/perfmark-impl-0.24.0.jar.sha1 deleted file mode 100644 index 7d2e3e0d40f4e..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/perfmark-impl-0.24.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7c0a611d5eda67cc8dfddad9af1c626ed3da91a2 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 305d4054c087b..30ce65713b2ae 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -28,6 +28,8 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.tracing.Traceable; +import 
java.security.AccessController; +import java.security.PrivilegedAction; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -118,13 +120,14 @@ protected void doClose() { } private void createApmServices() { - assert enabled; - - var openTelemetry = GlobalOpenTelemetry.get(); - var tracer = openTelemetry.getTracer("elasticsearch", Version.CURRENT.toString()); - + assert this.enabled; assert this.services == null; - this.services = new APMServices(tracer, openTelemetry); + + this.services = AccessController.doPrivileged((PrivilegedAction) () -> { + var openTelemetry = GlobalOpenTelemetry.get(); + var tracer = openTelemetry.getTracer("elasticsearch", Version.CURRENT.toString()); + return new APMServices(tracer, openTelemetry); + }); } private void destroyApmServices() { @@ -148,63 +151,66 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { } spans.computeIfAbsent(traceable.getSpanId(), spanId -> { - // services might be in shutdown state by this point, but this is handled by the open telemetry internally - final SpanBuilder spanBuilder = services.tracer.spanBuilder(traceable.getSpanName()); - Context parentContext = getParentSpanContext(); - if (parentContext != null) { - spanBuilder.setParent(parentContext); - } + return AccessController.doPrivileged((PrivilegedAction) () -> { + // services might be in shutdown state by this point, but this is handled by the open telemetry internally + final SpanBuilder spanBuilder = services.tracer.spanBuilder(traceable.getSpanName()); + Context parentContext = getParentSpanContext(); + if (parentContext != null) { + spanBuilder.setParent(parentContext); + } - for (Map.Entry entry : traceable.getAttributes().entrySet()) { - final Object value = entry.getValue(); - if (value instanceof String) { - spanBuilder.setAttribute(entry.getKey(), (String) value); - } else if (value instanceof Long) { - spanBuilder.setAttribute(entry.getKey(), (Long) value); - } else if (value instanceof Integer) { - spanBuilder.setAttribute(entry.getKey(), (Integer) value); - } else if (value instanceof Double) { - spanBuilder.setAttribute(entry.getKey(), (Double) value); - } else if (value instanceof Boolean) { - spanBuilder.setAttribute(entry.getKey(), (Boolean) value); - } else { - throw new IllegalArgumentException( - "span attributes do not support value type of [" + value.getClass().getCanonicalName() + "]" - ); + for (Map.Entry entry : traceable.getAttributes().entrySet()) { + final Object value = entry.getValue(); + if (value instanceof String) { + spanBuilder.setAttribute(entry.getKey(), (String) value); + } else if (value instanceof Long) { + spanBuilder.setAttribute(entry.getKey(), (Long) value); + } else if (value instanceof Integer) { + spanBuilder.setAttribute(entry.getKey(), (Integer) value); + } else if (value instanceof Double) { + spanBuilder.setAttribute(entry.getKey(), (Double) value); + } else if (value instanceof Boolean) { + spanBuilder.setAttribute(entry.getKey(), (Boolean) value); + } else { + throw new IllegalArgumentException( + "span attributes do not support value type of [" + value.getClass().getCanonicalName() + "]" + ); + } } - } - // hack transactions to avoid the 'custom' type - // this one is not part of OTel semantic attributes - spanBuilder.setAttribute("type", "elasticsearch"); + // hack transactions to avoid the 'custom' type + // this one is not part of OTel semantic attributes + spanBuilder.setAttribute("type", "elasticsearch"); - // hack spans to avoid the 'app' span.type, will make 
it use external/elasticsearch - // also allows to set destination resource name in map - spanBuilder.setAttribute(SemanticAttributes.MESSAGING_SYSTEM, "elasticsearch"); - spanBuilder.setAttribute(SemanticAttributes.MESSAGING_DESTINATION, clusterService.getNodeName()); + // hack spans to avoid the 'app' span.type, will make it use external/elasticsearch + // also allows to set destination resource name in map + spanBuilder.setAttribute(SemanticAttributes.MESSAGING_SYSTEM, "elasticsearch"); + spanBuilder.setAttribute(SemanticAttributes.MESSAGING_DESTINATION, clusterService.getNodeName()); - // this will duplicate the "resource attributes" that are defined globally - // but providing them as span attributes allow easier mapping through labels as otel attributes are stored as-is only in - // 7.16. - spanBuilder.setAttribute(Traceable.AttributeKeys.NODE_NAME, clusterService.getNodeName()); - spanBuilder.setAttribute(Traceable.AttributeKeys.CLUSTER_NAME, clusterService.getClusterName().value()); + // this will duplicate the "resource attributes" that are defined globally + // but providing them as span attributes allow easier mapping through labels as otel attributes are stored as-is only in + // 7.16. + spanBuilder.setAttribute(Traceable.AttributeKeys.NODE_NAME, clusterService.getNodeName()); + spanBuilder.setAttribute(Traceable.AttributeKeys.CLUSTER_NAME, clusterService.getClusterName().value()); - final String xOpaqueId = threadPool.getThreadContext().getHeader(Task.X_OPAQUE_ID_HTTP_HEADER); - if (xOpaqueId != null) { - spanBuilder.setAttribute("es.x-opaque-id", xOpaqueId); - } - final Span span = spanBuilder.startSpan(); + final String xOpaqueId = threadPool.getThreadContext().getHeader(Task.X_OPAQUE_ID_HTTP_HEADER); + if (xOpaqueId != null) { + spanBuilder.setAttribute("es.x-opaque-id", xOpaqueId); + } + + final Span span = spanBuilder.startSpan(); - final Map spanHeaders = new HashMap<>(); - final Context contextForNewSpan = Context.current().with(span); - services.openTelemetry.getPropagators().getTextMapPropagator().inject(contextForNewSpan, spanHeaders, Map::put); - spanHeaders.keySet().removeIf(k -> isSupportedContextKey(k) == false); + final Map spanHeaders = new HashMap<>(); + final Context contextForNewSpan = Context.current().with(span); + services.openTelemetry.getPropagators().getTextMapPropagator().inject(contextForNewSpan, spanHeaders, Map::put); + spanHeaders.keySet().removeIf(k -> isSupportedContextKey(k) == false); - // Ignore the result here, we don't need to restore the threadContext - threadContext.removeRequestHeaders(TRACE_HEADERS); - threadContext.putHeader(spanHeaders); + // Ignore the result here, we don't need to restore the threadContext + threadContext.removeRequestHeaders(TRACE_HEADERS); + threadContext.putHeader(spanHeaders); - return span; + return span; + }); }); } diff --git a/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy b/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy index db2a5fd74951a..d8cf75c7a7e4d 100644 --- a/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy +++ b/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy @@ -13,12 +13,16 @@ grant { // required by io.grpc.internal.DnsNameResolver in grpc-core permission java.net.NetPermission "getProxySelector"; permission java.lang.RuntimePermission "getClassLoader"; + permission java.lang.RuntimePermission "createClassLoader"; permission java.net.SocketPermission "*", "connect,resolve"; + 
permission java.lang.RuntimePermission "accessSystemModules"; }; grant codeBase "${codebase.elastic-apm-agent}" { - permission java.lang.RuntimePermission "setFactory"; - permission java.lang.RuntimePermission "getClassLoader"; - permission java.lang.RuntimePermission "setContextClassLoader"; - permission java.net.SocketPermission "*", "connect,resolve"; + permission java.lang.RuntimePermission "setFactory"; + permission java.lang.RuntimePermission "getClassLoader"; + permission java.lang.RuntimePermission "createClassLoader"; + permission java.lang.RuntimePermission "setContextClassLoader"; + permission java.net.SocketPermission "*", "connect,resolve"; + permission java.lang.RuntimePermission "accessSystemModules"; }; From b7727140c7635e89455388a577b71c07589f9f66 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 23 Mar 2022 10:15:01 +0000 Subject: [PATCH 33/90] Don't log graphviz by default --- .../src/main/java/org/elasticsearch/xpack/apm/APMTracer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 73c0ed6f2a25b..586b9022d3caf 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -293,7 +293,7 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { threadContext.removeRequestHeaders(TRACE_HEADERS); threadContext.putHeader(spanHeaders); - logGraphviz(span); + // logGraphviz(span); return span; }); From 2f45a5151413fd93625cddf545d3080c66e5a783 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 23 Mar 2022 16:10:55 +0000 Subject: [PATCH 34/90] Tweaks --- .../cluster/InternalClusterInfoService.java | 189 +++++++++--------- x-pack/plugin/apm-integration/build.gradle | 2 + .../elasticsearch/xpack/apm/APMTracer.java | 25 +-- 3 files changed, 112 insertions(+), 104 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java index 57980ec033f87..fcbc62f65e1f2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java +++ b/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java @@ -38,6 +38,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.StoreStats; import org.elasticsearch.monitor.fs.FsInfo; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; @@ -171,120 +172,124 @@ void execute() { nodesStatsRequest.clear(); nodesStatsRequest.addMetric(NodesStatsRequest.Metric.FS.metricName()); nodesStatsRequest.timeout(fetchTimeout); - client.admin().cluster().nodesStats(nodesStatsRequest, ActionListener.runAfter(new ActionListener<>() { - @Override - public void onResponse(NodesStatsResponse nodesStatsResponse) { - logger.trace("received node stats response"); - - for (final FailedNodeException failure : nodesStatsResponse.failures()) { - logger.warn( - new ParameterizedMessage("failed to retrieve stats for node [{}]", failure.nodeId()), - failure.getCause() + try (var ignored = threadPool.getThreadContext().stashContext()) { + client.admin().cluster().nodesStats(nodesStatsRequest, ActionListener.runAfter(new ActionListener<>() { + @Override + public void 
onResponse(NodesStatsResponse nodesStatsResponse) { + logger.trace("received node stats response"); + + for (final FailedNodeException failure : nodesStatsResponse.failures()) { + logger.warn( + new ParameterizedMessage("failed to retrieve stats for node [{}]", failure.nodeId()), + failure.getCause() + ); + } + + ImmutableOpenMap.Builder leastAvailableUsagesBuilder = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder mostAvailableUsagesBuilder = ImmutableOpenMap.builder(); + fillDiskUsagePerNode( + adjustNodesStats(nodesStatsResponse.getNodes()), + leastAvailableUsagesBuilder, + mostAvailableUsagesBuilder ); + leastAvailableSpaceUsages = leastAvailableUsagesBuilder.build(); + mostAvailableSpaceUsages = mostAvailableUsagesBuilder.build(); } - ImmutableOpenMap.Builder leastAvailableUsagesBuilder = ImmutableOpenMap.builder(); - ImmutableOpenMap.Builder mostAvailableUsagesBuilder = ImmutableOpenMap.builder(); - fillDiskUsagePerNode( - adjustNodesStats(nodesStatsResponse.getNodes()), - leastAvailableUsagesBuilder, - mostAvailableUsagesBuilder - ); - leastAvailableSpaceUsages = leastAvailableUsagesBuilder.build(); - mostAvailableSpaceUsages = mostAvailableUsagesBuilder.build(); - } - - @Override - public void onFailure(Exception e) { - if (e instanceof ClusterBlockException) { - logger.trace("failed to retrieve node stats", e); - } else { - logger.warn("failed to retrieve node stats", e); + @Override + public void onFailure(Exception e) { + if (e instanceof ClusterBlockException) { + logger.trace("failed to retrieve node stats", e); + } else { + logger.warn("failed to retrieve node stats", e); + } + leastAvailableSpaceUsages = ImmutableOpenMap.of(); + mostAvailableSpaceUsages = ImmutableOpenMap.of(); } - leastAvailableSpaceUsages = ImmutableOpenMap.of(); - mostAvailableSpaceUsages = ImmutableOpenMap.of(); - } - }, this::onStatsProcessed)); + }, this::onStatsProcessed)); + } final IndicesStatsRequest indicesStatsRequest = new IndicesStatsRequest(); indicesStatsRequest.clear(); indicesStatsRequest.store(true); indicesStatsRequest.indicesOptions(IndicesOptions.STRICT_EXPAND_OPEN_CLOSED_HIDDEN); indicesStatsRequest.timeout(fetchTimeout); - client.admin().indices().stats(indicesStatsRequest, ActionListener.runAfter(new ActionListener<>() { - @Override - public void onResponse(IndicesStatsResponse indicesStatsResponse) { - logger.trace("received indices stats response"); - - if (indicesStatsResponse.getShardFailures().length > 0) { - final Set failedNodeIds = new HashSet<>(); - for (final DefaultShardOperationFailedException shardFailure : indicesStatsResponse.getShardFailures()) { - if (shardFailure.getCause()instanceof final FailedNodeException failedNodeException) { - if (failedNodeIds.add(failedNodeException.nodeId())) { + try (var ignored = threadPool.getThreadContext().stashContext()) { + client.admin().indices().stats(indicesStatsRequest, ActionListener.runAfter(new ActionListener<>() { + @Override + public void onResponse(IndicesStatsResponse indicesStatsResponse) { + logger.trace("received indices stats response"); + + if (indicesStatsResponse.getShardFailures().length > 0) { + final Set failedNodeIds = new HashSet<>(); + for (final DefaultShardOperationFailedException shardFailure : indicesStatsResponse.getShardFailures()) { + if (shardFailure.getCause() instanceof final FailedNodeException failedNodeException) { + if (failedNodeIds.add(failedNodeException.nodeId())) { + logger.warn( + new ParameterizedMessage( + "failed to retrieve shard stats from node [{}]", + 
failedNodeException.nodeId() + ), + failedNodeException.getCause() + ); + } + logger.trace( + new ParameterizedMessage( + "failed to retrieve stats for shard [{}][{}]", + shardFailure.index(), + shardFailure.shardId() + ), + shardFailure.getCause() + ); + } else { logger.warn( new ParameterizedMessage( - "failed to retrieve shard stats from node [{}]", - failedNodeException.nodeId() + "failed to retrieve stats for shard [{}][{}]", + shardFailure.index(), + shardFailure.shardId() ), - failedNodeException.getCause() + shardFailure.getCause() ); } - logger.trace( - new ParameterizedMessage( - "failed to retrieve stats for shard [{}][{}]", - shardFailure.index(), - shardFailure.shardId() - ), - shardFailure.getCause() - ); - } else { - logger.warn( - new ParameterizedMessage( - "failed to retrieve stats for shard [{}][{}]", - shardFailure.index(), - shardFailure.shardId() - ), - shardFailure.getCause() - ); } } - } - final ShardStats[] stats = indicesStatsResponse.getShards(); - final ImmutableOpenMap.Builder shardSizeByIdentifierBuilder = ImmutableOpenMap.builder(); - final ImmutableOpenMap.Builder shardDataSetSizeBuilder = ImmutableOpenMap.builder(); - final ImmutableOpenMap.Builder dataPathByShardRoutingBuilder = ImmutableOpenMap.builder(); - final Map reservedSpaceBuilders = new HashMap<>(); - buildShardLevelInfo( - stats, - shardSizeByIdentifierBuilder, - shardDataSetSizeBuilder, - dataPathByShardRoutingBuilder, - reservedSpaceBuilders - ); + final ShardStats[] stats = indicesStatsResponse.getShards(); + final ImmutableOpenMap.Builder shardSizeByIdentifierBuilder = ImmutableOpenMap.builder(); + final ImmutableOpenMap.Builder shardDataSetSizeBuilder = ImmutableOpenMap.builder(); + final ImmutableOpenMap.Builder dataPathByShardRoutingBuilder = ImmutableOpenMap.builder(); + final Map reservedSpaceBuilders = new HashMap<>(); + buildShardLevelInfo( + stats, + shardSizeByIdentifierBuilder, + shardDataSetSizeBuilder, + dataPathByShardRoutingBuilder, + reservedSpaceBuilders + ); - final ImmutableOpenMap.Builder rsrvdSpace = ImmutableOpenMap - .builder(); - reservedSpaceBuilders.forEach((nodeAndPath, builder) -> rsrvdSpace.put(nodeAndPath, builder.build())); + final ImmutableOpenMap.Builder rsrvdSpace = ImmutableOpenMap + .builder(); + reservedSpaceBuilders.forEach((nodeAndPath, builder) -> rsrvdSpace.put(nodeAndPath, builder.build())); - indicesStatsSummary = new IndicesStatsSummary( - shardSizeByIdentifierBuilder.build(), - shardDataSetSizeBuilder.build(), - dataPathByShardRoutingBuilder.build(), - rsrvdSpace.build() - ); - } + indicesStatsSummary = new IndicesStatsSummary( + shardSizeByIdentifierBuilder.build(), + shardDataSetSizeBuilder.build(), + dataPathByShardRoutingBuilder.build(), + rsrvdSpace.build() + ); + } - @Override - public void onFailure(Exception e) { - if (e instanceof ClusterBlockException) { - logger.trace("failed to retrieve indices stats", e); - } else { - logger.warn("failed to retrieve indices stats", e); + @Override + public void onFailure(Exception e) { + if (e instanceof ClusterBlockException) { + logger.trace("failed to retrieve indices stats", e); + } else { + logger.warn("failed to retrieve indices stats", e); + } + indicesStatsSummary = IndicesStatsSummary.EMPTY; } - indicesStatsSummary = IndicesStatsSummary.EMPTY; - } - }, this::onStatsProcessed)); + }, this::onStatsProcessed)); + } } private void onStatsProcessed() { diff --git a/x-pack/plugin/apm-integration/build.gradle b/x-pack/plugin/apm-integration/build.gradle index a9e4b567dfb9e..74f76de98bbb5 100644 --- 
a/x-pack/plugin/apm-integration/build.gradle +++ b/x-pack/plugin/apm-integration/build.gradle @@ -120,6 +120,8 @@ tasks.named("thirdPartyAudit").configure { 'com.aayushatharva.brotli4j.encoder.Encoder$Mode', 'com.aayushatharva.brotli4j.encoder.Encoder$Parameters', 'com.aayushatharva.brotli4j.encoder.Encoder', + 'com.fasterxml.jackson.core.JsonFactory', + 'com.fasterxml.jackson.core.JsonGenerator', 'com.github.luben.zstd.Zstd', 'com.google.gson.stream.JsonReader', 'com.google.gson.stream.JsonToken', diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 586b9022d3caf..869b83bffd744 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -20,7 +20,6 @@ import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.trace.ReadableSpan; import io.opentelemetry.sdk.trace.SdkTracerProvider; import io.opentelemetry.sdk.trace.SpanProcessor; import io.opentelemetry.sdk.trace.data.SpanData; @@ -50,7 +49,6 @@ import java.security.AccessController; import java.security.PrivilegedAction; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -269,8 +267,8 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { spanBuilder.setAttribute(SemanticAttributes.MESSAGING_DESTINATION, clusterService.getNodeName()); } -// spanBuilder.setAttribute(SemanticAttributes.DB_SYSTEM, "elasticsearch"); -// spanBuilder.setAttribute(SemanticAttributes.DB_NAME, clusterService.getNodeName()); + // spanBuilder.setAttribute(SemanticAttributes.DB_SYSTEM, "elasticsearch"); + // spanBuilder.setAttribute(SemanticAttributes.DB_NAME, clusterService.getNodeName()); // this will duplicate the "resource attributes" that are defined globally // but providing them as span attributes allow easier mapping through labels as otel attributes are stored as-is only in @@ -371,11 +369,6 @@ public void onTraceStopped(Traceable traceable) { if (span != null) { span.end(); } - // TODO: geoip-downloader[c] isn't getting stopped? 
- // LOGGER.warn( - // "Active spans after stopped trace: {}", - // spans.values().stream().map(Tuple::v1).map(span -> ((ReadWriteSpan) span).getName()).toList() - // ); } @Override @@ -488,10 +481,18 @@ private static void logGraphviz(Span span) { j = spanStr.indexOf(",", i); String spanName = spanStr.substring(i + 5, j); -// LOGGER.warn("BADGER: {}", span); - if (CACHE.add(spanId)) { - LOGGER.warn("BADGER: __{} [label=\"{}\"]", spanId, spanName); + Map attrs = new HashMap<>(); + attrs.put("label", spanName); + if (spanName.startsWith("internal:")) { + attrs.put("style", "filled"); + attrs.put("fillcolor", "pink"); + } + final String attrsString = attrs.entrySet() + .stream() + .map(each -> each.getKey() + "=\"" + each.getValue() + "\"") + .collect(Collectors.joining(",")); + LOGGER.warn("BADGER: __{} [{}]", spanId, attrsString); } if (parentSpanId != null) { From ec612a6446e971caec9e737ca94a069039b2e0f0 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Tue, 29 Mar 2022 13:12:28 +0100 Subject: [PATCH 35/90] Rework trace header stashing When starting a new tracing context, move the existing trace headers in the thread context so that they are prefixed with `parent_`. This then means that code elsewhere can add new trace headers without having to fiddle with the context first. Also wrap methods in TaskManager to better manage context around tasks. --- .../cluster/InternalClusterInfoService.java | 189 +++++++++--------- .../common/util/concurrent/ThreadContext.java | 28 +++ .../elasticsearch/rest/RestController.java | 4 +- .../org/elasticsearch/tasks/TaskManager.java | 111 +++++----- .../elasticsearch/xpack/apm/APMTracer.java | 12 +- .../security/authz/AuthorizationService.java | 78 ++++---- 6 files changed, 230 insertions(+), 192 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java index fcbc62f65e1f2..57980ec033f87 100644 --- a/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java +++ b/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java @@ -38,7 +38,6 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.StoreStats; import org.elasticsearch.monitor.fs.FsInfo; -import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; @@ -172,124 +171,120 @@ void execute() { nodesStatsRequest.clear(); nodesStatsRequest.addMetric(NodesStatsRequest.Metric.FS.metricName()); nodesStatsRequest.timeout(fetchTimeout); - try (var ignored = threadPool.getThreadContext().stashContext()) { - client.admin().cluster().nodesStats(nodesStatsRequest, ActionListener.runAfter(new ActionListener<>() { - @Override - public void onResponse(NodesStatsResponse nodesStatsResponse) { - logger.trace("received node stats response"); - - for (final FailedNodeException failure : nodesStatsResponse.failures()) { - logger.warn( - new ParameterizedMessage("failed to retrieve stats for node [{}]", failure.nodeId()), - failure.getCause() - ); - } - - ImmutableOpenMap.Builder leastAvailableUsagesBuilder = ImmutableOpenMap.builder(); - ImmutableOpenMap.Builder mostAvailableUsagesBuilder = ImmutableOpenMap.builder(); - fillDiskUsagePerNode( - adjustNodesStats(nodesStatsResponse.getNodes()), - leastAvailableUsagesBuilder, - mostAvailableUsagesBuilder + client.admin().cluster().nodesStats(nodesStatsRequest, ActionListener.runAfter(new ActionListener<>() { + @Override + public 
void onResponse(NodesStatsResponse nodesStatsResponse) { + logger.trace("received node stats response"); + + for (final FailedNodeException failure : nodesStatsResponse.failures()) { + logger.warn( + new ParameterizedMessage("failed to retrieve stats for node [{}]", failure.nodeId()), + failure.getCause() ); - leastAvailableSpaceUsages = leastAvailableUsagesBuilder.build(); - mostAvailableSpaceUsages = mostAvailableUsagesBuilder.build(); } - @Override - public void onFailure(Exception e) { - if (e instanceof ClusterBlockException) { - logger.trace("failed to retrieve node stats", e); - } else { - logger.warn("failed to retrieve node stats", e); - } - leastAvailableSpaceUsages = ImmutableOpenMap.of(); - mostAvailableSpaceUsages = ImmutableOpenMap.of(); + ImmutableOpenMap.Builder leastAvailableUsagesBuilder = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder mostAvailableUsagesBuilder = ImmutableOpenMap.builder(); + fillDiskUsagePerNode( + adjustNodesStats(nodesStatsResponse.getNodes()), + leastAvailableUsagesBuilder, + mostAvailableUsagesBuilder + ); + leastAvailableSpaceUsages = leastAvailableUsagesBuilder.build(); + mostAvailableSpaceUsages = mostAvailableUsagesBuilder.build(); + } + + @Override + public void onFailure(Exception e) { + if (e instanceof ClusterBlockException) { + logger.trace("failed to retrieve node stats", e); + } else { + logger.warn("failed to retrieve node stats", e); } - }, this::onStatsProcessed)); - } + leastAvailableSpaceUsages = ImmutableOpenMap.of(); + mostAvailableSpaceUsages = ImmutableOpenMap.of(); + } + }, this::onStatsProcessed)); final IndicesStatsRequest indicesStatsRequest = new IndicesStatsRequest(); indicesStatsRequest.clear(); indicesStatsRequest.store(true); indicesStatsRequest.indicesOptions(IndicesOptions.STRICT_EXPAND_OPEN_CLOSED_HIDDEN); indicesStatsRequest.timeout(fetchTimeout); - try (var ignored = threadPool.getThreadContext().stashContext()) { - client.admin().indices().stats(indicesStatsRequest, ActionListener.runAfter(new ActionListener<>() { - @Override - public void onResponse(IndicesStatsResponse indicesStatsResponse) { - logger.trace("received indices stats response"); - - if (indicesStatsResponse.getShardFailures().length > 0) { - final Set failedNodeIds = new HashSet<>(); - for (final DefaultShardOperationFailedException shardFailure : indicesStatsResponse.getShardFailures()) { - if (shardFailure.getCause() instanceof final FailedNodeException failedNodeException) { - if (failedNodeIds.add(failedNodeException.nodeId())) { - logger.warn( - new ParameterizedMessage( - "failed to retrieve shard stats from node [{}]", - failedNodeException.nodeId() - ), - failedNodeException.getCause() - ); - } - logger.trace( - new ParameterizedMessage( - "failed to retrieve stats for shard [{}][{}]", - shardFailure.index(), - shardFailure.shardId() - ), - shardFailure.getCause() - ); - } else { + client.admin().indices().stats(indicesStatsRequest, ActionListener.runAfter(new ActionListener<>() { + @Override + public void onResponse(IndicesStatsResponse indicesStatsResponse) { + logger.trace("received indices stats response"); + + if (indicesStatsResponse.getShardFailures().length > 0) { + final Set failedNodeIds = new HashSet<>(); + for (final DefaultShardOperationFailedException shardFailure : indicesStatsResponse.getShardFailures()) { + if (shardFailure.getCause()instanceof final FailedNodeException failedNodeException) { + if (failedNodeIds.add(failedNodeException.nodeId())) { logger.warn( new ParameterizedMessage( - "failed to retrieve stats 
for shard [{}][{}]", - shardFailure.index(), - shardFailure.shardId() + "failed to retrieve shard stats from node [{}]", + failedNodeException.nodeId() ), - shardFailure.getCause() + failedNodeException.getCause() ); } + logger.trace( + new ParameterizedMessage( + "failed to retrieve stats for shard [{}][{}]", + shardFailure.index(), + shardFailure.shardId() + ), + shardFailure.getCause() + ); + } else { + logger.warn( + new ParameterizedMessage( + "failed to retrieve stats for shard [{}][{}]", + shardFailure.index(), + shardFailure.shardId() + ), + shardFailure.getCause() + ); } } + } - final ShardStats[] stats = indicesStatsResponse.getShards(); - final ImmutableOpenMap.Builder shardSizeByIdentifierBuilder = ImmutableOpenMap.builder(); - final ImmutableOpenMap.Builder shardDataSetSizeBuilder = ImmutableOpenMap.builder(); - final ImmutableOpenMap.Builder dataPathByShardRoutingBuilder = ImmutableOpenMap.builder(); - final Map reservedSpaceBuilders = new HashMap<>(); - buildShardLevelInfo( - stats, - shardSizeByIdentifierBuilder, - shardDataSetSizeBuilder, - dataPathByShardRoutingBuilder, - reservedSpaceBuilders - ); + final ShardStats[] stats = indicesStatsResponse.getShards(); + final ImmutableOpenMap.Builder shardSizeByIdentifierBuilder = ImmutableOpenMap.builder(); + final ImmutableOpenMap.Builder shardDataSetSizeBuilder = ImmutableOpenMap.builder(); + final ImmutableOpenMap.Builder dataPathByShardRoutingBuilder = ImmutableOpenMap.builder(); + final Map reservedSpaceBuilders = new HashMap<>(); + buildShardLevelInfo( + stats, + shardSizeByIdentifierBuilder, + shardDataSetSizeBuilder, + dataPathByShardRoutingBuilder, + reservedSpaceBuilders + ); - final ImmutableOpenMap.Builder rsrvdSpace = ImmutableOpenMap - .builder(); - reservedSpaceBuilders.forEach((nodeAndPath, builder) -> rsrvdSpace.put(nodeAndPath, builder.build())); + final ImmutableOpenMap.Builder rsrvdSpace = ImmutableOpenMap + .builder(); + reservedSpaceBuilders.forEach((nodeAndPath, builder) -> rsrvdSpace.put(nodeAndPath, builder.build())); - indicesStatsSummary = new IndicesStatsSummary( - shardSizeByIdentifierBuilder.build(), - shardDataSetSizeBuilder.build(), - dataPathByShardRoutingBuilder.build(), - rsrvdSpace.build() - ); - } + indicesStatsSummary = new IndicesStatsSummary( + shardSizeByIdentifierBuilder.build(), + shardDataSetSizeBuilder.build(), + dataPathByShardRoutingBuilder.build(), + rsrvdSpace.build() + ); + } - @Override - public void onFailure(Exception e) { - if (e instanceof ClusterBlockException) { - logger.trace("failed to retrieve indices stats", e); - } else { - logger.warn("failed to retrieve indices stats", e); - } - indicesStatsSummary = IndicesStatsSummary.EMPTY; + @Override + public void onFailure(Exception e) { + if (e instanceof ClusterBlockException) { + logger.trace("failed to retrieve indices stats", e); + } else { + logger.warn("failed to retrieve indices stats", e); } - }, this::onStatsProcessed)); - } + indicesStatsSummary = IndicesStatsSummary.EMPTY; + } + }, this::onStatsProcessed)); } private void onStatsProcessed() { diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index d127eaa7457dc..af897436b2d56 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -21,6 +21,7 @@ import org.elasticsearch.core.Releasable; import 
org.elasticsearch.core.Tuple; import org.elasticsearch.http.HttpTransportSettings; +import org.elasticsearch.tasks.Task; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -131,6 +132,33 @@ public StoredContext stashContext() { }; } + public StoredContext newTraceContext() { + final ThreadContextStruct context = threadLocal.get(); + final Map newRequestHeaders = new HashMap<>(context.requestHeaders); + final Map newTransientHeaders = new HashMap<>(context.transientHeaders); + + final String previousTraceParent = newRequestHeaders.remove(Task.TRACE_PARENT_HTTP_HEADER); + final String previousTraceState = newRequestHeaders.remove(Task.TRACE_STATE); + + if (previousTraceParent != null) { + newTransientHeaders.put("parent_" + Task.TRACE_PARENT_HTTP_HEADER, previousTraceParent); + } + if (previousTraceState != null) { + newTransientHeaders.put("parent_" + Task.TRACE_STATE, previousTraceState); + } + + threadLocal.set( + new ThreadContextStruct( + newRequestHeaders, + context.responseHeaders, + newTransientHeaders, + context.isSystemContext, + context.warningHeadersSize + ) + ); + return () -> threadLocal.set(context); + } + private Map headers(ThreadContextStruct context) { Map map = Maps.newMapWithExpectedSize(org.elasticsearch.tasks.Task.HEADERS_TO_COPY.size()); for (String header : org.elasticsearch.tasks.Task.HEADERS_TO_COPY) { diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 561ccdb389d13..b57773d9a2924 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -504,8 +504,10 @@ private void copyRestHeaders(RestRequest request, ThreadContext threadContext) t if (traceparent.length() >= 55) { final String traceId = traceparent.substring(3, 35); threadContext.putHeader(Task.TRACE_ID, traceId); - threadContext.putHeader(Task.TRACE_PARENT_HTTP_HEADER, traceparent); + threadContext.putHeader("parent_" + Task.TRACE_PARENT_HTTP_HEADER, traceparent); } + } else if (name.equals(Task.TRACE_STATE)) { + threadContext.putHeader("parent_" + Task.TRACE_STATE, distinctHeaderValues.get(0)); } else { threadContext.putHeader(name, String.join(",", distinctHeaderValues)); } diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskManager.java b/server/src/main/java/org/elasticsearch/tasks/TaskManager.java index 10087ac603c3a..6bc2370fc38c6 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskManager.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskManager.java @@ -121,31 +121,34 @@ public Task register(String type, String action, TaskAwareRequest request) { long headerSize = 0; long maxSize = maxHeaderSize.getBytes(); ThreadContext threadContext = threadPool.getThreadContext(); - for (String key : taskHeaders) { - String httpHeader = threadContext.getHeader(key); - if (httpHeader != null) { - headerSize += key.length() * 2 + httpHeader.length() * 2; - if (headerSize > maxSize) { - throw new IllegalArgumentException("Request exceeded the maximum size of task headers " + maxHeaderSize); + + try (var ignored = threadContext.newTraceContext()) { + for (String key : taskHeaders) { + String httpHeader = threadContext.getHeader(key); + if (httpHeader != null) { + headerSize += key.length() * 2 + httpHeader.length() * 2; + if (headerSize > maxSize) { + throw new IllegalArgumentException("Request exceeded the maximum size of task headers " + maxHeaderSize); + } + headers.put(key, 
httpHeader); } - headers.put(key, httpHeader); } - } - Task task = request.createTask(taskIdGenerator.incrementAndGet(), type, action, request.getParentTask(), headers); - Objects.requireNonNull(task); - assert task.getParentTaskId().equals(request.getParentTask()) : "Request [ " + request + "] didn't preserve it parentTaskId"; - if (logger.isTraceEnabled()) { - logger.trace("register {} [{}] [{}] [{}]", task.getId(), type, action, task.getDescription()); - } + Task task = request.createTask(taskIdGenerator.incrementAndGet(), type, action, request.getParentTask(), headers); + Objects.requireNonNull(task); + assert task.getParentTaskId().equals(request.getParentTask()) : "Request [ " + request + "] didn't preserve it parentTaskId"; + if (logger.isTraceEnabled()) { + logger.trace("register {} [{}] [{}] [{}]", task.getId(), type, action, task.getDescription()); + } - if (task instanceof CancellableTask) { - registerCancellableTask(task); - } else { - Task previousTask = tasks.put(task.getId(), task); - assert previousTask == null; - taskTracer.onTaskRegistered(threadContext, task); + if (task instanceof CancellableTask) { + registerCancellableTask(task); + } else { + Task previousTask = tasks.put(task.getId(), task); + assert previousTask == null; + taskTracer.onTaskRegistered(threadContext, task); + } + return task; } - return task; } public Task registerAndExecute( @@ -161,43 +164,45 @@ public Task reg } else { unregisterChildNode = () -> {}; } - final Task task; - try { - task = register(type, action.actionName, request); - } catch (TaskCancelledException e) { - unregisterChildNode.close(); - throw e; - } - // NOTE: ActionListener cannot infer Response, see https://bugs.openjdk.java.net/browse/JDK-8203195 - action.execute(task, request, new ActionListener() { - @Override - public void onResponse(Response response) { - try { - release(); - } finally { - taskListener.onResponse(task, response); - } + try (var ignored = threadPool.getThreadContext().newTraceContext()) { + final Task task; + try { + task = register(type, action.actionName, request); + } catch (TaskCancelledException e) { + unregisterChildNode.close(); + throw e; } + // NOTE: ActionListener cannot infer Response, see https://bugs.openjdk.java.net/browse/JDK-8203195 + action.execute(task, request, new ActionListener() { + @Override + public void onResponse(Response response) { + try { + release(); + } finally { + taskListener.onResponse(task, response); + } + } - @Override - public void onFailure(Exception e) { - try { - release(); - } finally { - taskListener.onFailure(task, e); + @Override + public void onFailure(Exception e) { + try { + release(); + } finally { + taskListener.onFailure(task, e); + } } - } - @Override - public String toString() { - return this.getClass().getName() + "{" + taskListener + "}{" + task + "}"; - } + @Override + public String toString() { + return this.getClass().getName() + "{" + taskListener + "}{" + task + "}"; + } - private void release() { - Releasables.close(unregisterChildNode, () -> unregister(task)); - } - }); - return task; + private void release() { + Releasables.close(unregisterChildNode, () -> unregister(task)); + } + }); + return task; + } } private void registerCancellableTask(Task task) { diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 869b83bffd744..a78e0460631b1 100644 --- 
a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -287,8 +287,6 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { services.openTelemetry.getPropagators().getTextMapPropagator().inject(contextForNewSpan, spanHeaders, Map::put); spanHeaders.keySet().removeIf(k -> isSupportedContextKey(k) == false); - // Ignore the result here, we don't need to restore the threadContext - threadContext.removeRequestHeaders(TRACE_HEADERS); threadContext.putHeader(spanHeaders); // logGraphviz(span); @@ -346,11 +344,13 @@ private boolean isSpanNameIncluded(String name) { } private Context getParentSpanContext() { - // Check for a parent context in the thread context - String traceParentHeader = threadPool.getThreadContext().getHeader(Task.TRACE_PARENT_HTTP_HEADER); - String traceStateHeader = threadPool.getThreadContext().getHeader(Task.TRACE_STATE); + // Check for a parent context in the thread context. + final ThreadContext threadContext = threadPool.getThreadContext(); + final String traceParentHeader = threadContext.getHeader("parent_" + Task.TRACE_PARENT_HTTP_HEADER); + final String traceStateHeader = threadContext.getHeader("parent_" + Task.TRACE_STATE); + if (traceParentHeader != null) { - Map traceContextMap = new HashMap<>(); + final Map traceContextMap = new HashMap<>(2); // traceparent and tracestate should match the keys used by W3CTraceContextPropagator traceContextMap.put(Task.TRACE_PARENT_HTTP_HEADER, traceParentHeader); if (traceStateHeader != null) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 80bfc9dce4588..f6c95c1638534 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -225,7 +225,7 @@ public void authorize( final AuthorizationContext enclosingContext = extractAuthorizationContext(threadContext, action); - final Runnable tracer = maybeStartTracing(enclosingContext, authentication, action, originalRequest); + Runnable stopTracing = null; /* authorization fills in certain transient headers, which must be observed in the listener (action handler execution) * as well, but which must not bleed across different action context (eg parent-child action contexts). @@ -233,47 +233,55 @@ public void authorize( * Therefore we begin by clearing the existing ones up, as they might already be set during the authorization of a * previous parent action that ran under the same thread context (also on the same node). * When the returned {@code StoredContext} is closed, ALL the original headers are restored. + * + * We also clear tracing-related headers */ try (ThreadContext.StoredContext ignore = threadContext.newStoredContext(false, ACTION_SCOPE_AUTHORIZATION_KEYS)) { - // this does not clear {@code AuthorizationServiceField.ORIGINATING_ACTION_KEY} - // prior to doing any authorization lets set the originating action in the thread context - // the originating action is the current action if no originating action has yet been set in the current thread context - // if there is already an original action, that stays put (eg. 
the current action is a child action) - putTransientIfNonExisting(ORIGINATING_ACTION_KEY, action); - - final String auditId; - try { - auditId = requireAuditId(authentication, action, originalRequest); - } catch (ElasticsearchSecurityException e) { - listener.onFailure(e); - return; - } + // FIXME improve this + try (var ignore2 = threadContext.newTraceContext()) { + stopTracing = maybeStartTracing(enclosingContext, authentication, action, originalRequest); + // this does not clear {@code AuthorizationServiceField.ORIGINATING_ACTION_KEY} + // prior to doing any authorization lets set the originating action in the thread context + // the originating action is the current action if no originating action has yet been set in the current thread context + // if there is already an original action, that stays put (eg. the current action is a child action) + putTransientIfNonExisting(ORIGINATING_ACTION_KEY, action); + + final String auditId; + try { + auditId = requireAuditId(authentication, action, originalRequest); + } catch (ElasticsearchSecurityException e) { + listener.onFailure(e); + return; + } - // sometimes a request might be wrapped within another, which is the case for proxied - // requests and concrete shard requests - final TransportRequest unwrappedRequest = maybeUnwrapRequest(authentication, originalRequest, action, auditId); + // sometimes a request might be wrapped within another, which is the case for proxied + // requests and concrete shard requests + final TransportRequest unwrappedRequest = maybeUnwrapRequest(authentication, originalRequest, action, auditId); - try { - checkOperatorPrivileges(authentication, action, originalRequest); - } catch (ElasticsearchException e) { - listener.onFailure(e); - return; - } + try { + checkOperatorPrivileges(authentication, action, originalRequest); + } catch (ElasticsearchException e) { + listener.onFailure(e); + return; + } - if (SystemUser.is(authentication.getUser())) { - // this never goes async so no need to wrap the listener - authorizeSystemUser(authentication, action, auditId, unwrappedRequest, listener); - } else { - final RequestInfo requestInfo = new RequestInfo(authentication, unwrappedRequest, action, enclosingContext); - final AuthorizationEngine engine = getAuthorizationEngine(authentication); - final ActionListener authzInfoListener = wrapPreservingContext(ActionListener.wrap(authorizationInfo -> { - threadContext.putTransient(AUTHORIZATION_INFO_KEY, authorizationInfo); - maybeAuthorizeRunAs(requestInfo, auditId, authorizationInfo, listener); - }, listener::onFailure), threadContext); - engine.resolveAuthorizationInfo(requestInfo, authzInfoListener); + if (SystemUser.is(authentication.getUser())) { + // this never goes async so no need to wrap the listener + authorizeSystemUser(authentication, action, auditId, unwrappedRequest, listener); + } else { + final RequestInfo requestInfo = new RequestInfo(authentication, unwrappedRequest, action, enclosingContext); + final AuthorizationEngine engine = getAuthorizationEngine(authentication); + final ActionListener authzInfoListener = wrapPreservingContext(ActionListener.wrap(authorizationInfo -> { + threadContext.putTransient(AUTHORIZATION_INFO_KEY, authorizationInfo); + maybeAuthorizeRunAs(requestInfo, auditId, authorizationInfo, listener); + }, listener::onFailure), threadContext); + engine.resolveAuthorizationInfo(requestInfo, authzInfoListener); + } } } finally { - tracer.run(); + if (stopTracing != null) { + stopTracing.run(); + } } } From 
b633f7e3fdb56173d643caabbc7d781749cf7701 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Tue, 29 Mar 2022 15:22:05 +0100 Subject: [PATCH 36/90] Fixes --- .../java/org/elasticsearch/node/Node.java | 1 + .../rest/AbstractRestChannel.java | 1 - .../org/elasticsearch/xpack/apm/ApmIT.java | 17 +++++----- .../elasticsearch/xpack/apm/APMTracer.java | 33 ++++++++++--------- .../security/authz/AuthorizationService.java | 11 ++++--- 5 files changed, 33 insertions(+), 30 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 35a115e0b0e29..c8478cf5bcc0e 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -214,6 +214,7 @@ import java.util.function.UnaryOperator; import java.util.stream.Collectors; import java.util.stream.Stream; + import javax.net.ssl.SNIHostName; import static java.util.stream.Collectors.toList; diff --git a/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java b/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java index 29e6e7b957b3e..00ea6b2f38ebb 100644 --- a/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java +++ b/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ParsedMediaType; import org.elasticsearch.xcontent.XContentBuilder; diff --git a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java index d5e9bb981a746..7bf164e30f0eb 100644 --- a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java +++ b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -21,7 +21,6 @@ import org.elasticsearch.client.Response; import org.elasticsearch.cluster.coordination.PublicationTransportHandler; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; @@ -66,14 +65,14 @@ protected Collection> nodePlugins() { @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)); -// ((MockSecureSettings) builder.getSecureSettings()).setString( -// APMTracer.APM_ENDPOINT_SETTING.getKey(), -// System.getProperty("tests.apm.endpoint", "") -// ); -// ((MockSecureSettings) builder.getSecureSettings()).setString( -// APMTracer.APM_TOKEN_SETTING.getKey(), -// System.getProperty("tests.apm.token", "") -// ); + // ((MockSecureSettings) builder.getSecureSettings()).setString( + // APMTracer.APM_ENDPOINT_SETTING.getKey(), + // System.getProperty("tests.apm.endpoint", "") + // ); + // ((MockSecureSettings) builder.getSecureSettings()).setString( + // APMTracer.APM_TOKEN_SETTING.getKey(), + // System.getProperty("tests.apm.token", "") + // ); builder.put(APMTracer.APM_ENABLED_SETTING.getKey(), 
true).put("xpack.security.authz.tracing", true); return builder.build(); } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index e676498bcc449..b58cd6f33fa50 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -180,20 +180,20 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { } } - // These attributes don't apply to HTTP spans. The APM server can infer a number of things - // when "http." attributes are present - if (traceable.getAttributes().keySet().stream().anyMatch(key -> key.startsWith("http.")) == false) { - // hack transactions to avoid the 'custom' transaction type - // this one is not part of OTel semantic attributes - spanBuilder.setAttribute("type", "elasticsearch"); - // hack spans to avoid the 'app' span.type, will make it use external/elasticsearch - // also allows to set destination resource name in map - spanBuilder.setAttribute(SemanticAttributes.MESSAGING_SYSTEM, "elasticsearch"); - spanBuilder.setAttribute(SemanticAttributes.MESSAGING_DESTINATION, clusterService.getNodeName()); - } + // These attributes don't apply to HTTP spans. The APM server can infer a number of things + // when "http." attributes are present + if (traceable.getAttributes().keySet().stream().anyMatch(key -> key.startsWith("http.")) == false) { + // hack transactions to avoid the 'custom' transaction type + // this one is not part of OTel semantic attributes + spanBuilder.setAttribute("type", "elasticsearch"); + // hack spans to avoid the 'app' span.type, will make it use external/elasticsearch + // also allows to set destination resource name in map + spanBuilder.setAttribute(SemanticAttributes.MESSAGING_SYSTEM, "elasticsearch"); + spanBuilder.setAttribute(SemanticAttributes.MESSAGING_DESTINATION, clusterService.getNodeName()); + } - // spanBuilder.setAttribute(SemanticAttributes.DB_SYSTEM, "elasticsearch"); - // spanBuilder.setAttribute(SemanticAttributes.DB_NAME, clusterService.getNodeName()); + // spanBuilder.setAttribute(SemanticAttributes.DB_SYSTEM, "elasticsearch"); + // spanBuilder.setAttribute(SemanticAttributes.DB_NAME, clusterService.getNodeName()); // this will duplicate the "resource attributes" that are defined globally // but providing them as span attributes allow easier mapping through labels as otel attributes are stored as-is only in @@ -213,11 +213,12 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { services.openTelemetry.getPropagators().getTextMapPropagator().inject(contextForNewSpan, spanHeaders, Map::put); spanHeaders.keySet().removeIf(k -> isSupportedContextKey(k) == false); - threadContext.putHeader(spanHeaders); + threadContext.putHeader(spanHeaders); - // logGraphviz(span); + // logGraphviz(span); - return span; + return span; + }); }); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index f6c95c1638534..0aa3de6537f41 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -271,10 +271,13 @@ 
public void authorize( } else { final RequestInfo requestInfo = new RequestInfo(authentication, unwrappedRequest, action, enclosingContext); final AuthorizationEngine engine = getAuthorizationEngine(authentication); - final ActionListener authzInfoListener = wrapPreservingContext(ActionListener.wrap(authorizationInfo -> { - threadContext.putTransient(AUTHORIZATION_INFO_KEY, authorizationInfo); - maybeAuthorizeRunAs(requestInfo, auditId, authorizationInfo, listener); - }, listener::onFailure), threadContext); + final ActionListener authzInfoListener = wrapPreservingContext( + ActionListener.wrap(authorizationInfo -> { + threadContext.putTransient(AUTHORIZATION_INFO_KEY, authorizationInfo); + maybeAuthorizeRunAs(requestInfo, auditId, authorizationInfo, listener); + }, listener::onFailure), + threadContext + ); engine.resolveAuthorizationInfo(requestInfo, authzInfoListener); } } From 4810a7fbd69a212d86c326058fc08f5cb5a3c478 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 30 Mar 2022 10:33:20 +0100 Subject: [PATCH 37/90] Managed to get traces to ship if I hack the APM agent --- build-tools-internal/version.properties | 3 +- run.sh | 23 ++- .../elasticsearch/bootstrap/security.policy | 5 - x-pack/plugin/apm-integration/build.gradle | 10 +- .../elasticsearch/xpack/apm/APMTracer.java | 132 ++++++++---------- .../plugin-metadata/plugin-security.policy | 13 +- 6 files changed, 89 insertions(+), 97 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index e9e96a7eaaf94..b4698f26ec2d1 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -56,4 +56,5 @@ jimfs_guava = 30.1-jre networknt_json_schema_validator = 1.0.48 # tracing -opentelemetry = 1.9.0 +apm_agent = 1.30.0 +opentelemetry = 1.12.0 diff --git a/run.sh b/run.sh index 6a070158e15bd..f75d92d22d0e5 100755 --- a/run.sh +++ b/run.sh @@ -2,22 +2,31 @@ set -eo pipefail +# This is the path that `./gradlew localDistro` prints out at the end cd build/distribution/local/elasticsearch-8.2.0-SNAPSHOT +# URL and token for sending traces SERVER_URL="" SECRET_TOKEN="" +# Optional - override the agent jar +OVERRIDE_AGENT_JAR="$HOME/.m2/repository/co/elastic/apm/elastic-apm-agent/1.30.1-SNAPSHOT/elastic-apm-agent-1.30.1-SNAPSHOT.jar" + +# Clear this so that ES doesn't repeatedly complain about ignoring it export JAVA_HOME='' if [[ ! 
-f config/elasticsearch.keystore ]]; then ./bin/elasticsearch-keystore create -# echo "$SERVER_URL" | ./bin/elasticsearch-keystore add -x -f -v 'xpack.apm.endpoint' -# echo "$SECRET_TOKEN" | ./bin/elasticsearch-keystore add -x -f -v 'xpack.apm.token' + # Use elastic:password for sending REST requests echo "password" | ./bin/elasticsearch-keystore add -x 'bootstrap.password' fi -# AGENT="$PWD/modules/apm-integration/elastic-apm-agent-1.29.0.jar" -AGENT="$PWD/modules/apm-integration/elastic-apm-agent-1.29.0.jar" +AGENT_JAR="modules/apm-integration/elastic-apm-agent-1.30.0.jar" + +if [[ -n "$OVERRIDE_AGENT_JAR" ]]; then + # Copy in WIP agent + cp "$OVERRIDE_AGENT_JAR" "$AGENT_JAR" +fi AGENT_OPTS="" AGENT_OPTS="$AGENT_OPTS -Delastic.apm.service_name=elasticsearch" @@ -32,7 +41,9 @@ AGENT_OPTS="$AGENT_OPTS -Delastic.apm.enable_experimental_instrumentations=true" # SUSPEND_JVM="n" -# export ES_SERVER_OPTS="-agentlib:jdwp=transport=dt_socket,server=n,suspend=n,address=*:5005 -ea -javaagent:$AGENT $AGENT_OPTS" -export ES_SERVER_OPTS="-ea -javaagent:$AGENT $AGENT_OPTS" +# export ES_SERVER_OPTS="-agentlib:jdwp=transport=dt_socket,server=n,suspend=n,address=*:5005 -ea -javaagent:$AGENT_JAR $AGENT_OPTS" +export ES_SERVER_OPTS="-ea -javaagent:$AGENT_JAR $AGENT_OPTS" +# export ES_JAVA_OPTS="-Djava.security.debug=failure" +# export ES_JAVA_OPTS="-Djava.security.debug=access,failure" exec ./bin/elasticsearch -Expack.apm.tracing.enabled=true diff --git a/server/src/main/resources/org/elasticsearch/bootstrap/security.policy b/server/src/main/resources/org/elasticsearch/bootstrap/security.policy index df1946064f538..b617083e85ab2 100644 --- a/server/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/server/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -157,8 +157,3 @@ grant { // system memory on Linux systems affected by JDK bug (#66629) permission java.io.FilePermission "/proc/meminfo", "read"; }; - -grant codeBase "${codebase.log4j-api}" { - permission java.lang.RuntimePermission "getClassLoader"; - permission java.lang.RuntimePermission "getProtectionDomain"; -}; diff --git a/x-pack/plugin/apm-integration/build.gradle b/x-pack/plugin/apm-integration/build.gradle index d31c46932d3a9..02bfe542d6872 100644 --- a/x-pack/plugin/apm-integration/build.gradle +++ b/x-pack/plugin/apm-integration/build.gradle @@ -4,6 +4,8 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ +import org.elasticsearch.gradle.VersionProperties + apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-test-artifact' @@ -19,7 +21,11 @@ dependencies { implementation "io.opentelemetry:opentelemetry-api:${versions.opentelemetry}" implementation "io.opentelemetry:opentelemetry-context:${versions.opentelemetry}" implementation "io.opentelemetry:opentelemetry-semconv:${versions.opentelemetry}-alpha" - runtimeOnly 'co.elastic.apm:elastic-apm-agent:1.30.0' + runtimeOnly "co.elastic.apm:elastic-apm-agent:${versions.apm_agent}" + + // OTel context needs these, I think + api "org.slf4j:slf4j-api:${versions.slf4j}" + api "org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}" compileOnly project(path: xpackModule('core')) internalClusterTestImplementation(testArtifact(project(xpackModule('core')))) @@ -28,7 +34,7 @@ dependencies { } } -// no unit-test for now +// FIXME: no unit-test for now tasks.named("test").configure { enabled = false } tasks.named("dependencyLicenses").configure { diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index b58cd6f33fa50..ef2371bb86276 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -16,6 +16,8 @@ import io.opentelemetry.context.propagation.TextMapGetter; import io.opentelemetry.semconv.trace.attributes.SemanticAttributes; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractLifecycleComponent; @@ -37,8 +39,6 @@ import java.util.Map; import java.util.Objects; import java.util.Set; -import java.util.concurrent.Semaphore; -import java.util.concurrent.TimeUnit; import java.util.function.Function; import java.util.stream.Collectors; @@ -48,6 +48,8 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elasticsearch.tracing.Tracer { + private static final Logger LOGGER = LogManager.getLogger(APMTracer.class); + static final Setting APM_ENABLED_SETTING = Setting.boolSetting("xpack.apm.tracing.enabled", false, Dynamic, NodeScope); static final Setting> APM_TRACING_NAMES_INCLUDE_SETTING = Setting.listSetting( "xpack.apm.tracing.names.include", @@ -57,7 +59,6 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic NodeScope ); - private final Semaphore shutdownPermits = new Semaphore(Integer.MAX_VALUE); private final Map spans = ConcurrentCollections.newConcurrentMap(); private final ThreadPool threadPool; private final ClusterService clusterService; @@ -108,12 +109,6 @@ protected void doStart() { @Override protected void doStop() { destroyApmServices(); - try { - final boolean stopped = shutdownPermits.tryAcquire(Integer.MAX_VALUE, 30L, TimeUnit.SECONDS); - assert stopped : "did not stop tracing within timeout"; - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } } @Override @@ -133,12 +128,8 @@ private void createApmServices() { } private void destroyApmServices() { - var services = this.services; this.services = null; - if (services == null) { - return; - } - spans.clear();// discard in-flight spans + this.spans.clear();// discard in-flight spans } 
@Override @@ -152,74 +143,71 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { return; } - spans.computeIfAbsent(traceable.getSpanId(), spanId -> { - return AccessController.doPrivileged((PrivilegedAction) () -> { - // services might be in shutdown state by this point, but this is handled by the open telemetry internally - final SpanBuilder spanBuilder = services.tracer.spanBuilder(traceable.getSpanName()); - Context parentContext = getParentSpanContext(); - if (parentContext != null) { - spanBuilder.setParent(parentContext); - } - - for (Map.Entry entry : traceable.getAttributes().entrySet()) { - final Object value = entry.getValue(); - if (value instanceof String) { - spanBuilder.setAttribute(entry.getKey(), (String) value); - } else if (value instanceof Long) { - spanBuilder.setAttribute(entry.getKey(), (Long) value); - } else if (value instanceof Integer) { - spanBuilder.setAttribute(entry.getKey(), (Integer) value); - } else if (value instanceof Double) { - spanBuilder.setAttribute(entry.getKey(), (Double) value); - } else if (value instanceof Boolean) { - spanBuilder.setAttribute(entry.getKey(), (Boolean) value); - } else { - throw new IllegalArgumentException( - "span attributes do not support value type of [" + value.getClass().getCanonicalName() + "]" - ); - } - } + spans.computeIfAbsent(traceable.getSpanId(), spanId -> AccessController.doPrivileged((PrivilegedAction) () -> { + // services might be in shutdown state by this point, but this is handled by the open telemetry internally + final SpanBuilder spanBuilder = services.tracer.spanBuilder(traceable.getSpanName()); + Context parentContext = getParentSpanContext(); + if (parentContext != null) { + spanBuilder.setParent(parentContext); + } - // These attributes don't apply to HTTP spans. The APM server can infer a number of things - // when "http." attributes are present - if (traceable.getAttributes().keySet().stream().anyMatch(key -> key.startsWith("http.")) == false) { - // hack transactions to avoid the 'custom' transaction type - // this one is not part of OTel semantic attributes - spanBuilder.setAttribute("type", "elasticsearch"); - // hack spans to avoid the 'app' span.type, will make it use external/elasticsearch - // also allows to set destination resource name in map - spanBuilder.setAttribute(SemanticAttributes.MESSAGING_SYSTEM, "elasticsearch"); - spanBuilder.setAttribute(SemanticAttributes.MESSAGING_DESTINATION, clusterService.getNodeName()); + for (Map.Entry entry : traceable.getAttributes().entrySet()) { + final Object value = entry.getValue(); + if (value instanceof String) { + spanBuilder.setAttribute(entry.getKey(), (String) value); + } else if (value instanceof Long) { + spanBuilder.setAttribute(entry.getKey(), (Long) value); + } else if (value instanceof Integer) { + spanBuilder.setAttribute(entry.getKey(), (Integer) value); + } else if (value instanceof Double) { + spanBuilder.setAttribute(entry.getKey(), (Double) value); + } else if (value instanceof Boolean) { + spanBuilder.setAttribute(entry.getKey(), (Boolean) value); + } else { + throw new IllegalArgumentException( + "span attributes do not support value type of [" + value.getClass().getCanonicalName() + "]" + ); } + } - // spanBuilder.setAttribute(SemanticAttributes.DB_SYSTEM, "elasticsearch"); - // spanBuilder.setAttribute(SemanticAttributes.DB_NAME, clusterService.getNodeName()); + // These attributes don't apply to HTTP spans. The APM server can infer a number of things + // when "http." 
attributes are present + if (traceable.getAttributes().keySet().stream().anyMatch(key -> key.startsWith("http.")) == false) { + // hack transactions to avoid the 'custom' transaction type + // this one is not part of OTel semantic attributes + spanBuilder.setAttribute("type", "elasticsearch"); + // hack spans to avoid the 'app' span.type, will make it use external/elasticsearch + // also allows to set destination resource name in map + spanBuilder.setAttribute(SemanticAttributes.MESSAGING_SYSTEM, "elasticsearch"); + spanBuilder.setAttribute(SemanticAttributes.MESSAGING_DESTINATION, clusterService.getNodeName()); + } - // this will duplicate the "resource attributes" that are defined globally - // but providing them as span attributes allow easier mapping through labels as otel attributes are stored as-is only in - // 7.16. - spanBuilder.setAttribute(Traceable.AttributeKeys.NODE_NAME, clusterService.getNodeName()); - spanBuilder.setAttribute(Traceable.AttributeKeys.CLUSTER_NAME, clusterService.getClusterName().value()); + // spanBuilder.setAttribute(SemanticAttributes.DB_SYSTEM, "elasticsearch"); + // spanBuilder.setAttribute(SemanticAttributes.DB_NAME, clusterService.getNodeName()); - final String xOpaqueId = threadPool.getThreadContext().getHeader(Task.X_OPAQUE_ID_HTTP_HEADER); - if (xOpaqueId != null) { - spanBuilder.setAttribute("es.x-opaque-id", xOpaqueId); - } + // this will duplicate the "resource attributes" that are defined globally + // but providing them as span attributes allow easier mapping through labels as otel attributes are stored as-is only in + // 7.16. + spanBuilder.setAttribute(Traceable.AttributeKeys.NODE_NAME, clusterService.getNodeName()); + spanBuilder.setAttribute(Traceable.AttributeKeys.CLUSTER_NAME, clusterService.getClusterName().value()); - final Span span = spanBuilder.startSpan(); + final String xOpaqueId = threadPool.getThreadContext().getHeader(Task.X_OPAQUE_ID_HTTP_HEADER); + if (xOpaqueId != null) { + spanBuilder.setAttribute("es.x-opaque-id", xOpaqueId); + } + final Span span = spanBuilder.startSpan(); - final Map spanHeaders = new HashMap<>(); - final Context contextForNewSpan = Context.current().with(span); - services.openTelemetry.getPropagators().getTextMapPropagator().inject(contextForNewSpan, spanHeaders, Map::put); - spanHeaders.keySet().removeIf(k -> isSupportedContextKey(k) == false); + final Map spanHeaders = new HashMap<>(); + final Context contextForNewSpan = Context.current().with(span); + services.openTelemetry.getPropagators().getTextMapPropagator().inject(contextForNewSpan, spanHeaders, Map::put); + spanHeaders.keySet().removeIf(k -> isSupportedContextKey(k) == false); - threadContext.putHeader(spanHeaders); + threadContext.putHeader(spanHeaders); - // logGraphviz(span); + // logGraphviz(span); - return span; - }); - }); + return span; + })); } private static final Set CACHE = new HashSet<>(); diff --git a/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy b/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy index d8cf75c7a7e4d..a169c0a13f8b9 100644 --- a/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy +++ b/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy @@ -7,22 +7,13 @@ */ grant { - permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; - permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; - permission java.lang.RuntimePermission "accessDeclaredMembers"; - // required by 
io.grpc.internal.DnsNameResolver in grpc-core - permission java.net.NetPermission "getProxySelector"; - permission java.lang.RuntimePermission "getClassLoader"; - permission java.lang.RuntimePermission "createClassLoader"; - permission java.net.SocketPermission "*", "connect,resolve"; permission java.lang.RuntimePermission "accessSystemModules"; + permission java.lang.RuntimePermission "createClassLoader"; + permission java.lang.RuntimePermission "getClassLoader"; }; grant codeBase "${codebase.elastic-apm-agent}" { permission java.lang.RuntimePermission "setFactory"; - permission java.lang.RuntimePermission "getClassLoader"; - permission java.lang.RuntimePermission "createClassLoader"; permission java.lang.RuntimePermission "setContextClassLoader"; permission java.net.SocketPermission "*", "connect,resolve"; - permission java.lang.RuntimePermission "accessSystemModules"; }; From bda6ea2a9f986766a9a79faf3dac8e76ee170d2d Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 30 Mar 2022 14:59:50 +0100 Subject: [PATCH 38/90] Move java agent CLI option into plugin descriptor --- .../resources/plugin-descriptor.properties | 4 +- .../tools/launchers/BootstrapJvmOptions.java | 51 ++++++++++--------- .../tools/launchers/JvmOptionsParser.java | 11 +++- .../launchers/BootstrapJvmOptionsTests.java | 17 +++++-- .../org/elasticsearch/plugins/PluginInfo.java | 21 +++----- x-pack/plugin/apm-integration/build.gradle | 1 + 6 files changed, 61 insertions(+), 44 deletions(-) diff --git a/build-tools/src/main/resources/plugin-descriptor.properties b/build-tools/src/main/resources/plugin-descriptor.properties index e313c0f4692d3..59d2c180bbbb9 100644 --- a/build-tools/src/main/resources/plugin-descriptor.properties +++ b/build-tools/src/main/resources/plugin-descriptor.properties @@ -51,10 +51,10 @@ extended.plugins=${extendedPlugins} # # 'has.native.controller': whether or not the plugin has a native controller has.native.controller=${hasNativeController} -<% if (type == "bootstrap") { %> +<% if (javaOpts.isEmpty() == false) { %> # # 'java.opts': any additional command line parameters to pass to the JVM when -# Elasticsearch starts. Only applies to "bootstrap" plugins. +# Elasticsearch starts. java.opts=${javaOpts} <% } %> <% if (licensed) { %> diff --git a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/BootstrapJvmOptions.java b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/BootstrapJvmOptions.java index 3bccc5f769998..e0751cce0633b 100644 --- a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/BootstrapJvmOptions.java +++ b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/BootstrapJvmOptions.java @@ -17,7 +17,7 @@ import java.util.List; import java.util.Locale; import java.util.Properties; -import java.util.stream.Collectors; +import java.util.function.Consumer; /** * This class looks for plugins whose "type" is "bootstrap". 
Such plugins @@ -28,27 +28,32 @@ public class BootstrapJvmOptions { private BootstrapJvmOptions() {} - public static List bootstrapJvmOptions(Path plugins) throws IOException { + public static List bootstrapJvmOptions(Path modules, Path plugins) throws IOException { + if (Files.isDirectory(modules) == false) { + throw new IllegalArgumentException("Modules path " + modules + " must be a directory"); + } + if (Files.isDirectory(plugins) == false) { throw new IllegalArgumentException("Plugins path " + plugins + " must be a directory"); } + final List modulesInfo = getPluginInfo(modules); final List pluginInfo = getPluginInfo(plugins); - return generateOptions(pluginInfo); + return generateOptions(modulesInfo, pluginInfo); } // Find all plugins and return their jars and descriptors. private static List getPluginInfo(Path plugins) throws IOException { final List pluginInfo = new ArrayList<>(); - final List pluginDirs = Files.list(plugins).collect(Collectors.toList()); + final List pluginDirs = Files.list(plugins).toList(); for (Path pluginDir : pluginDirs) { final List jarFiles = new ArrayList<>(); final Properties props = new Properties(); - final List pluginFiles = Files.list(pluginDir).collect(Collectors.toList()); + final List pluginFiles = Files.list(pluginDir).toList(); for (Path pluginFile : pluginFiles) { final String lowerCaseName = pluginFile.getFileName().toString().toLowerCase(Locale.ROOT); @@ -72,33 +77,33 @@ private static List getPluginInfo(Path plugins) throws IOException { } // package-private for testing - static List generateOptions(List pluginInfo) { + static List generateOptions(List modulesInfo, List pluginInfo) { final List bootstrapJars = new ArrayList<>(); - final List bootstrapOptions = new ArrayList<>(); + final List extraJavaOptions = new ArrayList<>(); - for (PluginInfo info : pluginInfo) { - final String type = info.properties.getProperty("type", "isolated").toLowerCase(Locale.ROOT); + // Add any additional Java CLI options. This could contain any number of options, + // but we don't attempt to split them up as all JVM options are concatenated together + // anyway + final Consumer infoConsumer = info -> { + final String type = info.properties.getProperty("type", "isolated").toLowerCase(Locale.ROOT); if (type.equals("bootstrap")) { bootstrapJars.addAll(info.jarFiles); - - // Add any additional Java CLI options. 
This could contain any number of options, - // but we don't attempt to split them up as all JVM options are concatenated together - // anyway - final String javaOpts = info.properties.getProperty("java.opts", ""); - if (javaOpts.isBlank() == false) { - bootstrapOptions.add(javaOpts); - } } - } + final String javaOpts = info.properties.getProperty("java.opts", ""); + if (javaOpts.isBlank() == false) { + extraJavaOptions.add(javaOpts); + } + }; - if (bootstrapJars.isEmpty()) { - return List.of(); - } + modulesInfo.forEach(infoConsumer); + pluginInfo.forEach(infoConsumer); - bootstrapOptions.add("-Xbootclasspath/a:" + String.join(":", bootstrapJars)); + if (bootstrapJars.isEmpty() == false) { + extraJavaOptions.add("-Xbootclasspath/a:" + String.join(":", bootstrapJars)); + } - return bootstrapOptions; + return extraJavaOptions; } // package-private for testing diff --git a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java index 96e75b4bcf5ba..93c79e31ff5c5 100644 --- a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java +++ b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java @@ -122,6 +122,15 @@ public static void main(final String[] args) throws InterruptedException, IOExce private List jvmOptions(final Path config, Path plugins, final String esJavaOpts, final Map substitutions) throws InterruptedException, IOException, JvmOptionsFileParserException { + final Path esHome = Path.of(System.getenv("ES_HOME")); + if (Files.notExists(esHome)) { + throw new RuntimeException("ES_HOME not set or doesn't exist"); + } + Path modules = esHome.resolve("modules"); + if (Files.notExists(modules) || Files.isDirectory(modules) == false) { + throw new RuntimeException("ES_HOME does not point to a valid installation - [modules] not found or not a directory"); + } + final List jvmOptions = readJvmOptionsFiles(config); if (esJavaOpts != null) { @@ -135,7 +144,7 @@ private List jvmOptions(final Path config, Path plugins, final String es substitutedJvmOptions.addAll(machineDependentHeap.determineHeapSettings(config, substitutedJvmOptions)); final List ergonomicJvmOptions = JvmErgonomics.choose(substitutedJvmOptions); final List systemJvmOptions = SystemJvmOptions.systemJvmOptions(); - final List bootstrapOptions = BootstrapJvmOptions.bootstrapJvmOptions(plugins); + final List bootstrapOptions = BootstrapJvmOptions.bootstrapJvmOptions(modules, plugins); final List finalJvmOptions = new ArrayList<>( systemJvmOptions.size() + substitutedJvmOptions.size() + ergonomicJvmOptions.size() + bootstrapOptions.size() diff --git a/distribution/tools/launchers/src/test/java/org/elasticsearch/tools/launchers/BootstrapJvmOptionsTests.java b/distribution/tools/launchers/src/test/java/org/elasticsearch/tools/launchers/BootstrapJvmOptionsTests.java index 3fb9e532db428..2b14c341e81fc 100644 --- a/distribution/tools/launchers/src/test/java/org/elasticsearch/tools/launchers/BootstrapJvmOptionsTests.java +++ b/distribution/tools/launchers/src/test/java/org/elasticsearch/tools/launchers/BootstrapJvmOptionsTests.java @@ -15,13 +15,14 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; public class BootstrapJvmOptionsTests extends 
LaunchersTestCase { public void testGenerateOptionsHandlesNoPlugins() { - final List options = BootstrapJvmOptions.generateOptions(List.of()); + final List options = BootstrapJvmOptions.generateOptions(List.of(), List.of()); assertThat(options, is(empty())); } @@ -30,10 +31,20 @@ public void testGenerateOptionsIgnoresNonBootstrapPlugins() { props.put("type", "isolated"); List info = List.of(new PluginInfo(List.of(), props)); - final List options = BootstrapJvmOptions.generateOptions(info); + final List options = BootstrapJvmOptions.generateOptions(List.of(), info); assertThat(options, is(empty())); } + public void testGenerateOptionsHandlesModules() { + Properties props = new Properties(); + props.put("type", "isolated"); + props.put("java.opts", "-ea"); + List info = List.of(new PluginInfo(List.of(), props)); + + final List options = BootstrapJvmOptions.generateOptions(info, List.of()); + assertThat(options, equalTo(List.of("-ea"))); + } + public void testGenerateOptionsHandlesBootstrapPlugins() { Properties propsWithoutJavaOpts = new Properties(); propsWithoutJavaOpts.put("type", "bootstrap"); @@ -54,7 +65,7 @@ public void testGenerateOptionsHandlesBootstrapPlugins() { propsWithJavaOpts.put("java.opts", "-Dkey=value -DotherKey=otherValue"); PluginInfo info4 = new PluginInfo(List.of("/path/fourth.jar"), propsWithJavaOpts); - final List options = BootstrapJvmOptions.generateOptions(List.of(info1, info2, info3, info4)); + final List options = BootstrapJvmOptions.generateOptions(List.of(), List.of(info1, info2, info3, info4)); assertThat( options, contains( diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java b/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java index de73e968a7af9..f627dbd587f13 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java @@ -199,12 +199,6 @@ public static PluginInfo readFromProperties(final Path path) throws IOException final String javaOpts = propsMap.remove("java.opts"); - if (type != PluginType.BOOTSTRAP && Strings.isNullOrEmpty(javaOpts) == false) { - throw new IllegalArgumentException( - "[java.opts] can only have a value when [type] is set to [bootstrap] for plugin [" + name + "]" - ); - } - boolean isLicensed = parseBooleanValue(name, "licensed", propsMap.remove("licensed")); if (propsMap.isEmpty() == false) { @@ -355,8 +349,7 @@ public PluginType getType() { } /** - * Returns any additional JVM command-line options that this plugin adds. Only applies to - * plugins whose type is "bootstrap". + * Returns any additional JVM command-line options that this plugin adds. * * @return any additional JVM options. 
*/ @@ -385,9 +378,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("has_native_controller", hasNativeController); builder.field("licensed", isLicensed); builder.field("type", type); - if (type == PluginType.BOOTSTRAP) { - builder.field("java_opts", javaOpts); - } + builder.field("java_opts", javaOpts); } builder.endObject(); @@ -450,12 +441,12 @@ public String toString(String prefix) { .append(prefix) .append("Type: ") .append(type) + .append("\n") + .append(prefix) + .append("Java Opts: ") + .append(javaOpts) .append("\n"); - if (type == PluginType.BOOTSTRAP) { - information.append(prefix).append("Java Opts: ").append(javaOpts).append("\n"); - } - information.append(prefix) .append("Extended Plugins: ") .append(extendedPlugins) diff --git a/x-pack/plugin/apm-integration/build.gradle b/x-pack/plugin/apm-integration/build.gradle index 02bfe542d6872..e43a4e84ce6f3 100644 --- a/x-pack/plugin/apm-integration/build.gradle +++ b/x-pack/plugin/apm-integration/build.gradle @@ -15,6 +15,7 @@ esplugin { description 'Provides APM integration for Elasticsearch' classname 'org.elasticsearch.xpack.apm.APM' extendedPlugins = ['x-pack-core'] + javaOpts = "-javaagent:modules/apm-integration/elastic-apm-agent-${versions.apm_agent}.jar" } dependencies { From fb87e79ebe52bc0ec61db18e0def438d5d903ba2 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 30 Mar 2022 17:08:45 +0100 Subject: [PATCH 39/90] Tweak for adding java opts via modules --- distribution/src/bin/elasticsearch | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distribution/src/bin/elasticsearch b/distribution/src/bin/elasticsearch index ec144c87b5fd6..2ee2756c3a1a7 100755 --- a/distribution/src/bin/elasticsearch +++ b/distribution/src/bin/elasticsearch @@ -98,7 +98,7 @@ fi # - second, JVM options are read from jvm.options and jvm.options.d/*.options # - third, JVM options from ES_JAVA_OPTS are applied # - fourth, ergonomic JVM options are applied -ES_JAVA_OPTS=`export ES_TMPDIR; "$JAVA" "$XSHARE" -cp "$LAUNCHERS_CLASSPATH" org.elasticsearch.tools.launchers.JvmOptionsParser "$ES_PATH_CONF" "$ES_HOME/plugins"` +ES_JAVA_OPTS=`export ES_HOME; export ES_TMPDIR; "$JAVA" "$XSHARE" -cp "$LAUNCHERS_CLASSPATH" org.elasticsearch.tools.launchers.JvmOptionsParser "$ES_PATH_CONF" "$ES_HOME/plugins"` # Remove enrollment related parameters before passing the arg list to Elasticsearch for i in "${!ARG_LIST[@]}"; do From b76910b17e67c57a8d4f06c7760af327f29cd3e7 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 30 Mar 2022 17:32:44 +0100 Subject: [PATCH 40/90] Header fixes --- .../src/main/java/org/elasticsearch/rest/RestController.java | 4 ++-- .../src/main/java/org/elasticsearch/xpack/apm/APMTracer.java | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index b57773d9a2924..31d8b795c2904 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -504,10 +504,10 @@ private void copyRestHeaders(RestRequest request, ThreadContext threadContext) t if (traceparent.length() >= 55) { final String traceId = traceparent.substring(3, 35); threadContext.putHeader(Task.TRACE_ID, traceId); - threadContext.putHeader("parent_" + Task.TRACE_PARENT_HTTP_HEADER, traceparent); + threadContext.putTransient("parent_" + Task.TRACE_PARENT_HTTP_HEADER, traceparent); } } else 
if (name.equals(Task.TRACE_STATE)) { - threadContext.putHeader("parent_" + Task.TRACE_STATE, distinctHeaderValues.get(0)); + threadContext.putTransient("parent_" + Task.TRACE_STATE, distinctHeaderValues.get(0)); } else { threadContext.putHeader(name, String.join(",", distinctHeaderValues)); } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index a78e0460631b1..babb869fe6d40 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -346,8 +346,8 @@ private boolean isSpanNameIncluded(String name) { private Context getParentSpanContext() { // Check for a parent context in the thread context. final ThreadContext threadContext = threadPool.getThreadContext(); - final String traceParentHeader = threadContext.getHeader("parent_" + Task.TRACE_PARENT_HTTP_HEADER); - final String traceStateHeader = threadContext.getHeader("parent_" + Task.TRACE_STATE); + final String traceParentHeader = threadContext.getTransient("parent_" + Task.TRACE_PARENT_HTTP_HEADER); + final String traceStateHeader = threadContext.getTransient("parent_" + Task.TRACE_STATE); if (traceParentHeader != null) { final Map traceContextMap = new HashMap<>(2); From a7266b341427b416e81e98078f9bd58507ebe12a Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 30 Mar 2022 17:33:18 +0100 Subject: [PATCH 41/90] Add run script --- run.sh | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100755 run.sh diff --git a/run.sh b/run.sh new file mode 100755 index 0000000000000..bb1824ee3c229 --- /dev/null +++ b/run.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +set -eo pipefail + +# This is the path that `./gradlew localDistro` prints out at the end +cd build/distribution/local/elasticsearch-8.2.0-SNAPSHOT + +# URL and token for sending traces +SERVER_URL="" +SECRET_TOKEN="" + +# Clear this so that ES doesn't repeatedly complain about ignoring it +export JAVA_HOME='' + +if [[ ! 
-f config/elasticsearch.keystore ]]; then + ./bin/elasticsearch-keystore create + echo "$SERVER_URL" | ./bin/elasticsearch-keystore add -x 'xpack.apm.endpoint' + echo "$SECRET_TOKEN" | ./bin/elasticsearch-keystore add -x 'xpack.apm.token' + # Use elastic:password for sending REST requests + echo "password" | ./bin/elasticsearch-keystore add -x 'bootstrap.password' +fi + +# export ES_JAVA_OPTS="-agentlib:jdwp=transport=dt_socket,server=n,suspend=n,address=*:5005 -ea " +# export ES_JAVA_OPTS="-Djava.security.debug=failure" +# export ES_JAVA_OPTS="-Djava.security.debug=access,failure" + +exec ./bin/elasticsearch -Expack.apm.tracing.enabled=true From 48bebd3568ec4d7b4a8047df210461ac1015858c Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 30 Mar 2022 17:32:44 +0100 Subject: [PATCH 42/90] Header fixes --- .../src/main/java/org/elasticsearch/rest/RestController.java | 4 ++-- .../src/main/java/org/elasticsearch/xpack/apm/APMTracer.java | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index b57773d9a2924..31d8b795c2904 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -504,10 +504,10 @@ private void copyRestHeaders(RestRequest request, ThreadContext threadContext) t if (traceparent.length() >= 55) { final String traceId = traceparent.substring(3, 35); threadContext.putHeader(Task.TRACE_ID, traceId); - threadContext.putHeader("parent_" + Task.TRACE_PARENT_HTTP_HEADER, traceparent); + threadContext.putTransient("parent_" + Task.TRACE_PARENT_HTTP_HEADER, traceparent); } } else if (name.equals(Task.TRACE_STATE)) { - threadContext.putHeader("parent_" + Task.TRACE_STATE, distinctHeaderValues.get(0)); + threadContext.putTransient("parent_" + Task.TRACE_STATE, distinctHeaderValues.get(0)); } else { threadContext.putHeader(name, String.join(",", distinctHeaderValues)); } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index ef2371bb86276..8d53a796a1270 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -261,8 +261,8 @@ private boolean isSpanNameIncluded(String name) { private Context getParentSpanContext() { // Check for a parent context in the thread context. 
final ThreadContext threadContext = threadPool.getThreadContext(); - final String traceParentHeader = threadContext.getHeader("parent_" + Task.TRACE_PARENT_HTTP_HEADER); - final String traceStateHeader = threadContext.getHeader("parent_" + Task.TRACE_STATE); + final String traceParentHeader = threadContext.getTransient("parent_" + Task.TRACE_PARENT_HTTP_HEADER); + final String traceStateHeader = threadContext.getTransient("parent_" + Task.TRACE_STATE); if (traceParentHeader != null) { final Map traceContextMap = new HashMap<>(2); From 1436637db850713f563404da640ed705f0c8159f Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Thu, 31 Mar 2022 10:06:09 +0100 Subject: [PATCH 43/90] Tweaks --- x-pack/plugin/apm-integration/build.gradle | 2 -- .../xpack/security/authz/AuthorizationService.java | 11 ++++------- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/apm-integration/build.gradle b/x-pack/plugin/apm-integration/build.gradle index e43a4e84ce6f3..dfd5bbf4ece4c 100644 --- a/x-pack/plugin/apm-integration/build.gradle +++ b/x-pack/plugin/apm-integration/build.gradle @@ -4,8 +4,6 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -import org.elasticsearch.gradle.VersionProperties - apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-test-artifact' diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 0aa3de6537f41..f6c95c1638534 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -271,13 +271,10 @@ public void authorize( } else { final RequestInfo requestInfo = new RequestInfo(authentication, unwrappedRequest, action, enclosingContext); final AuthorizationEngine engine = getAuthorizationEngine(authentication); - final ActionListener authzInfoListener = wrapPreservingContext( - ActionListener.wrap(authorizationInfo -> { - threadContext.putTransient(AUTHORIZATION_INFO_KEY, authorizationInfo); - maybeAuthorizeRunAs(requestInfo, auditId, authorizationInfo, listener); - }, listener::onFailure), - threadContext - ); + final ActionListener authzInfoListener = wrapPreservingContext(ActionListener.wrap(authorizationInfo -> { + threadContext.putTransient(AUTHORIZATION_INFO_KEY, authorizationInfo); + maybeAuthorizeRunAs(requestInfo, auditId, authorizationInfo, listener); + }, listener::onFailure), threadContext); engine.resolveAuthorizationInfo(requestInfo, authzInfoListener); } } From dfb8f8b16e957d32636699e64b95808260ddf0fb Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Thu, 31 Mar 2022 10:06:35 +0100 Subject: [PATCH 44/90] Detach tracing when starting an index's background tasks --- .../common/util/concurrent/ThreadContext.java | 23 +++++++++++++++++++ .../org/elasticsearch/index/IndexService.java | 12 ++++++---- 2 files changed, 30 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index af897436b2d56..c31518549b8ea 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ 
b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -159,6 +159,29 @@ public StoredContext newTraceContext() { return () -> threadLocal.set(context); } + public StoredContext clearTraceContext() { + final ThreadContextStruct context = threadLocal.get(); + final Map newRequestHeaders = new HashMap<>(context.requestHeaders); + final Map newTransientHeaders = new HashMap<>(context.transientHeaders); + + newRequestHeaders.remove(Task.TRACE_PARENT_HTTP_HEADER); + newRequestHeaders.remove(Task.TRACE_STATE); + + newTransientHeaders.remove("parent_" + Task.TRACE_PARENT_HTTP_HEADER); + newTransientHeaders.remove("parent_" + Task.TRACE_STATE); + + threadLocal.set( + new ThreadContextStruct( + newRequestHeaders, + context.responseHeaders, + newTransientHeaders, + context.isSystemContext, + context.warningHeadersSize + ) + ); + return () -> threadLocal.set(context); + } + private Map headers(ThreadContextStruct context) { Map map = Maps.newMapWithExpectedSize(org.elasticsearch.tasks.Task.HEADERS_TO_COPY.size()); for (String header : org.elasticsearch.tasks.Task.HEADERS_TO_COPY) { diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index 325678f93904a..6a27182ac381b 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -241,11 +241,13 @@ public IndexService( this.readerWrapper = wrapperFactory.apply(this); this.searchOperationListeners = Collections.unmodifiableList(searchOperationListeners); this.indexingOperationListeners = Collections.unmodifiableList(indexingOperationListeners); - // kick off async ops for the first shard in this index - this.refreshTask = new AsyncRefreshTask(this); - this.trimTranslogTask = new AsyncTrimTranslogTask(this); - this.globalCheckpointTask = new AsyncGlobalCheckpointTask(this); - this.retentionLeaseSyncTask = new AsyncRetentionLeaseSyncTask(this); + try (var ignored = threadPool.getThreadContext().clearTraceContext()) { + // kick off async ops for the first shard in this index + this.refreshTask = new AsyncRefreshTask(this); + this.trimTranslogTask = new AsyncTrimTranslogTask(this); + this.globalCheckpointTask = new AsyncGlobalCheckpointTask(this); + this.retentionLeaseSyncTask = new AsyncRetentionLeaseSyncTask(this); + } updateFsyncTaskIfNecessary(); } From d94ec59d46135f202984f8cc5f4cc40756a13c0e Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Thu, 31 Mar 2022 10:06:35 +0100 Subject: [PATCH 45/90] Detach tracing when starting an index's background tasks --- .../common/util/concurrent/ThreadContext.java | 23 +++++++++++++++++++ .../org/elasticsearch/index/IndexService.java | 12 ++++++---- 2 files changed, 30 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index af897436b2d56..c31518549b8ea 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -159,6 +159,29 @@ public StoredContext newTraceContext() { return () -> threadLocal.set(context); } + public StoredContext clearTraceContext() { + final ThreadContextStruct context = threadLocal.get(); + final Map newRequestHeaders = new HashMap<>(context.requestHeaders); + final Map newTransientHeaders = new 
HashMap<>(context.transientHeaders); + + newRequestHeaders.remove(Task.TRACE_PARENT_HTTP_HEADER); + newRequestHeaders.remove(Task.TRACE_STATE); + + newTransientHeaders.remove("parent_" + Task.TRACE_PARENT_HTTP_HEADER); + newTransientHeaders.remove("parent_" + Task.TRACE_STATE); + + threadLocal.set( + new ThreadContextStruct( + newRequestHeaders, + context.responseHeaders, + newTransientHeaders, + context.isSystemContext, + context.warningHeadersSize + ) + ); + return () -> threadLocal.set(context); + } + private Map headers(ThreadContextStruct context) { Map map = Maps.newMapWithExpectedSize(org.elasticsearch.tasks.Task.HEADERS_TO_COPY.size()); for (String header : org.elasticsearch.tasks.Task.HEADERS_TO_COPY) { diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index 325678f93904a..6a27182ac381b 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -241,11 +241,13 @@ public IndexService( this.readerWrapper = wrapperFactory.apply(this); this.searchOperationListeners = Collections.unmodifiableList(searchOperationListeners); this.indexingOperationListeners = Collections.unmodifiableList(indexingOperationListeners); - // kick off async ops for the first shard in this index - this.refreshTask = new AsyncRefreshTask(this); - this.trimTranslogTask = new AsyncTrimTranslogTask(this); - this.globalCheckpointTask = new AsyncGlobalCheckpointTask(this); - this.retentionLeaseSyncTask = new AsyncRetentionLeaseSyncTask(this); + try (var ignored = threadPool.getThreadContext().clearTraceContext()) { + // kick off async ops for the first shard in this index + this.refreshTask = new AsyncRefreshTask(this); + this.trimTranslogTask = new AsyncTrimTranslogTask(this); + this.globalCheckpointTask = new AsyncGlobalCheckpointTask(this); + this.retentionLeaseSyncTask = new AsyncRetentionLeaseSyncTask(this); + } updateFsyncTaskIfNecessary(); } From d834dd5a2ffd4543b65455fe7ee3334fdf4b42f3 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Thu, 31 Mar 2022 17:06:20 +0100 Subject: [PATCH 46/90] Start a doc about tracing --- TRACING.md | 65 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 65 insertions(+) create mode 100644 TRACING.md diff --git a/TRACING.md b/TRACING.md new file mode 100644 index 0000000000000..7c8f6999ef0e1 --- /dev/null +++ b/TRACING.md @@ -0,0 +1,65 @@ +# Tracing in Elasticsearch + +Elasticsearch is instrumented using the [OpenTelemetry][otel] API, which allows +us to gather traces and analyze what Elasticsearch is doing. + +## How is tracing implemented? + +The Elasticsearch server code contains a +[`tracing`](./server/src/main/java/org/elasticsearch/tracing/) package, which is +an abstraction over the OpenTelemetry API. All locations in the code that +perform instrumentation and tracing must use these abstractions. + +Separately, there is the [`apm-integration`](./x-pack/plugin/apm-integration/) +module, which works with the OpenTelemetry API directly to manipulate spans. + +## Where is tracing data sent? + +You need to have an OpenTelemetry server running somewhere. For example, you can +create a deployment in Elastic Cloud, and use Elastic's APM integration. + +## How is tracing data sent? + +This branch uses the OpenTelemetry SDK, which is a reference implementation of +the API.
Work is underway to use the Elastic APM agent for Java, which attaches +at runtime and removes the need for Elasticsearch to hard-code the use of an SDK. + +## What do we trace? + +We primarily trace "tasks". The tasks framework in Elasticsearch allows work to +be scheduled for execution, cancelled, executed in a different thread pool, and so +on. Tracing a task results in a "span", which represents the execution of the +task in the tracing system. We also instrument REST requests, which are not (at +present) modelled by tasks. + +A span can be associated with a parent span, which allows all spans in, for +example, a REST request to be grouped together. Elasticsearch supports the +[W3C Trace Context](https://www.w3.org/TR/trace-context/) headers, and uses +these headers to propagate the trace context between nodes. + +## Thread contexts and nested spans + +When a span is started, Elasticsearch tracks information about that span in the +current [thread +context](./server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java). +When a nested span is started, a new thread context is created, and the current +span information is moved so that it becomes the parent span information. + +Sometimes we need to detach new spans from their parent. For example, creating +an index starts some related background tasks, but these shouldn't be associated +with the REST request, otherwise all the background task spans will be +associated with the REST request for as long as Elasticsearch is running. + +## How do I trace something that isn't a task? + +First work out if you can turn it into a task. No, really. + +If you can't do that, you'll need to ensure that your class can access the +`Node`'s tracers, then call the appropriate methods on the tracers when a span +should start and end. + +## What attributes should I set? + +TODO. + +[otel]: https://opentelemetry.io/ From b611835d12d79bd751b08a040d0c8ee5ac744046 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Thu, 31 Mar 2022 20:49:52 +0100 Subject: [PATCH 47/90] Span attribute tweaks --- .../elasticsearch/xpack/apm/APMTracer.java | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index babb869fe6d40..d5d78f1bc21b3 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -11,6 +11,7 @@ import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.SpanBuilder; +import io.opentelemetry.api.trace.SpanKind; import io.opentelemetry.api.trace.Tracer; import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; import io.opentelemetry.context.Context; @@ -257,19 +258,20 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { // These attributes don't apply to HTTP spans. The APM server can infer a number of things // when "http."
attributes are present - if (traceable.getAttributes().keySet().stream().anyMatch(key -> key.startsWith("http.")) == false) { - // hack transactions to avoid the 'custom' transaction type - // this one is not part of OTel semantic attributes - spanBuilder.setAttribute("type", "elasticsearch"); - // hack spans to avoid the 'app' span.type, will make it use external/elasticsearch - // also allows to set destination resource name in map + final boolean isHttpSpan = traceable.getAttributes().keySet().stream().anyMatch(key -> key.startsWith("http.")); + if (isHttpSpan) { + spanBuilder.setSpanKind(SpanKind.SERVER); + } else { + spanBuilder.setSpanKind(SpanKind.INTERNAL); +// // hack transactions to avoid the 'custom' transaction type +// // this one is not part of OTel semantic attributes +// spanBuilder.setAttribute("type", "elasticsearch"); +// // hack spans to avoid the 'app' span.type, will make it use external/elasticsearch +// // also allows to set destination resource name in map spanBuilder.setAttribute(SemanticAttributes.MESSAGING_SYSTEM, "elasticsearch"); spanBuilder.setAttribute(SemanticAttributes.MESSAGING_DESTINATION, clusterService.getNodeName()); } - // spanBuilder.setAttribute(SemanticAttributes.DB_SYSTEM, "elasticsearch"); - // spanBuilder.setAttribute(SemanticAttributes.DB_NAME, clusterService.getNodeName()); - // this will duplicate the "resource attributes" that are defined globally // but providing them as span attributes allow easier mapping through labels as otel attributes are stored as-is only in // 7.16. spanBuilder.setAttribute(Traceable.AttributeKeys.NODE_NAME, clusterService.getNodeName()); spanBuilder.setAttribute(Traceable.AttributeKeys.CLUSTER_NAME, clusterService.getClusterName().value()); From 53058b32de934fb5814af670a7152171239ec203 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Thu, 31 Mar 2022 20:49:52 +0100 Subject: [PATCH 48/90] Span attribute tweaks --- .../elasticsearch/xpack/apm/APMTracer.java | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 8d53a796a1270..1c0d9590711dc 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -11,6 +11,7 @@ import io.opentelemetry.api.OpenTelemetry; import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.SpanBuilder; +import io.opentelemetry.api.trace.SpanKind; import io.opentelemetry.api.trace.Tracer; import io.opentelemetry.context.Context; import io.opentelemetry.context.propagation.TextMapGetter; @@ -172,19 +173,20 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { // These attributes don't apply to HTTP spans. The APM server can infer a number of things // when "http."
attributes are present - if (traceable.getAttributes().keySet().stream().anyMatch(key -> key.startsWith("http.")) == false) { - // hack transactions to avoid the 'custom' transaction type - // this one is not part of OTel semantic attributes - spanBuilder.setAttribute("type", "elasticsearch"); - // hack spans to avoid the 'app' span.type, will make it use external/elasticsearch - // also allows to set destination resource name in map + final boolean isHttpSpan = traceable.getAttributes().keySet().stream().anyMatch(key -> key.startsWith("http.")); + if (isHttpSpan) { + spanBuilder.setSpanKind(SpanKind.SERVER); + } else { + spanBuilder.setSpanKind(SpanKind.INTERNAL); +// // hack transactions to avoid the 'custom' transaction type +// // this one is not part of OTel semantic attributes +// spanBuilder.setAttribute("type", "elasticsearch"); +// // hack spans to avoid the 'app' span.type, will make it use external/elasticsearch +// // also allows to set destination resource name in map spanBuilder.setAttribute(SemanticAttributes.MESSAGING_SYSTEM, "elasticsearch"); spanBuilder.setAttribute(SemanticAttributes.MESSAGING_DESTINATION, clusterService.getNodeName()); } - // spanBuilder.setAttribute(SemanticAttributes.DB_SYSTEM, "elasticsearch"); - // spanBuilder.setAttribute(SemanticAttributes.DB_NAME, clusterService.getNodeName()); - // this will duplicate the "resource attributes" that are defined globally // but providing them as span attributes allow easier mapping through labels as otel attributes are stored as-is only in // 7.16. From 0989f1784f1f73d56c694621c3ceb5afd9b3f23b Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Thu, 31 Mar 2022 21:12:08 +0100 Subject: [PATCH 49/90] Add extra docker tag --- distribution/docker/build.gradle | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index caa007ec0a58d..66539041b6d8d 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -330,7 +330,8 @@ private static List generateTags(DockerBase base) { return [ "${image}:test", "${image}:${version}", - "docker.elastic.co/${namespace}/${image}:${version}" + "docker.elastic.co/${namespace}/${image}:${version}", + "docker.elastic.co/${namespace}/${image}:${version}-agent" ] } From 1be7e7ec8fa3b4c62202fb6b1c93aea204bff148 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Fri, 1 Apr 2022 13:39:31 +0100 Subject: [PATCH 50/90] Tweaks --- .../elasticsearch/xpack/apm/APMTracer.java | 20 +------------------ .../security/authz/AuthorizationService.java | 2 +- 2 files changed, 2 insertions(+), 20 deletions(-) diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 1c0d9590711dc..354505e40b1a4 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -15,7 +15,6 @@ import io.opentelemetry.api.trace.Tracer; import io.opentelemetry.context.Context; import io.opentelemetry.context.propagation.TextMapGetter; -import io.opentelemetry.semconv.trace.attributes.SemanticAttributes; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -145,7 +144,6 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { } spans.computeIfAbsent(traceable.getSpanId(), spanId -> 
AccessController.doPrivileged((PrivilegedAction) () -> { - // services might be in shutdown state by this point, but this is handled by the open telemetry internally final SpanBuilder spanBuilder = services.tracer.spanBuilder(traceable.getSpanName()); Context parentContext = getParentSpanContext(); if (parentContext != null) { @@ -171,25 +169,9 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { } } - // These attributes don't apply to HTTP spans. The APM server can infer a number of things - // when "http." attributes are present final boolean isHttpSpan = traceable.getAttributes().keySet().stream().anyMatch(key -> key.startsWith("http.")); - if (isHttpSpan) { - spanBuilder.setSpanKind(SpanKind.SERVER); - } else { - spanBuilder.setSpanKind(SpanKind.INTERNAL); -// // hack transactions to avoid the 'custom' transaction type -// // this one is not part of OTel semantic attributes -// spanBuilder.setAttribute("type", "elasticsearch"); -// // hack spans to avoid the 'app' span.type, will make it use external/elasticsearch -// // also allows to set destination resource name in map - spanBuilder.setAttribute(SemanticAttributes.MESSAGING_SYSTEM, "elasticsearch"); - spanBuilder.setAttribute(SemanticAttributes.MESSAGING_DESTINATION, clusterService.getNodeName()); - } + spanBuilder.setSpanKind(isHttpSpan ? SpanKind.SERVER : SpanKind.INTERNAL); - // this will duplicate the "resource attributes" that are defined globally - // but providing them as span attributes allow easier mapping through labels as otel attributes are stored as-is only in - // 7.16. spanBuilder.setAttribute(Traceable.AttributeKeys.NODE_NAME, clusterService.getNodeName()); spanBuilder.setAttribute(Traceable.AttributeKeys.CLUSTER_NAME, clusterService.getClusterName().value()); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index f6c95c1638534..e289a88f01de1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -111,7 +111,7 @@ public class AuthorizationService { true, Property.NodeScope ); - public static final Setting TRACE_AUTHORIZATION = Setting.boolSetting(setting("authz.tracing"), true, Property.NodeScope); + public static final Setting TRACE_AUTHORIZATION = Setting.boolSetting(setting("authz.tracing"), false, Property.NodeScope); private static final AuthorizationInfo SYSTEM_AUTHZ_INFO = () -> Collections.singletonMap( PRINCIPAL_ROLES_FIELD_NAME, new String[] { SystemUser.ROLE_NAME } From fd5f3cb25377608a2198d707c8bf9697f69c841c Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Tue, 12 Apr 2022 14:21:26 +0100 Subject: [PATCH 51/90] Bump APM agent --- build-tools-internal/version.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index b4698f26ec2d1..f058a0e511520 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -56,5 +56,5 @@ jimfs_guava = 30.1-jre networknt_json_schema_validator = 1.0.48 # tracing -apm_agent = 1.30.0 +apm_agent = 1.30.1 opentelemetry = 1.12.0 From ef6ae5f56e9ff75c836f32e0477c81799e19960d Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 13 Apr 2022 13:41:08 +0100 Subject: [PATCH 
52/90] Get tracing across nodes working again Turns out that if you disabled authz tracing, it prevented tracing headers being sent to other nodes because the authz service still started a new trace context, without starting a new span. Also require callers of `TaskManager#register` to handle their own trace contexts. Callers of TaskManager#registerAndExecute` don't have to do this, which is most of the codebase. This is to avoid trace header pollution in the thread context. --- run.sh | 6 +- .../cluster/InternalClusterInfoService.java | 188 ++++++------- .../cluster/service/MasterService.java | 246 +++++++++--------- .../RetentionLeaseBackgroundSyncAction.java | 78 +++--- .../index/seqno/RetentionLeaseSyncAction.java | 66 ++--- .../index/shard/PrimaryReplicaSyncer.java | 65 ++--- .../PersistentTasksNodeService.java | 92 ++++--- .../persistent/StartPersistentTaskAction.java | 1 + .../org/elasticsearch/tasks/TaskManager.java | 44 ++-- .../transport/RequestHandlerRegistry.java | 31 ++- .../transport/TransportService.java | 2 + .../PersistentTasksNodeServiceTests.java | 19 +- .../xpack/apm/TestOpenTelemetry.java | 241 +++++++++++++++++ .../elasticsearch/xpack/apm/APMTracer.java | 44 ++-- .../TransportSubmitAsyncSearchAction.java | 112 ++++---- .../action/InternalExecutePolicyAction.java | 98 +++---- .../TrainedModelAllocationNodeService.java | 24 +- .../ql/async/AsyncTaskManagementService.java | 30 ++- .../security/authz/AuthorizationService.java | 97 +++---- 19 files changed, 895 insertions(+), 589 deletions(-) create mode 100644 x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/TestOpenTelemetry.java diff --git a/run.sh b/run.sh index bb1824ee3c229..f6f29016eedfc 100755 --- a/run.sh +++ b/run.sh @@ -20,8 +20,10 @@ if [[ ! 
-f config/elasticsearch.keystore ]]; then echo "password" | ./bin/elasticsearch-keystore add -x 'bootstrap.password' fi -# export ES_JAVA_OPTS="-agentlib:jdwp=transport=dt_socket,server=n,suspend=n,address=*:5005 -ea " +# export ES_JAVA_OPTS="-agentlib:jdwp=transport=dt_socket,server=n,suspend=y,address=*:5007 " +export ES_JAVA_OPTS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5007 " + # export ES_JAVA_OPTS="-Djava.security.debug=failure" # export ES_JAVA_OPTS="-Djava.security.debug=access,failure" -exec ./bin/elasticsearch -Expack.apm.tracing.enabled=true +exec ./bin/elasticsearch -Expack.apm.tracing.enabled=true -Eingest.geoip.downloader.enabled=false diff --git a/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java index 57980ec033f87..654277187a73a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java +++ b/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java @@ -171,120 +171,124 @@ void execute() { nodesStatsRequest.clear(); nodesStatsRequest.addMetric(NodesStatsRequest.Metric.FS.metricName()); nodesStatsRequest.timeout(fetchTimeout); - client.admin().cluster().nodesStats(nodesStatsRequest, ActionListener.runAfter(new ActionListener<>() { - @Override - public void onResponse(NodesStatsResponse nodesStatsResponse) { - logger.trace("received node stats response"); - - for (final FailedNodeException failure : nodesStatsResponse.failures()) { - logger.warn( - new ParameterizedMessage("failed to retrieve stats for node [{}]", failure.nodeId()), - failure.getCause() + try (var ignored = threadPool.getThreadContext().clearTraceContext()) { + client.admin().cluster().nodesStats(nodesStatsRequest, ActionListener.runAfter(new ActionListener<>() { + @Override + public void onResponse(NodesStatsResponse nodesStatsResponse) { + logger.trace("received node stats response"); + + for (final FailedNodeException failure : nodesStatsResponse.failures()) { + logger.warn( + new ParameterizedMessage("failed to retrieve stats for node [{}]", failure.nodeId()), + failure.getCause() + ); + } + + ImmutableOpenMap.Builder leastAvailableUsagesBuilder = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder mostAvailableUsagesBuilder = ImmutableOpenMap.builder(); + fillDiskUsagePerNode( + adjustNodesStats(nodesStatsResponse.getNodes()), + leastAvailableUsagesBuilder, + mostAvailableUsagesBuilder ); + leastAvailableSpaceUsages = leastAvailableUsagesBuilder.build(); + mostAvailableSpaceUsages = mostAvailableUsagesBuilder.build(); } - ImmutableOpenMap.Builder leastAvailableUsagesBuilder = ImmutableOpenMap.builder(); - ImmutableOpenMap.Builder mostAvailableUsagesBuilder = ImmutableOpenMap.builder(); - fillDiskUsagePerNode( - adjustNodesStats(nodesStatsResponse.getNodes()), - leastAvailableUsagesBuilder, - mostAvailableUsagesBuilder - ); - leastAvailableSpaceUsages = leastAvailableUsagesBuilder.build(); - mostAvailableSpaceUsages = mostAvailableUsagesBuilder.build(); - } - - @Override - public void onFailure(Exception e) { - if (e instanceof ClusterBlockException) { - logger.trace("failed to retrieve node stats", e); - } else { - logger.warn("failed to retrieve node stats", e); + @Override + public void onFailure(Exception e) { + if (e instanceof ClusterBlockException) { + logger.trace("failed to retrieve node stats", e); + } else { + logger.warn("failed to retrieve node stats", e); + } + leastAvailableSpaceUsages = 
ImmutableOpenMap.of(); + mostAvailableSpaceUsages = ImmutableOpenMap.of(); } - leastAvailableSpaceUsages = ImmutableOpenMap.of(); - mostAvailableSpaceUsages = ImmutableOpenMap.of(); - } - }, this::onStatsProcessed)); + }, this::onStatsProcessed)); + } final IndicesStatsRequest indicesStatsRequest = new IndicesStatsRequest(); indicesStatsRequest.clear(); indicesStatsRequest.store(true); indicesStatsRequest.indicesOptions(IndicesOptions.STRICT_EXPAND_OPEN_CLOSED_HIDDEN); indicesStatsRequest.timeout(fetchTimeout); - client.admin().indices().stats(indicesStatsRequest, ActionListener.runAfter(new ActionListener<>() { - @Override - public void onResponse(IndicesStatsResponse indicesStatsResponse) { - logger.trace("received indices stats response"); - - if (indicesStatsResponse.getShardFailures().length > 0) { - final Set failedNodeIds = new HashSet<>(); - for (final DefaultShardOperationFailedException shardFailure : indicesStatsResponse.getShardFailures()) { - if (shardFailure.getCause()instanceof final FailedNodeException failedNodeException) { - if (failedNodeIds.add(failedNodeException.nodeId())) { + try (var ignored = threadPool.getThreadContext().clearTraceContext()) { + client.admin().indices().stats(indicesStatsRequest, ActionListener.runAfter(new ActionListener<>() { + @Override + public void onResponse(IndicesStatsResponse indicesStatsResponse) { + logger.trace("received indices stats response"); + + if (indicesStatsResponse.getShardFailures().length > 0) { + final Set failedNodeIds = new HashSet<>(); + for (final DefaultShardOperationFailedException shardFailure : indicesStatsResponse.getShardFailures()) { + if (shardFailure.getCause()instanceof final FailedNodeException failedNodeException) { + if (failedNodeIds.add(failedNodeException.nodeId())) { + logger.warn( + new ParameterizedMessage( + "failed to retrieve shard stats from node [{}]", + failedNodeException.nodeId() + ), + failedNodeException.getCause() + ); + } + logger.trace( + new ParameterizedMessage( + "failed to retrieve stats for shard [{}][{}]", + shardFailure.index(), + shardFailure.shardId() + ), + shardFailure.getCause() + ); + } else { logger.warn( new ParameterizedMessage( - "failed to retrieve shard stats from node [{}]", - failedNodeException.nodeId() + "failed to retrieve stats for shard [{}][{}]", + shardFailure.index(), + shardFailure.shardId() ), - failedNodeException.getCause() + shardFailure.getCause() ); } - logger.trace( - new ParameterizedMessage( - "failed to retrieve stats for shard [{}][{}]", - shardFailure.index(), - shardFailure.shardId() - ), - shardFailure.getCause() - ); - } else { - logger.warn( - new ParameterizedMessage( - "failed to retrieve stats for shard [{}][{}]", - shardFailure.index(), - shardFailure.shardId() - ), - shardFailure.getCause() - ); } } - } - final ShardStats[] stats = indicesStatsResponse.getShards(); - final ImmutableOpenMap.Builder shardSizeByIdentifierBuilder = ImmutableOpenMap.builder(); - final ImmutableOpenMap.Builder shardDataSetSizeBuilder = ImmutableOpenMap.builder(); - final ImmutableOpenMap.Builder dataPathByShardRoutingBuilder = ImmutableOpenMap.builder(); - final Map reservedSpaceBuilders = new HashMap<>(); - buildShardLevelInfo( - stats, - shardSizeByIdentifierBuilder, - shardDataSetSizeBuilder, - dataPathByShardRoutingBuilder, - reservedSpaceBuilders - ); + final ShardStats[] stats = indicesStatsResponse.getShards(); + final ImmutableOpenMap.Builder shardSizeByIdentifierBuilder = ImmutableOpenMap.builder(); + final ImmutableOpenMap.Builder 
shardDataSetSizeBuilder = ImmutableOpenMap.builder(); + final ImmutableOpenMap.Builder dataPathByShardRoutingBuilder = ImmutableOpenMap.builder(); + final Map reservedSpaceBuilders = new HashMap<>(); + buildShardLevelInfo( + stats, + shardSizeByIdentifierBuilder, + shardDataSetSizeBuilder, + dataPathByShardRoutingBuilder, + reservedSpaceBuilders + ); - final ImmutableOpenMap.Builder rsrvdSpace = ImmutableOpenMap - .builder(); - reservedSpaceBuilders.forEach((nodeAndPath, builder) -> rsrvdSpace.put(nodeAndPath, builder.build())); + final ImmutableOpenMap.Builder rsrvdSpace = ImmutableOpenMap + .builder(); + reservedSpaceBuilders.forEach((nodeAndPath, builder) -> rsrvdSpace.put(nodeAndPath, builder.build())); - indicesStatsSummary = new IndicesStatsSummary( - shardSizeByIdentifierBuilder.build(), - shardDataSetSizeBuilder.build(), - dataPathByShardRoutingBuilder.build(), - rsrvdSpace.build() - ); - } + indicesStatsSummary = new IndicesStatsSummary( + shardSizeByIdentifierBuilder.build(), + shardDataSetSizeBuilder.build(), + dataPathByShardRoutingBuilder.build(), + rsrvdSpace.build() + ); + } - @Override - public void onFailure(Exception e) { - if (e instanceof ClusterBlockException) { - logger.trace("failed to retrieve indices stats", e); - } else { - logger.warn("failed to retrieve indices stats", e); + @Override + public void onFailure(Exception e) { + if (e instanceof ClusterBlockException) { + logger.trace("failed to retrieve indices stats", e); + } else { + logger.warn("failed to retrieve indices stats", e); + } + indicesStatsSummary = IndicesStatsSummary.EMPTY; } - indicesStatsSummary = IndicesStatsSummary.EMPTY; - } - }, this::onStatsProcessed)); + }, this::onStatsProcessed)); + } } private void onStatsProcessed() { diff --git a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java index be9fa451e7079..8943b725b4107 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java @@ -286,144 +286,146 @@ private void runTasks( logExecutionTime(executionTime, "notify listeners on unchanged cluster state", summary); clusterStateUpdateStatsTracker.onUnchangedClusterState(computationTime.millis(), executionTime.millis()); } else { - final Task task = taskManager.register("master", STATE_UPDATE_ACTION_NAME, new TaskAwareRequest() { - @Override - public void setParentTask(TaskId taskId) {} - - @Override - public TaskId getParentTask() { - return TaskId.EMPTY_TASK_ID; - } + try (var ignored = threadPool.getThreadContext().newTraceContext()) { + final Task task = taskManager.register("master", STATE_UPDATE_ACTION_NAME, new TaskAwareRequest() { + @Override + public void setParentTask(TaskId taskId) {} + + @Override + public TaskId getParentTask() { + return TaskId.EMPTY_TASK_ID; + } - @Override - public String getDescription() { - return "publication of cluster state [" + newClusterState.getVersion() + "]"; - } - }); - try { - if (logger.isTraceEnabled()) { - logger.trace("cluster state updated, source [{}]\n{}", summary, newClusterState); - } else { - logger.debug("cluster state updated, version [{}], source [{}]", newClusterState.version(), summary); - } - final long publicationStartTime = threadPool.rawRelativeTimeInMillis(); + @Override + public String getDescription() { + return "publication of cluster state [" + newClusterState.getVersion() + "]"; + } + }); try { - final 
ClusterStatePublicationEvent clusterStatePublicationEvent = new ClusterStatePublicationEvent( - summary, - previousClusterState, - newClusterState, - task, - computationTime.millis(), - publicationStartTime - ); - - // new cluster state, notify all listeners - final DiscoveryNodes.Delta nodesDelta = newClusterState.nodes().delta(previousClusterState.nodes()); - if (nodesDelta.hasChanges() && logger.isInfoEnabled()) { - String nodesDeltaSummary = nodesDelta.shortSummary(); - if (nodesDeltaSummary.length() > 0) { - logger.info( - "{}, term: {}, version: {}, delta: {}", - summary, - newClusterState.term(), - newClusterState.version(), - nodesDeltaSummary - ); - } + if (logger.isTraceEnabled()) { + logger.trace("cluster state updated, source [{}]\n{}", summary, newClusterState); + } else { + logger.debug("cluster state updated, version [{}], source [{}]", newClusterState.version(), summary); } - - logger.debug("publishing cluster state version [{}]", newClusterState.version()); - publish( - clusterStatePublicationEvent, - new CompositeTaskAckListener( - executionResults.stream() - .map(ExecutionResult::getContextPreservingAckListener) - .filter(Objects::nonNull) - .map( - contextPreservingAckListener -> new TaskAckListener( - contextPreservingAckListener, - newClusterState.version(), - newClusterState.nodes(), - threadPool - ) - ) - .toList() - ), - new ActionListener<>() { - @Override - public void onResponse(Void unused) { - final long notificationStartTime = threadPool.rawRelativeTimeInMillis(); - for (final var executionResult : executionResults) { - executionResult.onPublishSuccess(newClusterState); - } - - try { - executor.clusterStatePublished(newClusterState); - } catch (Exception e) { - logger.error( - () -> new ParameterizedMessage( - "exception thrown while notifying executor of new cluster state publication [{}]", - summary - ), - e - ); - } - final TimeValue executionTime = getTimeSince(notificationStartTime); - logExecutionTime( - executionTime, - "notify listeners on successful publication of cluster state (version: " - + newClusterState.version() - + ", uuid: " - + newClusterState.stateUUID() - + ')', - summary - ); - clusterStateUpdateStatsTracker.onPublicationSuccess( - threadPool.rawRelativeTimeInMillis(), - clusterStatePublicationEvent, - executionTime.millis() + final long publicationStartTime = threadPool.rawRelativeTimeInMillis(); + try { + final ClusterStatePublicationEvent clusterStatePublicationEvent = new ClusterStatePublicationEvent( + summary, + previousClusterState, + newClusterState, + task, + computationTime.millis(), + publicationStartTime + ); + + // new cluster state, notify all listeners + final DiscoveryNodes.Delta nodesDelta = newClusterState.nodes().delta(previousClusterState.nodes()); + if (nodesDelta.hasChanges() && logger.isInfoEnabled()) { + String nodesDeltaSummary = nodesDelta.shortSummary(); + if (nodesDeltaSummary.length() > 0) { + logger.info( + "{}, term: {}, version: {}, delta: {}", + summary, + newClusterState.term(), + newClusterState.version(), + nodesDeltaSummary ); } + } - @Override - public void onFailure(Exception exception) { - if (exception instanceof FailedToCommitClusterStateException failedToCommitClusterStateException) { + logger.debug("publishing cluster state version [{}]", newClusterState.version()); + publish( + clusterStatePublicationEvent, + new CompositeTaskAckListener( + executionResults.stream() + .map(ExecutionResult::getContextPreservingAckListener) + .filter(Objects::nonNull) + .map( + contextPreservingAckListener -> 
new TaskAckListener( + contextPreservingAckListener, + newClusterState.version(), + newClusterState.nodes(), + threadPool + ) + ) + .toList() + ), + new ActionListener<>() { + @Override + public void onResponse(Void unused) { final long notificationStartTime = threadPool.rawRelativeTimeInMillis(); - final long version = newClusterState.version(); - logger.warn( - () -> new ParameterizedMessage( - "failing [{}]: failed to commit cluster state version [{}]", - summary, - version - ), - exception - ); for (final var executionResult : executionResults) { - executionResult.onPublishFailure(failedToCommitClusterStateException); + executionResult.onPublishSuccess(newClusterState); } - final long notificationMillis = threadPool.rawRelativeTimeInMillis() - notificationStartTime; - clusterStateUpdateStatsTracker.onPublicationFailure( - threadPool.rawRelativeTimeInMillis(), - clusterStatePublicationEvent, - notificationMillis + + try { + executor.clusterStatePublished(newClusterState); + } catch (Exception e) { + logger.error( + () -> new ParameterizedMessage( + "exception thrown while notifying executor of new cluster state publication [{}]", + summary + ), + e + ); + } + final TimeValue executionTime = getTimeSince(notificationStartTime); + logExecutionTime( + executionTime, + "notify listeners on successful publication of cluster state (version: " + + newClusterState.version() + + ", uuid: " + + newClusterState.stateUUID() + + ')', + summary ); - } else { - assert false : exception; - clusterStateUpdateStatsTracker.onPublicationFailure( + clusterStateUpdateStatsTracker.onPublicationSuccess( threadPool.rawRelativeTimeInMillis(), clusterStatePublicationEvent, - 0L + executionTime.millis() ); - handleException(summary, publicationStartTime, newClusterState, exception); + } + + @Override + public void onFailure(Exception exception) { + if (exception instanceof FailedToCommitClusterStateException failedToCommitClusterStateException) { + final long notificationStartTime = threadPool.rawRelativeTimeInMillis(); + final long version = newClusterState.version(); + logger.warn( + () -> new ParameterizedMessage( + "failing [{}]: failed to commit cluster state version [{}]", + summary, + version + ), + exception + ); + for (final var executionResult : executionResults) { + executionResult.onPublishFailure(failedToCommitClusterStateException); + } + final long notificationMillis = threadPool.rawRelativeTimeInMillis() - notificationStartTime; + clusterStateUpdateStatsTracker.onPublicationFailure( + threadPool.rawRelativeTimeInMillis(), + clusterStatePublicationEvent, + notificationMillis + ); + } else { + assert false : exception; + clusterStateUpdateStatsTracker.onPublicationFailure( + threadPool.rawRelativeTimeInMillis(), + clusterStatePublicationEvent, + 0L + ); + handleException(summary, publicationStartTime, newClusterState, exception); + } } } - } - ); - } catch (Exception e) { - handleException(summary, publicationStartTime, newClusterState, e); + ); + } catch (Exception e) { + handleException(summary, publicationStartTime, newClusterState, e); + } + } finally { + taskManager.unregister(task); } - } finally { - taskManager.unregister(task); } } } diff --git a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseBackgroundSyncAction.java b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseBackgroundSyncAction.java index bf736ab0c7630..41174d671c5b3 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseBackgroundSyncAction.java +++ 
b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseBackgroundSyncAction.java @@ -98,46 +98,52 @@ final void backgroundSync(ShardId shardId, String primaryAllocationId, long prim // we have to execute under the system context so that if security is enabled the sync is authorized threadContext.markAsSystemContext(); final Request request = new Request(shardId, retentionLeases); - final ReplicationTask task = (ReplicationTask) taskManager.register("transport", "retention_lease_background_sync", request); - transportService.sendChildRequest( - clusterService.localNode(), - transportPrimaryAction, - new ConcreteShardRequest<>(request, primaryAllocationId, primaryTerm), - task, - transportOptions, - new TransportResponseHandler() { - @Override - public ReplicationResponse read(StreamInput in) throws IOException { - return newResponseInstance(in); - } - - @Override - public void handleResponse(ReplicationResponse response) { - task.setPhase("finished"); - taskManager.unregister(task); - } + try (var ignored = threadContext.newTraceContext()) { + final ReplicationTask task = (ReplicationTask) taskManager.register( + "transport", + "retention_lease_background_sync", + request + ); + transportService.sendChildRequest( + clusterService.localNode(), + transportPrimaryAction, + new ConcreteShardRequest<>(request, primaryAllocationId, primaryTerm), + task, + transportOptions, + new TransportResponseHandler() { + @Override + public ReplicationResponse read(StreamInput in) throws IOException { + return newResponseInstance(in); + } - @Override - public void handleException(TransportException e) { - task.setPhase("finished"); - taskManager.unregister(task); - if (ExceptionsHelper.unwrap(e, NodeClosedException.class) != null) { - // node shutting down - return; + @Override + public void handleResponse(ReplicationResponse response) { + task.setPhase("finished"); + taskManager.unregister(task); } - if (ExceptionsHelper.unwrap( - e, - IndexNotFoundException.class, - AlreadyClosedException.class, - IndexShardClosedException.class - ) != null) { - // the index was deleted or the shard is closed - return; + + @Override + public void handleException(TransportException e) { + task.setPhase("finished"); + taskManager.unregister(task); + if (ExceptionsHelper.unwrap(e, NodeClosedException.class) != null) { + // node shutting down + return; + } + if (ExceptionsHelper.unwrap( + e, + IndexNotFoundException.class, + AlreadyClosedException.class, + IndexShardClosedException.class + ) != null) { + // the index was deleted or the shard is closed + return; + } + getLogger().warn(new ParameterizedMessage("{} retention lease background sync failed", shardId), e); } - getLogger().warn(new ParameterizedMessage("{} retention lease background sync failed", shardId), e); } - } - ); + ); + } } } diff --git a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java index cd359678b6d1d..c27abfa2cc75a 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java @@ -110,42 +110,44 @@ final void sync( // we have to execute under the system context so that if security is enabled the sync is authorized threadContext.markAsSystemContext(); final Request request = new Request(shardId, retentionLeases); - final ReplicationTask task = (ReplicationTask) taskManager.register("transport", 
"retention_lease_sync", request); - transportService.sendChildRequest( - clusterService.localNode(), - transportPrimaryAction, - new ConcreteShardRequest<>(request, primaryAllocationId, primaryTerm), - task, - transportOptions, - new TransportResponseHandler() { - @Override - public ReplicationResponse read(StreamInput in) throws IOException { - return newResponseInstance(in); - } + try (var ignored = threadContext.newTraceContext()) { + final ReplicationTask task = (ReplicationTask) taskManager.register("transport", "retention_lease_sync", request); + transportService.sendChildRequest( + clusterService.localNode(), + transportPrimaryAction, + new ConcreteShardRequest<>(request, primaryAllocationId, primaryTerm), + task, + transportOptions, + new TransportResponseHandler() { + @Override + public ReplicationResponse read(StreamInput in) throws IOException { + return newResponseInstance(in); + } - @Override - public void handleResponse(ReplicationResponse response) { - task.setPhase("finished"); - taskManager.unregister(task); - listener.onResponse(response); - } + @Override + public void handleResponse(ReplicationResponse response) { + task.setPhase("finished"); + taskManager.unregister(task); + listener.onResponse(response); + } - @Override - public void handleException(TransportException e) { - if (ExceptionsHelper.unwrap( - e, - IndexNotFoundException.class, - AlreadyClosedException.class, - IndexShardClosedException.class - ) == null) { - getLogger().warn(new ParameterizedMessage("{} retention lease sync failed", shardId), e); + @Override + public void handleException(TransportException e) { + if (ExceptionsHelper.unwrap( + e, + IndexNotFoundException.class, + AlreadyClosedException.class, + IndexShardClosedException.class + ) == null) { + getLogger().warn(new ParameterizedMessage("{} retention lease sync failed", shardId), e); + } + task.setPhase("finished"); + taskManager.unregister(task); + listener.onFailure(e); } - task.setPhase("finished"); - taskManager.unregister(task); - listener.onFailure(e); } - } - ); + ); + } } } diff --git a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java index 6efc6395f95bc..3e9d1d1adae9c 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java +++ b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java @@ -162,39 +162,42 @@ private void resync( ) { ResyncRequest request = new ResyncRequest(shardId, primaryAllocationId); final TaskManager taskManager = transportService.getTaskManager(); - ResyncTask resyncTask = (ResyncTask) taskManager.register("transport", "resync", request); // it's not transport :-) - ActionListener wrappedListener = new ActionListener() { - @Override - public void onResponse(Void ignore) { - resyncTask.setPhase("finished"); - taskManager.unregister(resyncTask); - listener.onResponse(resyncTask); - } - @Override - public void onFailure(Exception e) { - resyncTask.setPhase("finished"); - taskManager.unregister(resyncTask); - listener.onFailure(e); + try (var ignored = transportService.getThreadPool().getThreadContext().newTraceContext()) { + ResyncTask resyncTask = (ResyncTask) taskManager.register("transport", "resync", request); // it's not transport :-) + ActionListener wrappedListener = new ActionListener() { + @Override + public void onResponse(Void ignore) { + resyncTask.setPhase("finished"); + taskManager.unregister(resyncTask); + listener.onResponse(resyncTask); + } 
+ + @Override + public void onFailure(Exception e) { + resyncTask.setPhase("finished"); + taskManager.unregister(resyncTask); + listener.onFailure(e); + } + }; + try { + new SnapshotSender( + syncAction, + resyncTask, + shardId, + primaryAllocationId, + primaryTerm, + snapshot, + chunkSize.bytesAsInt(), + startingSeqNo, + maxSeqNo, + maxSeenAutoIdTimestamp, + transportService.getThreadPool().generic(), + wrappedListener + ).run(); + } catch (Exception e) { + wrappedListener.onFailure(e); } - }; - try { - new SnapshotSender( - syncAction, - resyncTask, - shardId, - primaryAllocationId, - primaryTerm, - snapshot, - chunkSize.bytesAsInt(), - startingSeqNo, - maxSeqNo, - maxSeenAutoIdTimestamp, - transportService.getThreadPool().generic(), - wrappedListener - ).run(); - } catch (Exception e) { - wrappedListener.onFailure(e); } } diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java index 7f28d6302fb72..1dc3ce2d38e2f 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java @@ -23,6 +23,7 @@ import org.elasticsearch.tasks.TaskAwareRequest; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -42,6 +43,7 @@ public class PersistentTasksNodeService implements ClusterStateListener { private static final Logger logger = LogManager.getLogger(PersistentTasksNodeService.class); + private final ThreadPool threadPool; private final Map runningTasks = new HashMap<>(); private final PersistentTasksService persistentTasksService; private final PersistentTasksExecutorRegistry persistentTasksExecutorRegistry; @@ -49,11 +51,13 @@ public class PersistentTasksNodeService implements ClusterStateListener { private final NodePersistentTasksExecutor nodePersistentTasksExecutor; public PersistentTasksNodeService( + ThreadPool threadPool, PersistentTasksService persistentTasksService, PersistentTasksExecutorRegistry persistentTasksExecutorRegistry, TaskManager taskManager, NodePersistentTasksExecutor nodePersistentTasksExecutor ) { + this.threadPool = threadPool; this.persistentTasksService = persistentTasksService; this.persistentTasksExecutorRegistry = persistentTasksExecutorRegistry; this.taskManager = taskManager; @@ -182,56 +186,58 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, } }; - AllocatedPersistentTask task; - try { - task = (AllocatedPersistentTask) taskManager.register("persistent", taskInProgress.getTaskName() + "[c]", request); - } catch (Exception e) { - logger.error( - "Fatal error registering persistent task [" - + taskInProgress.getTaskName() - + "] with id [" - + taskInProgress.getId() - + "] and allocation id [" - + taskInProgress.getAllocationId() - + "], removing from persistent tasks", - e - ); - notifyMasterOfFailedTask(taskInProgress, e); - return; - } - - boolean processed = false; - Exception initializationException = null; - try { - task.init(persistentTasksService, taskManager, taskInProgress.getId(), taskInProgress.getAllocationId()); - logger.trace( - "Persistent task [{}] with id [{}] and allocation id [{}] was created", - task.getAction(), - task.getPersistentTaskId(), - task.getAllocationId() - ); + try (var ignored = 
threadPool.getThreadContext().newTraceContext()) { + AllocatedPersistentTask task; try { - runningTasks.put(taskInProgress.getAllocationId(), task); - nodePersistentTasksExecutor.executeTask(taskInProgress.getParams(), taskInProgress.getState(), task, executor); + task = (AllocatedPersistentTask) taskManager.register("persistent", taskInProgress.getTaskName() + "[c]", request); } catch (Exception e) { - // Submit task failure - task.markAsFailed(e); + logger.error( + "Fatal error registering persistent task [" + + taskInProgress.getTaskName() + + "] with id [" + + taskInProgress.getId() + + "] and allocation id [" + + taskInProgress.getAllocationId() + + "], removing from persistent tasks", + e + ); + notifyMasterOfFailedTask(taskInProgress, e); + return; } - processed = true; - } catch (Exception e) { - initializationException = e; - } finally { - if (processed == false) { - // something went wrong - unregistering task - logger.warn( - "Persistent task [{}] with id [{}] and allocation id [{}] failed to create", + + boolean processed = false; + Exception initializationException = null; + try { + task.init(persistentTasksService, taskManager, taskInProgress.getId(), taskInProgress.getAllocationId()); + logger.trace( + "Persistent task [{}] with id [{}] and allocation id [{}] was created", task.getAction(), task.getPersistentTaskId(), task.getAllocationId() ); - taskManager.unregister(task); - if (initializationException != null) { - notifyMasterOfFailedTask(taskInProgress, initializationException); + try { + runningTasks.put(taskInProgress.getAllocationId(), task); + nodePersistentTasksExecutor.executeTask(taskInProgress.getParams(), taskInProgress.getState(), task, executor); + } catch (Exception e) { + // Submit task failure + task.markAsFailed(e); + } + processed = true; + } catch (Exception e) { + initializationException = e; + } finally { + if (processed == false) { + // something went wrong - unregistering task + logger.warn( + "Persistent task [{}] with id [{}] and allocation id [{}] failed to create", + task.getAction(), + task.getPersistentTaskId(), + task.getAllocationId() + ); + taskManager.unregister(task); + if (initializationException != null) { + notifyMasterOfFailedTask(taskInProgress, initializationException); + } } } } diff --git a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java index 822f2af17e054..e5a4c4d5d344b 100644 --- a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java @@ -194,6 +194,7 @@ public TransportAction( NodePersistentTasksExecutor executor = new NodePersistentTasksExecutor(threadPool); clusterService.addListener( new PersistentTasksNodeService( + threadPool, persistentTasksService, persistentTasksExecutorRegistry, transportService.getTaskManager(), diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskManager.java b/server/src/main/java/org/elasticsearch/tasks/TaskManager.java index 6bc2370fc38c6..e5e590bd1f67d 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskManager.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskManager.java @@ -122,33 +122,31 @@ public Task register(String type, String action, TaskAwareRequest request) { long maxSize = maxHeaderSize.getBytes(); ThreadContext threadContext = threadPool.getThreadContext(); - try (var ignored = threadContext.newTraceContext()) { - for (String key 
: taskHeaders) { - String httpHeader = threadContext.getHeader(key); - if (httpHeader != null) { - headerSize += key.length() * 2 + httpHeader.length() * 2; - if (headerSize > maxSize) { - throw new IllegalArgumentException("Request exceeded the maximum size of task headers " + maxHeaderSize); - } - headers.put(key, httpHeader); + for (String key : taskHeaders) { + String httpHeader = threadContext.getHeader(key); + if (httpHeader != null) { + headerSize += key.length() * 2 + httpHeader.length() * 2; + if (headerSize > maxSize) { + throw new IllegalArgumentException("Request exceeded the maximum size of task headers " + maxHeaderSize); } + headers.put(key, httpHeader); } - Task task = request.createTask(taskIdGenerator.incrementAndGet(), type, action, request.getParentTask(), headers); - Objects.requireNonNull(task); - assert task.getParentTaskId().equals(request.getParentTask()) : "Request [ " + request + "] didn't preserve it parentTaskId"; - if (logger.isTraceEnabled()) { - logger.trace("register {} [{}] [{}] [{}]", task.getId(), type, action, task.getDescription()); - } + } + Task task = request.createTask(taskIdGenerator.incrementAndGet(), type, action, request.getParentTask(), headers); + Objects.requireNonNull(task); + assert task.getParentTaskId().equals(request.getParentTask()) : "Request [ " + request + "] didn't preserve it parentTaskId"; + if (logger.isTraceEnabled()) { + logger.trace("register {} [{}] [{}] [{}]", task.getId(), type, action, task.getDescription()); + } - if (task instanceof CancellableTask) { - registerCancellableTask(task); - } else { - Task previousTask = tasks.put(task.getId(), task); - assert previousTask == null; - taskTracer.onTaskRegistered(threadContext, task); - } - return task; + if (task instanceof CancellableTask) { + registerCancellableTask(task); + } else { + Task previousTask = tasks.put(task.getId(), task); + assert previousTask == null; + taskTracer.onTaskRegistered(threadContext, task); } + return task; } public Task registerAndExecute( diff --git a/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java b/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java index abbea09050774..4c3f02affdb84 100644 --- a/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java +++ b/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java @@ -15,11 +15,13 @@ import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; public class RequestHandlerRegistry { + private final ThreadPool threadPool; private final String action; private final TransportRequestHandler handler; private final boolean forceExecution; @@ -29,6 +31,7 @@ public class RequestHandlerRegistry { private final Writeable.Reader requestReader; public RequestHandlerRegistry( + ThreadPool threadPool, String action, Writeable.Reader requestReader, TaskManager taskManager, @@ -37,6 +40,7 @@ public RequestHandlerRegistry( boolean forceExecution, boolean canTripCircuitBreaker ) { + this.threadPool = threadPool; this.action = action; this.requestReader = requestReader; this.handler = handler; @@ -55,19 +59,21 @@ public Request newRequest(StreamInput in) throws IOException { } public void processMessageReceived(Request request, TransportChannel channel) throws Exception { - final Task task = taskManager.register(channel.getChannelType(), action, request); - Releasable 
unregisterTask = () -> taskManager.unregister(task); - try { - if (channel instanceof TcpTransportChannel && task instanceof CancellableTask) { - final TcpChannel tcpChannel = ((TcpTransportChannel) channel).getChannel(); - final Releasable stopTracking = taskManager.startTrackingCancellableChannelTask(tcpChannel, (CancellableTask) task); - unregisterTask = Releasables.wrap(unregisterTask, stopTracking); + try (var ignored = threadPool.getThreadContext().newTraceContext()) { + final Task task = taskManager.register(channel.getChannelType(), action, request); + Releasable unregisterTask = () -> taskManager.unregister(task); + try { + if (channel instanceof TcpTransportChannel && task instanceof CancellableTask) { + final TcpChannel tcpChannel = ((TcpTransportChannel) channel).getChannel(); + final Releasable stopTracking = taskManager.startTrackingCancellableChannelTask(tcpChannel, (CancellableTask) task); + unregisterTask = Releasables.wrap(unregisterTask, stopTracking); + } + final TaskTransportChannel taskTransportChannel = new TaskTransportChannel(channel, unregisterTask); + handler.messageReceived(request, taskTransportChannel, task); + unregisterTask = null; + } finally { + Releasables.close(unregisterTask); } - final TaskTransportChannel taskTransportChannel = new TaskTransportChannel(channel, unregisterTask); - handler.messageReceived(request, taskTransportChannel, task); - unregisterTask = null; - } finally { - Releasables.close(unregisterTask); } } @@ -97,6 +103,7 @@ public static RequestHandlerRegistry replaceHand TransportRequestHandler handler ) { return new RequestHandlerRegistry<>( + registry.threadPool, registry.action, registry.requestReader, registry.taskManager, diff --git a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java index 73065a0d8a9ea..42197adf7df10 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java @@ -1037,6 +1037,7 @@ public void registerRequestHandler( validateActionName(action); handler = interceptor.interceptHandler(action, executor, false, handler); RequestHandlerRegistry reg = new RequestHandlerRegistry<>( + threadPool, action, requestReader, taskManager, @@ -1069,6 +1070,7 @@ public void registerRequestHandler( validateActionName(action); handler = interceptor.interceptHandler(action, executor, forceExecution, handler); RequestHandlerRegistry reg = new RequestHandlerRegistry<>( + threadPool, action, requestReader, taskManager, diff --git a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java index dd4b8cbb0eea2..26889fed49902 100644 --- a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java +++ b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java @@ -109,6 +109,7 @@ public void testStartTask() { MockExecutor executor = new MockExecutor(); PersistentTasksNodeService coordinator = new PersistentTasksNodeService( + new TestThreadPool(getClass().getName()), persistentTasksService, registry, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()), @@ -224,6 +225,7 @@ public void testParamsStatusAndNodeTaskAreDelegated() throws Exception { MockExecutor executor = new MockExecutor(); PersistentTasksNodeService coordinator = new PersistentTasksNodeService( + 
new TestThreadPool(getClass().getName()), persistentTasksService, registry, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()), @@ -285,7 +287,13 @@ public void sendCompletionRequest( int nonLocalNodesCount = randomInt(10); MockExecutor executor = new MockExecutor(); TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()); - PersistentTasksNodeService coordinator = new PersistentTasksNodeService(persistentTasksService, registry, taskManager, executor); + PersistentTasksNodeService coordinator = new PersistentTasksNodeService( + new TestThreadPool(getClass().getName()), + persistentTasksService, + registry, + taskManager, + executor + ); ClusterState state = createInitialClusterState(nonLocalNodesCount, Settings.EMPTY); @@ -374,7 +382,13 @@ public void sendCompletionRequest( int nonLocalNodesCount = randomInt(10); MockExecutor executor = new MockExecutor(); TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()); - PersistentTasksNodeService coordinator = new PersistentTasksNodeService(persistentTasksService, registry, taskManager, executor); + PersistentTasksNodeService coordinator = new PersistentTasksNodeService( + new TestThreadPool(getClass().getName()), + persistentTasksService, + registry, + taskManager, + executor + ); ClusterState state = createInitialClusterState(nonLocalNodesCount, Settings.EMPTY); @@ -476,6 +490,7 @@ public void sendCompletionRequest( MockExecutor executor = new MockExecutor(); PersistentTasksNodeService coordinator = new PersistentTasksNodeService( + new TestThreadPool(getClass().getName()), persistentTasksService, registry, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()), diff --git a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/TestOpenTelemetry.java b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/TestOpenTelemetry.java new file mode 100644 index 0000000000000..1a76d549919e5 --- /dev/null +++ b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/TestOpenTelemetry.java @@ -0,0 +1,241 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.apm; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanBuilder; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.SpanKind; +import io.opentelemetry.api.trace.StatusCode; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.api.trace.TracerProvider; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.propagation.ContextPropagators; + +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +// Shut up, IntelliJ +@SuppressWarnings("NullableProblems") +public class TestOpenTelemetry implements OpenTelemetry { + + public static final OpenTelemetry INSTANCE = new TestOpenTelemetry(); + + private final Tracer tracer; + + public TestOpenTelemetry() { + this.tracer = new TestTracer(); + } + + public Tracer getTracer() { + return tracer; + } + + @Override + public TracerProvider getTracerProvider() { + return new TracerProvider() { + @Override + public Tracer get(String instrumentationScopeName) { + return tracer; + } + + @Override + public Tracer get(String instrumentationScopeName, String instrumentationScopeVersion) { + return tracer; + } + }; + } + + @Override + public Tracer getTracer(String instrumentationScopeName) { + return this.tracer; + } + + @Override + public Tracer getTracer(String instrumentationScopeName, String instrumentationScopeVersion) { + return this.tracer; + } + + @Override + public ContextPropagators getPropagators() { + return ContextPropagators.noop(); + } + + class TestTracer implements Tracer { + + @Override + public SpanBuilder spanBuilder(String spanName) { + return new TestSpanBuilder(spanName); + } + } + + class TestSpanBuilder implements SpanBuilder { + private final String spanName; + private Context parentContext; + private Map attributes = new HashMap<>(); + private SpanKind spanKind; + private Long startTimestamp; + + TestSpanBuilder(String spanName) { + this.spanName = spanName; + } + + @Override + public SpanBuilder setParent(Context context) { + this.parentContext = context; + return this; + } + + @Override + public SpanBuilder setNoParent() { + this.parentContext = null; + return this; + } + + @Override + public SpanBuilder addLink(SpanContext spanContext) { + return this; + } + + @Override + public SpanBuilder addLink(SpanContext spanContext, Attributes attributes) { + return this; + } + + @Override + public SpanBuilder setAttribute(String key, String value) { + this.attributes.put(key, value); + return this; + } + + @Override + public SpanBuilder setAttribute(String key, long value) { + this.attributes.put(key, value); + return this; + } + + @Override + public SpanBuilder setAttribute(String key, double value) { + this.attributes.put(key, value); + return this; + } + + @Override + public SpanBuilder setAttribute(String key, boolean value) { + this.attributes.put(key, value); + return this; + } + + @Override + public SpanBuilder setAttribute(AttributeKey key, T value) { + this.attributes.put(key.getKey(), value); + return this; + } + + @Override + public SpanBuilder setSpanKind(SpanKind spanKind) { + this.spanKind = spanKind; + return this; + } + + @Override + public SpanBuilder setStartTimestamp(long startTimestamp, TimeUnit unit) { + this.startTimestamp = unit.toMillis(startTimestamp); + return this; + } + + @Override + public Span 
startSpan() { + if (this.startTimestamp == null) { + this.startTimestamp = System.currentTimeMillis(); + } + return new TestSpan( + spanName, + parentContext, + attributes, + spanKind, + startTimestamp + ); + } + } + + class TestSpan implements Span { + private String name; + private final Context parentContext; + private final Map attributes; + private final SpanKind spanKind; + private Throwable exception; + private Long startTimestamp; + private Long endTimestamp; + + TestSpan(String spanName, Context parentContext, Map attributes, SpanKind spanKind, Long startTimestamp) { + this.name = spanName; + this.parentContext = parentContext; + this.attributes = attributes; + this.spanKind = spanKind; + this.startTimestamp = startTimestamp; + } + + @Override + public Span setAttribute(AttributeKey key, T value) { + this.attributes.put(key.getKey(), value); + return this; + } + + @Override + public Span addEvent(String name, Attributes attributes) { + return this; + } + + @Override + public Span addEvent(String name, Attributes attributes, long timestamp, TimeUnit unit) { + return this; + } + + @Override + public Span setStatus(StatusCode statusCode, String description) { + return this; + } + + @Override + public Span recordException(Throwable exception, Attributes additionalAttributes) { + this.exception = exception; + return this; + } + + @Override + public Span updateName(String name) { + this.name = name; + return this; + } + + @Override + public void end() { + this.endTimestamp = System.currentTimeMillis(); + } + + @Override + public void end(long timestamp, TimeUnit unit) { + this.endTimestamp = unit.toMillis(timestamp); + } + + @Override + public SpanContext getSpanContext() { + return null; + } + + @Override + public boolean isRecording() { + return this.endTimestamp != null; + } + } +} diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index d5d78f1bc21b3..6edda4ee48d6f 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -29,7 +29,6 @@ import io.opentelemetry.sdk.trace.export.SpanExporter; import io.opentelemetry.sdk.trace.samplers.Sampler; import io.opentelemetry.semconv.resource.attributes.ResourceAttributes; -import io.opentelemetry.semconv.trace.attributes.SemanticAttributes; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -188,7 +187,7 @@ private void createApmServices() { ResourceAttributes.SERVICE_VERSION, Version.CURRENT.toString(), ResourceAttributes.DEPLOYMENT_ENVIRONMENT, - "dev" + "sdk" ) ) ) @@ -268,8 +267,8 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { // spanBuilder.setAttribute("type", "elasticsearch"); // // hack spans to avoid the 'app' span.type, will make it use external/elasticsearch // // also allows to set destination resource name in map - spanBuilder.setAttribute(SemanticAttributes.MESSAGING_SYSTEM, "elasticsearch"); - spanBuilder.setAttribute(SemanticAttributes.MESSAGING_DESTINATION, clusterService.getNodeName()); +// spanBuilder.setAttribute(SemanticAttributes.MESSAGING_SYSTEM, "elasticsearch"); +// spanBuilder.setAttribute(SemanticAttributes.MESSAGING_DESTINATION, clusterService.getNodeName()); } // this will duplicate the "resource attributes" that are defined globally @@ -291,14 +290,12 @@ public void 
onTraceStarted(ThreadContext threadContext, Traceable traceable) { threadContext.putHeader(spanHeaders); - // logGraphviz(span); +// logGraphviz(span); return span; }); } - private static final Set CACHE = new HashSet<>(); - @Override public void onTraceException(Traceable traceable, Throwable throwable) { final var span = spans.get(traceable.getSpanId()); @@ -465,6 +462,8 @@ private static boolean isSupportedContextKey(String key) { return TRACE_HEADERS.contains(key); } + private static final Set GRAPHVIZ_CACHE = new HashSet<>(); + private static void logGraphviz(Span span) { final String spanStr = span.toString(); @@ -483,23 +482,24 @@ private static void logGraphviz(Span span) { j = spanStr.indexOf(",", i); String spanName = spanStr.substring(i + 5, j); - if (CACHE.add(spanId)) { - Map attrs = new HashMap<>(); - attrs.put("label", spanName); - if (spanName.startsWith("internal:")) { - attrs.put("style", "filled"); - attrs.put("fillcolor", "pink"); + if (spanName.startsWith("internal:") == false) { + if (GRAPHVIZ_CACHE.add(spanId)) { + Map attrs = new HashMap<>(); + attrs.put("label", spanName); + if (spanName.startsWith("internal:")) { + attrs.put("style", "filled"); + attrs.put("fillcolor", "pink"); + } + final String attrsString = attrs.entrySet() + .stream() + .map(each -> each.getKey() + "=\"" + each.getValue() + "\"") + .collect(Collectors.joining(",")); + LOGGER.warn("BADGER: __{} [{}]", spanId, attrsString); } - final String attrsString = attrs.entrySet() - .stream() - .map(each -> each.getKey() + "=\"" + each.getValue() + "\"") - .collect(Collectors.joining(",")); - LOGGER.warn("BADGER: __{} [{}]", spanId, attrsString); - } - if (parentSpanId != null) { - LOGGER.warn("BADGER: __{} -> __{}", spanId, parentSpanId); + if (parentSpanId != null) { + LOGGER.warn("BADGER: __{} -> __{}", spanId, parentSpanId); + } } - } } diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java index 1c9e40f4cfda1..d3cd7693ff323 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java @@ -83,65 +83,71 @@ public TransportSubmitAsyncSearchAction( @Override protected void doExecute(Task submitTask, SubmitAsyncSearchRequest request, ActionListener submitListener) { final SearchRequest searchRequest = createSearchRequest(request, submitTask, request.getKeepAlive()); - AsyncSearchTask searchTask = (AsyncSearchTask) taskManager.register("transport", SearchAction.INSTANCE.name(), searchRequest); - searchAction.execute(searchTask, searchRequest, searchTask.getSearchProgressActionListener()); - searchTask.addCompletionListener(new ActionListener<>() { - @Override - public void onResponse(AsyncSearchResponse searchResponse) { - if (searchResponse.isRunning() || request.isKeepOnCompletion()) { - // the task is still running and the user cannot wait more so we create - // a document for further retrieval - try { - final String docId = searchTask.getExecutionId().getDocId(); - // creates the fallback response if the node crashes/restarts in the middle of the request - // TODO: store intermediate results ? 
- AsyncSearchResponse initialResp = searchResponse.clone(searchResponse.getId()); - store.createResponse(docId, searchTask.getOriginHeaders(), initialResp, new ActionListener<>() { - @Override - public void onResponse(IndexResponse r) { - if (searchResponse.isRunning()) { - try { - // store the final response on completion unless the submit is cancelled - searchTask.addCompletionListener( - finalResponse -> onFinalResponse(searchTask, finalResponse, () -> {}) - ); - } finally { - submitListener.onResponse(searchResponse); + try (var ignored = threadContext.newTraceContext()) { + AsyncSearchTask searchTask = (AsyncSearchTask) taskManager.register( + "transport", + SearchAction.INSTANCE.name(), + searchRequest + ); + searchAction.execute(searchTask, searchRequest, searchTask.getSearchProgressActionListener()); + searchTask.addCompletionListener(new ActionListener<>() { + @Override + public void onResponse(AsyncSearchResponse searchResponse) { + if (searchResponse.isRunning() || request.isKeepOnCompletion()) { + // the task is still running and the user cannot wait more so we create + // a document for further retrieval + try { + final String docId = searchTask.getExecutionId().getDocId(); + // creates the fallback response if the node crashes/restarts in the middle of the request + // TODO: store intermediate results ? + AsyncSearchResponse initialResp = searchResponse.clone(searchResponse.getId()); + store.createResponse(docId, searchTask.getOriginHeaders(), initialResp, new ActionListener<>() { + @Override + public void onResponse(IndexResponse r) { + if (searchResponse.isRunning()) { + try { + // store the final response on completion unless the submit is cancelled + searchTask.addCompletionListener( + finalResponse -> onFinalResponse(searchTask, finalResponse, () -> {}) + ); + } finally { + submitListener.onResponse(searchResponse); + } + } else { + onFinalResponse(searchTask, searchResponse, () -> submitListener.onResponse(searchResponse)); } - } else { - onFinalResponse(searchTask, searchResponse, () -> submitListener.onResponse(searchResponse)); } - } - @Override - public void onFailure(Exception exc) { - onFatalFailure( - searchTask, - exc, - searchResponse.isRunning(), - "fatal failure: unable to store initial response", - submitListener - ); - } - }); - } catch (Exception exc) { - onFatalFailure(searchTask, exc, searchResponse.isRunning(), "fatal failure: generic error", submitListener); + @Override + public void onFailure(Exception exc) { + onFatalFailure( + searchTask, + exc, + searchResponse.isRunning(), + "fatal failure: unable to store initial response", + submitListener + ); + } + }); + } catch (Exception exc) { + onFatalFailure(searchTask, exc, searchResponse.isRunning(), "fatal failure: generic error", submitListener); + } + } else { + // the task completed within the timeout so the response is sent back to the user + // with a null id since nothing was stored on the cluster. + taskManager.unregister(searchTask); + submitListener.onResponse(searchResponse.clone(null)); } - } else { - // the task completed within the timeout so the response is sent back to the user - // with a null id since nothing was stored on the cluster. - taskManager.unregister(searchTask); - submitListener.onResponse(searchResponse.clone(null)); } - } - @Override - public void onFailure(Exception exc) { - // this will only ever be called if there is an issue scheduling the thread that executes - // the completion listener once the wait for completion timeout expires. 
- onFatalFailure(searchTask, exc, true, "fatal failure: addCompletionListener", submitListener); - } - }, request.getWaitForCompletionTimeout()); + @Override + public void onFailure(Exception exc) { + // this will only ever be called if there is an issue scheduling the thread that executes + // the completion listener once the wait for completion timeout expires. + onFatalFailure(searchTask, exc, true, "fatal failure: addCompletionListener", submitListener); + } + }, request.getWaitForCompletionTimeout()); + } } private SearchRequest createSearchRequest(SubmitAsyncSearchRequest request, Task submitTask, TimeValue keepAlive) { diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java index 480acc185f016..747aae64cf3cf 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java @@ -88,56 +88,58 @@ protected void doExecute(Task transportTask, Request request, ActionListener headers) { - String description = "executing enrich policy [" + request.getName() + "]"; - return new ExecuteEnrichPolicyTask(id, type, action, description, parentTaskId, headers); - } - }); - - try { - ActionListener listener; - if (request.isWaitForCompletion()) { - listener = ActionListener.wrap(result -> actionListener.onResponse(new Response(result)), actionListener::onFailure); - } else { - listener = ActionListener.wrap(result -> LOGGER.debug("successfully executed policy [{}]", request.getName()), e -> { - if (e instanceof TaskCancelledException) { - LOGGER.info(e.getMessage()); - } else { - LOGGER.error("failed to execute policy [" + request.getName() + "]", e); - } - }); - } - policyExecutor.runPolicyLocally(task, request.getName(), ActionListener.wrap(result -> { + try (var ignored = transportService.getThreadPool().getThreadContext().newTraceContext()) { + // Can't use provided task, because in the case wait_for_completion=false then + // as soon as actionListener#onResponse is invoked then the provided task get unregistered and + // then there no way to see the policy execution in the list tasks or get task APIs. 
+ var task = (ExecuteEnrichPolicyTask) taskManager.register("enrich", TASK_ACTION, new TaskAwareRequest() { + + @Override + public void setParentTask(TaskId taskId) { + request.setParentTask(taskId); + } + + @Override + public TaskId getParentTask() { + return request.getParentTask(); + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + String description = "executing enrich policy [" + request.getName() + "]"; + return new ExecuteEnrichPolicyTask(id, type, action, description, parentTaskId, headers); + } + }); + + try { + ActionListener listener; + if (request.isWaitForCompletion()) { + listener = ActionListener.wrap(result -> actionListener.onResponse(new Response(result)), actionListener::onFailure); + } else { + listener = ActionListener.wrap(result -> LOGGER.debug("successfully executed policy [{}]", request.getName()), e -> { + if (e instanceof TaskCancelledException) { + LOGGER.info(e.getMessage()); + } else { + LOGGER.error("failed to execute policy [" + request.getName() + "]", e); + } + }); + } + policyExecutor.runPolicyLocally(task, request.getName(), ActionListener.wrap(result -> { + taskManager.unregister(task); + listener.onResponse(result); + }, e -> { + taskManager.unregister(task); + listener.onFailure(e); + })); + + if (request.isWaitForCompletion() == false) { + TaskId taskId = new TaskId(clusterState.nodes().getLocalNodeId(), task.getId()); + actionListener.onResponse(new Response(taskId)); + } + } catch (Exception e) { taskManager.unregister(task); - listener.onResponse(result); - }, e -> { - taskManager.unregister(task); - listener.onFailure(e); - })); - - if (request.isWaitForCompletion() == false) { - TaskId taskId = new TaskId(clusterState.nodes().getLocalNodeId(), task.getId()); - actionListener.onResponse(new Response(taskId)); + throw e; } - } catch (Exception e) { - taskManager.unregister(task); - throw e; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/allocation/TrainedModelAllocationNodeService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/allocation/TrainedModelAllocationNodeService.java index 5815a843a076b..01604c6139058 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/allocation/TrainedModelAllocationNodeService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/allocation/TrainedModelAllocationNodeService.java @@ -347,17 +347,19 @@ void prepareModelToLoad(StartTrainedModelDeploymentAction.TaskParams taskParams) logger.debug( () -> new ParameterizedMessage("[{}] preparing to load model with task params: {}", taskParams.getModelId(), taskParams) ); - TrainedModelDeploymentTask task = (TrainedModelDeploymentTask) taskManager.register( - TRAINED_MODEL_ALLOCATION_TASK_TYPE, - TRAINED_MODEL_ALLOCATION_TASK_ACTION, - taskAwareRequest(taskParams) - ); - // threadsafe check to verify we are not loading/loaded the model - if (modelIdToTask.putIfAbsent(taskParams.getModelId(), task) == null) { - loadingModels.add(task); - } else { - // If there is already a task for the model, unregister the new task - taskManager.unregister(task); + try (var ignored = threadPool.getThreadContext().newTraceContext()) { + TrainedModelDeploymentTask task = (TrainedModelDeploymentTask) taskManager.register( + TRAINED_MODEL_ALLOCATION_TASK_TYPE, + TRAINED_MODEL_ALLOCATION_TASK_ACTION, + taskAwareRequest(taskParams) + ); + // threadsafe check to verify we are not loading/loaded the model + if 
(modelIdToTask.putIfAbsent(taskParams.getModelId(), task) == null) { + loadingModels.add(task); + } else { + // If there is already a task for the model, unregister the new task + taskManager.unregister(task); + } } } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/AsyncTaskManagementService.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/AsyncTaskManagementService.java index d1607a30dabe9..2ca9fe4f1d303 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/AsyncTaskManagementService.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/AsyncTaskManagementService.java @@ -169,20 +169,22 @@ public void asyncExecute( ActionListener listener ) { String nodeId = clusterService.localNode().getId(); - @SuppressWarnings("unchecked") - T searchTask = (T) taskManager.register("transport", action + "[a]", new AsyncRequestWrapper(request, nodeId)); - boolean operationStarted = false; - try { - operation.execute( - request, - searchTask, - wrapStoringListener(searchTask, waitForCompletionTimeout, keepAlive, keepOnCompletion, listener) - ); - operationStarted = true; - } finally { - // If we didn't start operation for any reason, we need to clean up the task that we have created - if (operationStarted == false) { - taskManager.unregister(searchTask); + try (var ignored = threadPool.getThreadContext().newTraceContext()) { + @SuppressWarnings("unchecked") + T searchTask = (T) taskManager.register("transport", action + "[a]", new AsyncRequestWrapper(request, nodeId)); + boolean operationStarted = false; + try { + operation.execute( + request, + searchTask, + wrapStoringListener(searchTask, waitForCompletionTimeout, keepAlive, keepOnCompletion, listener) + ); + operationStarted = true; + } finally { + // If we didn't start operation for any reason, we need to clean up the task that we have created + if (operationStarted == false) { + taskManager.unregister(searchTask); + } } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index f6c95c1638534..33f1391e99cb6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -111,7 +111,7 @@ public class AuthorizationService { true, Property.NodeScope ); - public static final Setting TRACE_AUTHORIZATION = Setting.boolSetting(setting("authz.tracing"), true, Property.NodeScope); + public static final Setting TRACE_AUTHORIZATION = Setting.boolSetting(setting("authz.tracing"), false, Property.NodeScope); private static final AuthorizationInfo SYSTEM_AUTHZ_INFO = () -> Collections.singletonMap( PRINCIPAL_ROLES_FIELD_NAME, new String[] { SystemUser.ROLE_NAME } @@ -222,11 +222,8 @@ public void authorize( final TransportRequest originalRequest, final ActionListener listener ) { - final AuthorizationContext enclosingContext = extractAuthorizationContext(threadContext, action); - Runnable stopTracing = null; - /* authorization fills in certain transient headers, which must be observed in the listener (action handler execution) * as well, but which must not bleed across different action context (eg parent-child action contexts). *

@@ -237,9 +234,7 @@ public void authorize(
          * We also clear tracing-related headers
          */
         try (ThreadContext.StoredContext ignore = threadContext.newStoredContext(false, ACTION_SCOPE_AUTHORIZATION_KEYS)) {
-            // FIXME improve this
-            try (var ignore2 = threadContext.newTraceContext()) {
-                stopTracing = maybeStartTracing(enclosingContext, authentication, action, originalRequest);
+            try (var ignored = maybeStartTracing(enclosingContext, authentication, action, originalRequest)) {
                 // this does not clear {@code AuthorizationServiceField.ORIGINATING_ACTION_KEY}
                 // prior to doing any authorization lets set the originating action in the thread context
                 // the originating action is the current action if no originating action has yet been set in the current thread context
@@ -271,17 +266,16 @@ public void authorize(
                 } else {
                     final RequestInfo requestInfo = new RequestInfo(authentication, unwrappedRequest, action, enclosingContext);
                     final AuthorizationEngine engine = getAuthorizationEngine(authentication);
-                    final ActionListener<AuthorizationInfo> authzInfoListener = wrapPreservingContext(ActionListener.wrap(authorizationInfo -> {
-                        threadContext.putTransient(AUTHORIZATION_INFO_KEY, authorizationInfo);
-                        maybeAuthorizeRunAs(requestInfo, auditId, authorizationInfo, listener);
-                    }, listener::onFailure), threadContext);
+                    final ActionListener<AuthorizationInfo> authzInfoListener = wrapPreservingContext(
+                        ActionListener.wrap(authorizationInfo -> {
+                            threadContext.putTransient(AUTHORIZATION_INFO_KEY, authorizationInfo);
+                            maybeAuthorizeRunAs(requestInfo, auditId, authorizationInfo, listener);
+                        }, listener::onFailure),
+                        threadContext
+                    );
                     engine.resolveAuthorizationInfo(requestInfo, authzInfoListener);
                 }
             }
-        } finally {
-            if (stopTracing != null) {
-                stopTracing.run();
-            }
-        }
     }
@@ -335,46 +329,57 @@ private static ElasticsearchSecurityException internalError(String message) {
         return new ElasticsearchSecurityException(message);
     }
 
-    private Runnable maybeStartTracing(
+    // FIXME the declared return type is not really what we mean to return, but AutoCloseable#close() declares a checked exception
+    private ThreadContext.StoredContext maybeStartTracing(
         AuthorizationContext enclosingContext,
         Authentication authentication,
         String action,
         TransportRequest originalRequest
     ) {
-        // Not tracing system actions
-        if (false == tracingEnabled || SystemUser.is(authentication.getUser()) || threadContext.isSystemContext()) {
+        // System actions are never traced. Also, when we are not going to trace, we must not start a new trace context.
+ if (shouldTrace(authentication) == false) { return () -> {}; - } else { - return authorizationTracer.startTracing(new Traceable() { - @Override - public String getSpanId() { - return "authorize_" + System.identityHashCode(originalRequest); - } + } - @Override - public String getSpanName() { - return "authorize(" + action + ")"; - } + final ThreadContext.StoredContext context = threadContext.newTraceContext(); + final Runnable stopTracing = authorizationTracer.startTracing(new Traceable() { + @Override + public String getSpanId() { + return "authorize_" + System.identityHashCode(originalRequest); + } - @Override - public Map getAttributes() { - final HashMap attributes = new HashMap<>( - Map.of( - "es.principal", - authentication.getUser().principal(), - "es.authentication.realm.name", - authentication.getAuthenticatedBy().getName(), - "es.node.name", - clusterService.getNodeName() - ) - ); - if (enclosingContext != null) { - attributes.put("originating_action", enclosingContext.getAction()); - } - return Map.copyOf(attributes); + @Override + public String getSpanName() { + return "authorize(" + action + ")"; + } + + @Override + public Map getAttributes() { + final HashMap attributes = new HashMap<>( + Map.of( + "es.principal", + authentication.getUser().principal(), + "es.authentication.realm.name", + authentication.getAuthenticatedBy().getName(), + "es.node.name", + clusterService.getNodeName() + ) + ); + if (enclosingContext != null) { + attributes.put("originating_action", enclosingContext.getAction()); } - }); - } + return Map.copyOf(attributes); + } + }); + + return () -> { + context.restore(); + stopTracing.run(); + }; + } + + private boolean shouldTrace(Authentication authentication) { + return false == (false == tracingEnabled || SystemUser.is(authentication.getUser()) || threadContext.isSystemContext()); } private void checkOperatorPrivileges(Authentication authentication, String action, TransportRequest originalRequest) From a0978c9bd9ebfda7655b2ea0a9e7a8fe16b635bb Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 13 Apr 2022 13:56:37 +0100 Subject: [PATCH 53/90] Compilation fixes --- .../http/netty4/Netty4BadRequestTests.java | 3 ++- .../http/netty4/Netty4HttpServerPipeliningTests.java | 2 +- .../common/network/NetworkModuleTests.java | 3 ++- .../http/AbstractHttpServerTransportTests.java | 12 ++++++------ .../elasticsearch/transport/InboundHandlerTests.java | 2 ++ 5 files changed, 13 insertions(+), 9 deletions(-) diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4BadRequestTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4BadRequestTests.java index 21f0309b502c8..d3ea7c5bfd7c0 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4BadRequestTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4BadRequestTests.java @@ -37,6 +37,7 @@ import java.io.UncheckedIOException; import java.util.Collection; import java.util.Collections; +import java.util.List; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -89,7 +90,7 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, dispatcher, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), new SharedGroupFactory(Settings.EMPTY), - tracers + List.of() ) ) { httpServerTransport.start(); diff --git 
a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java index db20a228f1c78..90b93ce1771b9 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java @@ -112,7 +112,7 @@ class CustomNettyHttpServerTransport extends Netty4HttpServerTransport { new NullDispatcher(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), new SharedGroupFactory(settings), - tracers + List.of() ); } diff --git a/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java b/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java index 9556cab19e04b..74e1aaf689d03 100644 --- a/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java +++ b/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java @@ -293,7 +293,8 @@ private NetworkModule newNetworkModule(Settings settings, NetworkPlugin... plugi xContentRegistry(), null, new NullDispatcher(), - new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), + List.of() ); } } diff --git a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java index 022b527c5de9d..f7e93bb69a61e 100644 --- a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java +++ b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java @@ -162,7 +162,7 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th xContentRegistry(), dispatcher, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), - tracers + List.of() ) { @Override @@ -277,7 +277,7 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, } }, clusterSettings, - tracers + List.of() ) { @Override protected HttpServerChannel bind(InetSocketAddress hostAddress) { @@ -328,7 +328,7 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, } }, clusterSettings, - tracers + List.of() ) { @Override protected HttpServerChannel bind(InetSocketAddress hostAddress) { @@ -482,7 +482,7 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, } }, clusterSettings, - tracers + List.of() ) { @Override protected HttpServerChannel bind(InetSocketAddress hostAddress) { @@ -539,7 +539,7 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, } }, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), - tracers + List.of() ) { @Override @@ -615,7 +615,7 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, } }, clusterSettings, - tracers + List.of() ) { @Override diff --git a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java index a8e65c08bef91..d7ba04afef70b 100644 --- a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java @@ -99,6 +99,7 @@ public void 
tearDown() throws Exception { public void testPing() throws Exception { AtomicReference channelCaptor = new AtomicReference<>(); RequestHandlerRegistry registry = new RequestHandlerRegistry<>( + threadPool, "test-request", TestRequest::new, taskManager, @@ -143,6 +144,7 @@ public TestResponse read(StreamInput in) throws IOException { } }, null, action)); RequestHandlerRegistry registry = new RequestHandlerRegistry<>( + threadPool, action, TestRequest::new, taskManager, From ca896fb56b945db2c00c9e90a1974325c7f0ad49 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 13 Apr 2022 14:16:29 +0100 Subject: [PATCH 54/90] Bump version in run.sh --- run.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/run.sh b/run.sh index f6f29016eedfc..f289c47eebd28 100755 --- a/run.sh +++ b/run.sh @@ -3,7 +3,7 @@ set -eo pipefail # This is the path that `./gradlew localDistro` prints out at the end -cd build/distribution/local/elasticsearch-8.2.0-SNAPSHOT +cd build/distribution/local/elasticsearch-8.3.0-SNAPSHOT # URL and token for sending traces SERVER_URL="" From 01d9c424b3459115205b0fd585c7388982bb0d59 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 13 Apr 2022 14:41:00 +0100 Subject: [PATCH 55/90] Fix --- run.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/run.sh b/run.sh index 484748f37ea53..9c4dbcf0c8f93 100755 --- a/run.sh +++ b/run.sh @@ -2,12 +2,14 @@ set -eo pipefail +AGENT_VERSION=$(awk '/apm_agent/ { print $3 }' build-tools-internal/version.properties) + # This is the path that `./gradlew localDistro` prints out at the end cd build/distribution/local/elasticsearch-8.3.0-SNAPSHOT # URL and token for sending traces -SERVER_URL="" -SECRET_TOKEN="" +SERVER_URL="https://apm-testing.apm.us-west2.gcp.elastic-cloud.com" +SECRET_TOKEN="bNJCAZxDjQwIFKxdk2" # Optional - override the agent jar # OVERRIDE_AGENT_JAR="$HOME/.m2/repository/co/elastic/apm/elastic-apm-agent/1.30.1-SNAPSHOT/elastic-apm-agent-1.30.1-SNAPSHOT.jar" @@ -21,8 +23,6 @@ if [[ ! 
-f config/elasticsearch.keystore ]]; then echo "password" | ./bin/elasticsearch-keystore add -x 'bootstrap.password' fi -AGENT_VERSION=$(awk '/apm_agent/ { print $3 }' build-tools-internal/version.properties) - AGENT_JAR="modules/apm-integration/elastic-apm-agent-${AGENT_VERSION}.jar" if [[ -n "$OVERRIDE_AGENT_JAR" ]]; then From fdbe843e7be38412d3936b3d6fcf00a9ab0e73c5 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 13 Apr 2022 15:35:27 +0100 Subject: [PATCH 56/90] Fixes for using latest agent version --- run.sh | 13 ++++++++----- .../src/main/plugin-metadata/plugin-security.policy | 8 ++++++++ 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/run.sh b/run.sh index 9c4dbcf0c8f93..7416bcfcb9814 100755 --- a/run.sh +++ b/run.sh @@ -2,17 +2,20 @@ set -eo pipefail +ES_VERSION=$(awk '/^elasticsearch/ { print $3 }' build-tools-internal/version.properties) AGENT_VERSION=$(awk '/apm_agent/ { print $3 }' build-tools-internal/version.properties) # This is the path that `./gradlew localDistro` prints out at the end cd build/distribution/local/elasticsearch-8.3.0-SNAPSHOT +sed -i.bak -e "s||$PWD|" modules/apm-integration/plugin-security.policy + # URL and token for sending traces -SERVER_URL="https://apm-testing.apm.us-west2.gcp.elastic-cloud.com" -SECRET_TOKEN="bNJCAZxDjQwIFKxdk2" +SERVER_URL="" +SECRET_TOKEN="" # Optional - override the agent jar -# OVERRIDE_AGENT_JAR="$HOME/.m2/repository/co/elastic/apm/elastic-apm-agent/1.30.1-SNAPSHOT/elastic-apm-agent-1.30.1-SNAPSHOT.jar" +# OVERRIDE_AGENT_JAR="$HOME/.m2/repository/co/elastic/apm/elastic-apm-agent/1.30.2-SNAPSHOT/elastic-apm-agent-1.30.2-SNAPSHOT.jar" # Clear this so that ES doesn't repeatedly complain about ignoring it export JAVA_HOME='' @@ -35,9 +38,9 @@ AGENT_OPTS="$AGENT_OPTS -Delastic.apm.service_name=elasticsearch" AGENT_OPTS="$AGENT_OPTS -Delastic.apm.instrument=false" AGENT_OPTS="$AGENT_OPTS -Delastic.apm.server_url=$SERVER_URL" AGENT_OPTS="$AGENT_OPTS -Delastic.apm.secret_token=$SECRET_TOKEN" -AGENT_OPTS="$AGENT_OPTS -Delastic.apm.service_version=8.2.0-SNAPSHOT" +AGENT_OPTS="$AGENT_OPTS -Delastic.apm.service_version=$ES_VERSION-SNAPSHOT" AGENT_OPTS="$AGENT_OPTS -Delastic.apm.environment=dev" -AGENT_OPTS="$AGENT_OPTS -Delastic.apm.log_level=trace" +AGENT_OPTS="$AGENT_OPTS -Delastic.apm.log_level=debug" AGENT_OPTS="$AGENT_OPTS -Delastic.apm.log_file=$PWD/apm.log" AGENT_OPTS="$AGENT_OPTS -Delastic.apm.enable_experimental_instrumentations=true" diff --git a/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy b/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy index a169c0a13f8b9..6377f7121f203 100644 --- a/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy +++ b/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy @@ -16,4 +16,12 @@ grant codeBase "${codebase.elastic-apm-agent}" { permission java.lang.RuntimePermission "setFactory"; permission java.lang.RuntimePermission "setContextClassLoader"; permission java.net.SocketPermission "*", "connect,resolve"; + + // If you want to write APM logs to a file, then the following + // permissions are also required. 
You'll need to set a value for + // ES_HOME + // permission java.lang.RuntimePermission "accessDeclaredMembers"; + // permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; + // permission java.io.FilePermission "", "read"; + // permission java.io.FilePermission "", "write"; }; From 6a23b24a58ee23b278aa82cfc9d00b23e6e2987b Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Thu, 14 Apr 2022 12:27:24 +0100 Subject: [PATCH 57/90] Fully configure the APM via config file in the module --- .../gradle/plugin/PluginBuildPlugin.java | 3 ++ .../tools/launchers/JvmOptionsParser.java | 3 ++ run.sh | 39 +++++++------------ .../src/main/config/elasticapm.properties | 20 ++++++++++ .../src/main/config/jvm.options.d/apm.options | 2 + .../plugin-metadata/plugin-security.policy | 12 ++---- 6 files changed, 46 insertions(+), 33 deletions(-) create mode 100644 x-pack/plugin/apm-integration/src/main/config/elasticapm.properties create mode 100644 x-pack/plugin/apm-integration/src/main/config/jvm.options.d/apm.options diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java index 907b09077bd70..82608e26ec363 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java @@ -10,6 +10,7 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; +import org.apache.tools.ant.filters.ReplaceTokens; import org.elasticsearch.gradle.Version; import org.elasticsearch.gradle.VersionProperties; import org.elasticsearch.gradle.dependencies.CompileOnlyResolvePlugin; @@ -224,6 +225,7 @@ public void execute(Task task) { private static CopySpec createBundleSpec(Project project, File pluginMetadata, TaskProvider buildProperties) { var bundleSpec = project.copySpec(); + bundleSpec.from(buildProperties); bundleSpec.from(pluginMetadata, copySpec -> { // metadata (eg custom security policy) @@ -246,6 +248,7 @@ private static CopySpec createBundleSpec(Project project, File pluginMetadata, T bundleSpec.from("src/main", copySpec -> { copySpec.include("config/**"); copySpec.include("bin/**"); + copySpec.filter(Map.of("tokens", Map.of("es.version", VersionProperties.getElasticsearch())), ReplaceTokens.class); }); return bundleSpec; } diff --git a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java index afab76f814bce..d3dd246783269 100644 --- a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java +++ b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java @@ -9,6 +9,7 @@ package org.elasticsearch.tools.launchers; import java.io.BufferedReader; +import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; @@ -79,6 +80,8 @@ public static void main(final String[] args) throws InterruptedException, IOExce final String environmentPathConf = System.getenv("ES_PATH_CONF"); if (environmentPathConf != null) { substitutions.put("ES_PATH_CONF", environmentPathConf); + } else { + substitutions.put("ES_PATH_CONF", System.getenv("ES_HOME") + File.separator + "config"); } try { diff --git a/run.sh b/run.sh index 7416bcfcb9814..290a3e23b067c 100755 --- a/run.sh +++ b/run.sh @@ -2,52 +2,43 @@ set -eo 
pipefail -ES_VERSION=$(awk '/^elasticsearch/ { print $3 }' build-tools-internal/version.properties) +# Clear this so that ES doesn't repeatedly complain about ignoring it +export JAVA_HOME='' + AGENT_VERSION=$(awk '/apm_agent/ { print $3 }' build-tools-internal/version.properties) # This is the path that `./gradlew localDistro` prints out at the end cd build/distribution/local/elasticsearch-8.3.0-SNAPSHOT -sed -i.bak -e "s||$PWD|" modules/apm-integration/plugin-security.policy - # URL and token for sending traces SERVER_URL="" SECRET_TOKEN="" -# Optional - override the agent jar -# OVERRIDE_AGENT_JAR="$HOME/.m2/repository/co/elastic/apm/elastic-apm-agent/1.30.2-SNAPSHOT/elastic-apm-agent-1.30.2-SNAPSHOT.jar" - -# Clear this so that ES doesn't repeatedly complain about ignoring it -export JAVA_HOME='' - +# Configure the ES keystore, so that we can use `elastic:password` for REST +# requests if [[ ! -f config/elasticsearch.keystore ]]; then ./bin/elasticsearch-keystore create - # Use elastic:password for sending REST requests echo "password" | ./bin/elasticsearch-keystore add -x 'bootstrap.password' fi -AGENT_JAR="modules/apm-integration/elastic-apm-agent-${AGENT_VERSION}.jar" + +# Optional - override the agent jar +# OVERRIDE_AGENT_JAR="$HOME/.m2/repository/co/elastic/apm/elastic-apm-agent/1.30.2-SNAPSHOT/elastic-apm-agent-1.30.2-SNAPSHOT.jar" if [[ -n "$OVERRIDE_AGENT_JAR" ]]; then # Copy in WIP agent - cp "$OVERRIDE_AGENT_JAR" "$AGENT_JAR" + cp "$OVERRIDE_AGENT_JAR" "modules/apm-integration/elastic-apm-agent-${AGENT_VERSION}.jar" fi -AGENT_OPTS="" -AGENT_OPTS="$AGENT_OPTS -Delastic.apm.service_name=elasticsearch" -AGENT_OPTS="$AGENT_OPTS -Delastic.apm.instrument=false" -AGENT_OPTS="$AGENT_OPTS -Delastic.apm.server_url=$SERVER_URL" -AGENT_OPTS="$AGENT_OPTS -Delastic.apm.secret_token=$SECRET_TOKEN" -AGENT_OPTS="$AGENT_OPTS -Delastic.apm.service_version=$ES_VERSION-SNAPSHOT" -AGENT_OPTS="$AGENT_OPTS -Delastic.apm.environment=dev" -AGENT_OPTS="$AGENT_OPTS -Delastic.apm.log_level=debug" -AGENT_OPTS="$AGENT_OPTS -Delastic.apm.log_file=$PWD/apm.log" -AGENT_OPTS="$AGENT_OPTS -Delastic.apm.enable_experimental_instrumentations=true" +# Configure the agent +#  1. Enable the agent +# 2. Set the server URL +# 3. 
Set the secret token +perl -p -i -e " s|enabled: false|enabled: true| ; s|# server_url.*|server_url: $SERVER_URL| ; s|# secret_token.*|secret_token: $SECRET_TOKEN|" config/elasticapm.properties -export ES_SERVER_OPTS="-ea -javaagent:$AGENT_JAR $AGENT_OPTS" # export ES_JAVA_OPTS="-agentlib:jdwp=transport=dt_socket,server=n,suspend=y,address=*:5007 " -export ES_JAVA_OPTS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5007 " +# export ES_JAVA_OPTS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5007 " # export ES_JAVA_OPTS="-Djava.security.debug=failure" # export ES_JAVA_OPTS="-Djava.security.debug=access,failure" diff --git a/x-pack/plugin/apm-integration/src/main/config/elasticapm.properties b/x-pack/plugin/apm-integration/src/main/config/elasticapm.properties new file mode 100644 index 0000000000000..1a4cf53362d08 --- /dev/null +++ b/x-pack/plugin/apm-integration/src/main/config/elasticapm.properties @@ -0,0 +1,20 @@ +# Set to `true` to enable the APM agent +enabled: false + +service_version: @es.version@ + +# ES does not use auto-instrumentation +instrument: false + +# Required for OpenTelemetry support +enable_experimental_instrumentations: true + +# server_url: +# secret_token: + +service_name: elasticsearch + +environment: dev + +log_level: error +log_file: _AGENT_HOME_/../../logs/apm.log diff --git a/x-pack/plugin/apm-integration/src/main/config/jvm.options.d/apm.options b/x-pack/plugin/apm-integration/src/main/config/jvm.options.d/apm.options new file mode 100644 index 0000000000000..0b5afdccfdf2f --- /dev/null +++ b/x-pack/plugin/apm-integration/src/main/config/jvm.options.d/apm.options @@ -0,0 +1,2 @@ +# Elastic APM agent config file +-Delastic.apm.config_file=${ES_PATH_CONF}/elasticapm.properties diff --git a/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy b/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy index 6377f7121f203..be49209da37db 100644 --- a/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy +++ b/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy @@ -13,15 +13,9 @@ grant { }; grant codeBase "${codebase.elastic-apm-agent}" { - permission java.lang.RuntimePermission "setFactory"; + permission java.lang.RuntimePermission "accessDeclaredMembers"; permission java.lang.RuntimePermission "setContextClassLoader"; + permission java.lang.RuntimePermission "setFactory"; + permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; permission java.net.SocketPermission "*", "connect,resolve"; - - // If you want to write APM logs to a file, then the following - // permissions are also required. 
You'll need to set a value for
-  // ES_HOME
-  // permission java.lang.RuntimePermission "accessDeclaredMembers";
-  // permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
-  // permission java.io.FilePermission "", "read";
-  // permission java.io.FilePermission "", "write";
 };

From 694180bf5a4075da4b7ce6294d158e8f9fb117d0 Mon Sep 17 00:00:00 2001
From: Rory Hunter
Date: Thu, 14 Apr 2022 12:37:05 +0100
Subject: [PATCH 58/90] Tidy up

---
 distribution/docker/build.gradle | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle
index 66539041b6d8d..caa007ec0a58d 100644
--- a/distribution/docker/build.gradle
+++ b/distribution/docker/build.gradle
@@ -330,8 +330,7 @@ private static List generateTags(DockerBase base) {
   return [
     "${image}:test",
     "${image}:${version}",
-    "docker.elastic.co/${namespace}/${image}:${version}",
-    "docker.elastic.co/${namespace}/${image}:${version}-agent"
+    "docker.elastic.co/${namespace}/${image}:${version}"
   ]
 }

From 89f90c2d608d4bcc674840f0e3cfadaa7673f927 Mon Sep 17 00:00:00 2001
From: Rory Hunter
Date: Mon, 18 Apr 2022 15:29:37 +0100
Subject: [PATCH 59/90] Mass-refactoring

* Open and close scopes as well as using spans. Despite a heavy caveat about
  closing scopes in the same thread in which they are opened, this appears to
  work.
* Only allow a single Tracer to be installed, and provide a no-op
  implementation.
---
 .../netty4/Netty4HttpServerTransport.java | 5 +-
 .../transport/netty4/Netty4Plugin.java | 4 +-
 .../elasticsearch/action/ActionModule.java | 7 +-
 .../cluster/service/ClusterService.java | 1 +
 .../common/network/NetworkModule.java | 4 +-
 .../common/util/concurrent/ThreadContext.java | 52 +-
 .../http/AbstractHttpServerTransport.java | 8 +-
 .../org/elasticsearch/http/HttpTracer.java | 22 +-
 .../java/org/elasticsearch/node/Node.java | 38 +-
 .../elasticsearch/plugins/NetworkPlugin.java | 2 +-
 .../org/elasticsearch/plugins/Plugin.java | 4 +-
 .../elasticsearch/rest/RestController.java | 2 +-
 .../java/org/elasticsearch/tasks/Task.java | 2 +
 .../org/elasticsearch/tasks/TaskManager.java | 20 +-
 .../org/elasticsearch/tasks/TaskTracer.java | 69 --
 .../org/elasticsearch/tracing/Tracer.java | 28 +
 .../transport/RequestHandlerRegistry.java | 9 +-
 .../transport/TransportService.java | 24 +-
 .../action/ActionModuleTests.java | 4 -
 .../common/network/NetworkModuleTests.java | 8 +-
 .../service/FakeThreadPoolMasterService.java | 4 +-
 .../java/org/elasticsearch/node/MockNode.java | 25 +-
 .../test/transport/MockTransportService.java | 4 +-
 .../org/elasticsearch/xpack/apm/ApmIT.java | 764 +++++++++---------
 .../src/main/config/elasticapm.properties | 1 +
 .../java/org/elasticsearch/xpack/apm/APM.java | 6 +-
 .../elasticsearch/xpack/apm/APMTracer.java | 197 +++--
 .../core/LocalStateCompositeXPackPlugin.java | 4 +-
 .../xpack/security/AuthorizationTracer.java | 73 --
 .../xpack/security/Security.java | 18 +-
 .../security/authz/AuthorizationService.java | 137 +---
 .../SecurityNetty4HttpServerTransport.java | 4 +-
 .../xpack/security/LocalStateSecurity.java | 4 +-
 .../xpack/security/SecurityTests.java | 1 -
 .../authz/AuthorizationServiceTests.java | 13 +-
 35 files changed, 707 insertions(+), 861 deletions(-)
 delete mode 100644 server/src/main/java/org/elasticsearch/tasks/TaskTracer.java
 delete mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/AuthorizationTracer.java

diff --git
a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 323714e98e259..b9ac960aa482e 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -59,7 +59,6 @@ import java.net.InetSocketAddress; import java.net.SocketOption; -import java.util.List; import java.util.concurrent.TimeUnit; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_CHUNK_SIZE; @@ -150,9 +149,9 @@ public Netty4HttpServerTransport( Dispatcher dispatcher, ClusterSettings clusterSettings, SharedGroupFactory sharedGroupFactory, - List tracers + Tracer tracer ) { - super(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher, clusterSettings, tracers); + super(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher, clusterSettings, tracer); Netty4Utils.setAvailableProcessors(EsExecutors.NODE_PROCESSORS_SETTING.get(settings)); NettyAllocator.logAllocatorDescriptionIfNeeded(); this.sharedGroupFactory = sharedGroupFactory; diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Plugin.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Plugin.java index 1359263606e3b..541f859160d4a 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Plugin.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Plugin.java @@ -100,7 +100,7 @@ public Map> getHttpTransports( NetworkService networkService, HttpServerTransport.Dispatcher dispatcher, ClusterSettings clusterSettings, - List tracers + Tracer tracer ) { return Collections.singletonMap( NETTY_HTTP_TRANSPORT_NAME, @@ -113,7 +113,7 @@ public Map> getHttpTransports( dispatcher, clusterSettings, getSharedGroupFactory(settings), - tracers + tracer ) ); } diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 2a36e0e8d5649..d0733b9322d25 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -405,14 +405,12 @@ import org.elasticsearch.rest.action.search.RestSearchScrollAction; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.tracing.Tracer; import org.elasticsearch.usage.UsageService; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Set; import java.util.function.Consumer; import java.util.function.Supplier; @@ -443,7 +441,6 @@ public class ActionModule extends AbstractModule { private final RequestValidators mappingRequestValidators; private final RequestValidators indicesAliasesRequestRequestValidators; private final ThreadPool threadPool; - private final List tracers; public ActionModule( Settings settings, @@ -456,8 +453,7 @@ public ActionModule( NodeClient nodeClient, CircuitBreakerService circuitBreakerService, UsageService usageService, - SystemIndices systemIndices, - List tracers + SystemIndices systemIndices ) { this.settings = settings; this.indexNameExpressionResolver = indexNameExpressionResolver; @@ -468,7 
+464,6 @@ public ActionModule( this.threadPool = threadPool; actions = setupActions(actionPlugins); actionFilters = setupActionFilters(actionPlugins); - this.tracers = Objects.requireNonNullElse(tracers, List.of()); autoCreateIndex = new AutoCreateIndex(settings, clusterSettings, indexNameExpressionResolver, systemIndices); destructiveOperations = new DestructiveOperations(settings, clusterSettings); diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java index 557498a31fa27..ad10b7f6e9e8b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java @@ -28,6 +28,7 @@ import org.elasticsearch.node.Node; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; public class ClusterService extends AbstractLifecycleComponent { private final MasterService masterService; diff --git a/server/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/server/src/main/java/org/elasticsearch/common/network/NetworkModule.java index a36a76bbe324e..af16b3f39745a 100644 --- a/server/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/server/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -123,7 +123,7 @@ public NetworkModule( NetworkService networkService, HttpServerTransport.Dispatcher dispatcher, ClusterSettings clusterSettings, - List tracers + Tracer tracer ) { this.settings = settings; for (NetworkPlugin plugin : plugins) { @@ -137,7 +137,7 @@ public NetworkModule( networkService, dispatcher, clusterSettings, - tracers + tracer ); for (Map.Entry> entry : httpTransportFactory.entrySet()) { registerHttpTransport(entry.getKey(), entry.getValue()); diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index 4f2732e2aa56b..49d6b487b5bbf 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -98,16 +98,17 @@ public ThreadContext(Settings settings) { /** * Removes the current context and resets a default context. The removed context can be * restored by closing the returned {@link StoredContext}. + * @return a stored context that will restore the current context to its state at the point this method was called */ public StoredContext stashContext() { final ThreadContextStruct context = threadLocal.get(); - /** + + /* * X-Opaque-ID should be preserved in a threadContext in order to propagate this across threads. * This is needed so the DeprecationLogger in another thread can see the value of X-Opaque-ID provided by a user. * The same is applied to Task.TRACE_ID. * Otherwise when context is stash, it should be empty. 
*/ - boolean hasHeadersToCopy = false; if (context.requestHeaders.isEmpty() == false) { for (String header : HEADERS_TO_COPY) { @@ -117,9 +118,27 @@ public StoredContext stashContext() { } } } - if (hasHeadersToCopy) { - Map map = headers(context); - ThreadContextStruct threadContextStruct = DEFAULT_CONTEXT.putHeaders(map); + // HACK HACK HACK + final Map copiedTransientEntries = new HashMap<>(); + if (context.transientHeaders.isEmpty() == false) { + List transientKeysToCopy = List.of(Task.APM_TRACE_CONTEXT); + for (String transientKey : transientKeysToCopy) { + if (context.transientHeaders.containsKey(transientKey)) { + copiedTransientEntries.put(transientKey, context.transientHeaders.get(transientKey)); + } + } + } + if (hasHeadersToCopy || copiedTransientEntries.isEmpty() == false) { + ThreadContextStruct threadContextStruct = DEFAULT_CONTEXT; + if (hasHeadersToCopy) { + Map map = headers(context); + threadContextStruct = DEFAULT_CONTEXT.putHeaders(map); + } + if (copiedTransientEntries.isEmpty() == false) { + for (Map.Entry entry : copiedTransientEntries.entrySet()) { + threadContextStruct = threadContextStruct.putTransient(entry.getKey(), entry.getValue()); + } + } threadLocal.set(threadContextStruct); } else { threadLocal.set(DEFAULT_CONTEXT); @@ -138,15 +157,20 @@ public StoredContext newTraceContext() { final Map newTransientHeaders = new HashMap<>(context.transientHeaders); final String previousTraceParent = newRequestHeaders.remove(Task.TRACE_PARENT_HTTP_HEADER); - final String previousTraceState = newRequestHeaders.remove(Task.TRACE_STATE); - if (previousTraceParent != null) { newTransientHeaders.put("parent_" + Task.TRACE_PARENT_HTTP_HEADER, previousTraceParent); } + + final String previousTraceState = newRequestHeaders.remove(Task.TRACE_STATE); if (previousTraceState != null) { newTransientHeaders.put("parent_" + Task.TRACE_STATE, previousTraceState); } + final Object previousTraceContext = newTransientHeaders.remove(Task.APM_TRACE_CONTEXT); + if (previousTraceContext != null) { + newTransientHeaders.put("parent_" + Task.APM_TRACE_CONTEXT, previousTraceContext); + } + threadLocal.set( new ThreadContextStruct( newRequestHeaders, @@ -169,6 +193,8 @@ public StoredContext clearTraceContext() { newTransientHeaders.remove("parent_" + Task.TRACE_PARENT_HTTP_HEADER); newTransientHeaders.remove("parent_" + Task.TRACE_STATE); + newTransientHeaders.remove(Task.APM_TRACE_CONTEXT); + newTransientHeaders.remove("parent_" + Task.APM_TRACE_CONTEXT); threadLocal.set( new ThreadContextStruct( @@ -183,8 +209,8 @@ public StoredContext clearTraceContext() { } private static Map headers(ThreadContextStruct context) { - Map map = Maps.newMapWithExpectedSize(org.elasticsearch.tasks.Task.HEADERS_TO_COPY.size()); - for (String header : org.elasticsearch.tasks.Task.HEADERS_TO_COPY) { + Map map = Maps.newMapWithExpectedSize(HEADERS_TO_COPY.size()); + for (String header : HEADERS_TO_COPY) { final String value = context.requestHeaders.get(header); if (value != null) { map.put(header, value); @@ -730,7 +756,13 @@ private ThreadContextStruct putResponse( return this; } } - return new ThreadContextStruct(requestHeaders, newResponseHeaders, transientHeaders, isSystemContext, newWarningHeaderSize); + return new ThreadContextStruct( + requestHeaders, + newResponseHeaders, + transientHeaders, + isSystemContext, + newWarningHeaderSize + ); } private ThreadContextStruct putTransient(String key, Object value) { diff --git a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java 
b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java index fc85b6e058030..b4ffb940bbe3b 100644 --- a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java +++ b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java @@ -101,7 +101,7 @@ protected AbstractHttpServerTransport( NamedXContentRegistry xContentRegistry, Dispatcher dispatcher, ClusterSettings clusterSettings, - List tracers + Tracer tracer ) { this.settings = settings; this.networkService = networkService; @@ -126,7 +126,7 @@ protected AbstractHttpServerTransport( this.port = SETTING_HTTP_PORT.get(settings); this.maxContentLength = SETTING_HTTP_MAX_CONTENT_LENGTH.get(settings); - this.tracer = new HttpTracer(settings, clusterSettings, tracers); + this.tracer = new HttpTracer(settings, clusterSettings, tracer); clusterSettings.addSettingsUpdateConsumer( TransportSettings.SLOW_OPERATION_THRESHOLD_SETTING, slowLogThreshold -> this.slowLogThresholdMs = slowLogThreshold.getMillis() @@ -465,10 +465,6 @@ private void handleIncomingRequest(final HttpRequest httpRequest, final HttpChan dispatchRequest(restRequest, channel, badRequestCause); } - protected void onTraceStarted(ThreadContext threadContext, RestChannel restChannel) { - tracer.onTraceStarted(threadContext, restChannel); - } - private RestRequest requestWithoutFailedHeader( HttpRequest httpRequest, HttpChannel httpChannel, diff --git a/server/src/main/java/org/elasticsearch/http/HttpTracer.java b/server/src/main/java/org/elasticsearch/http/HttpTracer.java index bb854bba82f58..a66d27a4d1fac 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpTracer.java +++ b/server/src/main/java/org/elasticsearch/http/HttpTracer.java @@ -32,13 +32,13 @@ class HttpTracer { private final Logger logger = LogManager.getLogger(HttpTracer.class); - private final List tracers; + private final Tracer tracer; private volatile String[] tracerLogInclude; private volatile String[] tracerLogExclude; - HttpTracer(Settings settings, ClusterSettings clusterSettings, List tracers) { - this.tracers = tracers; + HttpTracer(Settings settings, ClusterSettings clusterSettings, Tracer tracer) { + this.tracer = tracer; setTracerLogInclude(HttpTransportSettings.SETTING_HTTP_TRACE_LOG_INCLUDE.get(settings)); setTracerLogExclude(HttpTransportSettings.SETTING_HTTP_TRACE_LOG_EXCLUDE.get(settings)); @@ -48,35 +48,35 @@ class HttpTracer { } void onTraceStarted(ThreadContext threadContext, RestChannel channel) { - this.tracers.forEach(t -> t.onTraceStarted(threadContext, channel)); + this.tracer.onTraceStarted(threadContext, channel); } void onTraceStopped(RestChannel channel) { - this.tracers.forEach(t -> t.onTraceStopped(channel)); + this.tracer.onTraceStopped(channel); } void onTraceEvent(RestChannel channel, String eventName) { - this.tracers.forEach(t -> t.onTraceEvent(channel, eventName)); + this.tracer.onTraceEvent(channel, eventName); } public void onTraceException(RestChannel channel, Throwable throwable) { - this.tracers.forEach(t -> t.onTraceException(channel, throwable)); + this.tracer.onTraceException(channel, throwable); } void setAttribute(Traceable traceable, String key, boolean value) { - this.tracers.forEach(t -> t.setAttribute(traceable, key, value)); + this.tracer.setAttribute(traceable, key, value); } void setAttribute(Traceable traceable, String key, double value) { - this.tracers.forEach(t -> t.setAttribute(traceable, key, value)); + this.tracer.setAttribute(traceable, key, value); } void setAttribute(Traceable traceable, 
String key, long value) { - this.tracers.forEach(t -> t.setAttribute(traceable, key, value)); + this.tracer.setAttribute(traceable, key, value); } void setAttribute(Traceable traceable, String key, String value) { - this.tracers.forEach(t -> t.setAttribute(traceable, key, value)); + this.tracer.setAttribute(traceable, key, value); } /** diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 4a21c72d90604..294f755dfc4a6 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -177,7 +177,6 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancellationService; import org.elasticsearch.tasks.TaskResultsService; -import org.elasticsearch.tasks.TaskTracer; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.tracing.Tracer; @@ -215,7 +214,6 @@ import java.util.function.UnaryOperator; import java.util.stream.Collectors; import java.util.stream.Stream; - import javax.net.ssl.SNIHostName; import static java.util.stream.Collectors.toList; @@ -692,10 +690,7 @@ protected Node( ) .toList(); - final List tracers = pluginComponents.stream() - .map(c -> c instanceof Tracer t ? t : null) - .filter(Objects::nonNull) - .toList(); + final Tracer tracer = getTracer(pluginComponents); ActionModule actionModule = new ActionModule( settings, @@ -708,8 +703,7 @@ protected Node( client, circuitBreakerService, usageService, - systemIndices, - tracers + systemIndices ); modules.add(actionModule); @@ -726,7 +720,7 @@ protected Node( networkService, restController, clusterService.getClusterSettings(), - tracers + tracer ); Collection>> indexTemplateMetadataUpgraders = pluginsService.filterPlugins( Plugin.class @@ -756,7 +750,8 @@ protected Node( networkModule.getTransportInterceptor(), localNodeFactory, settingsModule.getClusterSettings(), - taskHeaders + taskHeaders, + tracer ); final GatewayMetaState gatewayMetaState = new GatewayMetaState(); final ResponseCollectorService responseCollectorService = new ResponseCollectorService(clusterService); @@ -770,10 +765,7 @@ protected Node( clusterService.setTaskManager(transportService.getTaskManager()); - final TaskTracer taskTracer = transportService.getTaskManager().getTaskTracer(); - tracers.forEach(taskTracer::addTracer); - - pluginsService.filterPlugins(Plugin.class).forEach(plugin -> plugin.onTracers(tracers)); + pluginsService.filterPlugins(Plugin.class).forEach(plugin -> plugin.onTracer(tracer)); final RecoverySettings recoverySettings = new RecoverySettings(settings, settingsModule.getClusterSettings()); RepositoriesModule repositoriesModule = new RepositoriesModule( @@ -1056,6 +1048,19 @@ protected Node( } } + private Tracer getTracer(Collection pluginComponents) { + final List tracers = pluginComponents.stream() + .map(c -> c instanceof Tracer t ? t : null) + .filter(Objects::nonNull) + .toList(); + + if (tracers.size() > 1) { + throw new IllegalStateException("A single Tracer was expected but got: " + tracers); + } + + return tracers.isEmpty() ? 
Tracer.NOOP : tracers.get(0); + } + private HealthService createHealthService(ClusterService clusterService) { var serverHealthIndicatorServices = List.of( new InstanceHasMasterHealthIndicatorService(clusterService), @@ -1099,9 +1104,10 @@ protected TransportService newTransportService( TransportInterceptor interceptor, Function localNodeFactory, ClusterSettings clusterSettings, - Set taskHeaders + Set taskHeaders, + Tracer tracer ) { - return new TransportService(settings, transport, threadPool, interceptor, localNodeFactory, clusterSettings, taskHeaders); + return new TransportService(settings, transport, threadPool, interceptor, localNodeFactory, clusterSettings, taskHeaders, tracer); } protected void processRecoverySettings(ClusterSettings clusterSettings, RecoverySettings recoverySettings) { diff --git a/server/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java b/server/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java index 460a0100a8172..80561167b6442 100644 --- a/server/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java @@ -76,7 +76,7 @@ default Map> getHttpTransports( NetworkService networkService, HttpServerTransport.Dispatcher dispatcher, ClusterSettings clusterSettings, - List tracers + Tracer tracer ) { return Collections.emptyMap(); } diff --git a/server/src/main/java/org/elasticsearch/plugins/Plugin.java b/server/src/main/java/org/elasticsearch/plugins/Plugin.java index a9fbd72490122..f134ccb57ec16 100644 --- a/server/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -208,7 +208,7 @@ public Collection getAdditionalIndexSettingProviders() { } /** - * Called with a list of Tracers so that each plugin can have a chance to work with them. + * Called with a Tracers so that each plugin has a chance to work with it. */ - public void onTracers(List tracers) {} + public void onTracer(Tracer tracer) {} } diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index ba62876852654..3f711f06f1e13 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -301,7 +301,7 @@ public void registerHandler(final RestHandler handler) { @Override public void dispatchRequest(RestRequest request, RestChannel channel, ThreadContext threadContext) { threadContext.addResponseHeader(ELASTIC_PRODUCT_HTTP_HEADER, ELASTIC_PRODUCT_HTTP_HEADER_VALUE); - try { + try (var ignored = threadContext.newTraceContext()) { tryAllHandlers(request, channel, threadContext); } catch (Exception e) { try { diff --git a/server/src/main/java/org/elasticsearch/tasks/Task.java b/server/src/main/java/org/elasticsearch/tasks/Task.java index 7644969d5029b..6cc1212f1e803 100644 --- a/server/src/main/java/org/elasticsearch/tasks/Task.java +++ b/server/src/main/java/org/elasticsearch/tasks/Task.java @@ -49,6 +49,8 @@ public class Task implements Traceable { */ public static final String TRACE_STATE = "tracestate"; + public static final String APM_TRACE_CONTEXT = "apm.local.context"; + /** * Parsed part of traceparent. It is stored in thread context and emitted in logs. * Has to be declared as a header copied over for tasks. 
diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskManager.java b/server/src/main/java/org/elasticsearch/tasks/TaskManager.java index 7c34116ea2dd3..99ece2e064de1 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskManager.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskManager.java @@ -38,6 +38,7 @@ import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.TaskTransportChannel; import org.elasticsearch.transport.TcpChannel; import org.elasticsearch.transport.TcpTransportChannel; @@ -93,16 +94,17 @@ public class TaskManager implements ClusterStateApplier { private DiscoveryNodes lastDiscoveryNodes = DiscoveryNodes.EMPTY_NODES; - private final TaskTracer taskTracer = new TaskTracer(); + private final Tracer tracer; private final ByteSizeValue maxHeaderSize; private final Map channelPendingTaskTrackers = ConcurrentCollections.newConcurrentMap(); private final SetOnce cancellationService = new SetOnce<>(); - public TaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders) { + public TaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders, Tracer tracer) { this.threadPool = threadPool; this.taskHeaders = taskHeaders.toArray(Strings.EMPTY_ARRAY); this.maxHeaderSize = SETTING_HTTP_MAX_HEADER_SIZE.get(settings); + this.tracer = tracer; } public void setTaskResultsService(TaskResultsService taskResultsService) { @@ -144,8 +146,8 @@ public Task register(String type, String action, TaskAwareRequest request) { registerCancellableTask(task); } else { Task previousTask = tasks.put(task.getId(), task); + tracer.onTraceStarted(threadContext, task); assert previousTask == null; - taskTracer.onTaskRegistered(threadContext, task); } return task; } @@ -163,6 +165,7 @@ public Task reg } else { unregisterChildNode = null; } + try (var ignored = threadPool.getThreadContext().newTraceContext()) { final Task task; try { @@ -171,8 +174,7 @@ public Task reg Releasables.close(unregisterChildNode); throw e; } - // NOTE: ActionListener cannot infer Response, see https://bugs.openjdk.java.net/browse/JDK-8203195 - action.execute(task, request, new ActionListener() { + action.execute(task, request, new ActionListener<>() { @Override public void onResponse(Response response) { try { @@ -208,7 +210,7 @@ private void registerCancellableTask(Task task) { CancellableTask cancellableTask = (CancellableTask) task; CancellableTaskHolder holder = new CancellableTaskHolder(cancellableTask); cancellableTasks.put(task, holder); - taskTracer.onTaskRegistered(threadPool.getThreadContext(), task); + tracer.onTraceStarted(threadPool.getThreadContext(), task); // Check if this task was banned before we start it. The empty check is used to avoid // computing the hash code of the parent taskId as most of the time bannedParents is empty. 
if (task.getParentTaskId().isSet() && bannedParents.isEmpty() == false) { @@ -263,7 +265,7 @@ public Task unregister(Task task) { return removedTask; } } finally { - taskTracer.onTaskUnregistered(task); + tracer.onTraceStopped(task); } } @@ -755,8 +757,4 @@ public void cancelTaskAndDescendants(CancellableTask task, String reason, boolea public List getTaskHeaders() { return List.of(taskHeaders); } - - public TaskTracer getTaskTracer() { - return taskTracer; - } } diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java b/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java deleted file mode 100644 index c0b2091a1fa9f..0000000000000 --- a/server/src/main/java/org/elasticsearch/tasks/TaskTracer.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.tasks; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.tracing.Tracer; - -import java.util.List; -import java.util.concurrent.CopyOnWriteArrayList; - -public class TaskTracer { - - private static final Logger logger = LogManager.getLogger(TaskTracer.class); - - private final List tracers = new CopyOnWriteArrayList<>(); - - public void addTracer(Tracer tracer) { - if (tracer != null) { - tracers.add(tracer); - } - } - - public void onTaskRegistered(ThreadContext threadContext, Task task) { - for (Tracer tracer : tracers) { - try { - tracer.onTraceStarted(threadContext, task); - } catch (Exception e) { - assert false : e; - logger.warn( - new ParameterizedMessage( - "task tracing listener [{}] failed on registration of task [{}][{}]", - tracer, - task.getId(), - task.getAction() - ), - e - ); - } - } - } - - public void onTaskUnregistered(Task task) { - for (Tracer tracer : tracers) { - try { - tracer.onTraceStopped(task); - } catch (Exception e) { - assert false : e; - logger.warn( - new ParameterizedMessage( - "task tracing listener [{}] failed on unregistration of task [{}][{}]", - tracer, - task.getId(), - task.getAction() - ), - e - ); - } - } - } -} diff --git a/server/src/main/java/org/elasticsearch/tracing/Tracer.java b/server/src/main/java/org/elasticsearch/tracing/Tracer.java index 37dad1e7a29d5..36ce1f3fa9428 100644 --- a/server/src/main/java/org/elasticsearch/tracing/Tracer.java +++ b/server/src/main/java/org/elasticsearch/tracing/Tracer.java @@ -36,4 +36,32 @@ public interface Tracer { void setAttribute(Traceable traceable, String key, long value); void setAttribute(Traceable traceable, String key, String value); + + Tracer NOOP = new Tracer() { + @Override + public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { + return; + } + + @Override + public void onTraceStopped(Traceable traceable) {} + + @Override + public void onTraceEvent(Traceable traceable, String eventName) {} + + @Override + public void onTraceException(Traceable traceable, Throwable throwable) {} + + @Override + public void setAttribute(Traceable traceable, String key, boolean value) {} + + @Override + public void setAttribute(Traceable traceable, String key, double 
value) {} + + @Override + public void setAttribute(Traceable traceable, String key, long value) {} + + @Override + public void setAttribute(Traceable traceable, String key, String value) {} + }; } diff --git a/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java b/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java index 4c3f02affdb84..ee62648e98aae 100644 --- a/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java +++ b/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java @@ -16,6 +16,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import java.io.IOException; @@ -28,6 +29,7 @@ public class RequestHandlerRegistry { private final boolean canTripCircuitBreaker; private final String executor; private final TaskManager taskManager; + private final Tracer tracer; private final Writeable.Reader requestReader; public RequestHandlerRegistry( @@ -38,7 +40,8 @@ public RequestHandlerRegistry( TransportRequestHandler handler, String executor, boolean forceExecution, - boolean canTripCircuitBreaker + boolean canTripCircuitBreaker, + Tracer tracer ) { this.threadPool = threadPool; this.action = action; @@ -48,6 +51,7 @@ public RequestHandlerRegistry( this.canTripCircuitBreaker = canTripCircuitBreaker; this.executor = executor; this.taskManager = taskManager; + this.tracer = tracer; } public String getAction() { @@ -110,7 +114,8 @@ public static RequestHandlerRegistry replaceHand handler, registry.executor, registry.forceExecution, - registry.canTripCircuitBreaker + registry.canTripCircuitBreaker, + registry.tracer ); } } diff --git a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java index ed4f55d451edf..5fc20f8b84e23 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java @@ -42,6 +42,7 @@ import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import java.io.IOException; import java.io.UncheckedIOException; @@ -103,6 +104,7 @@ protected boolean removeEldestEntry(Map.Entry eldest) { // tracer log private final Logger tracerLog; + private final Tracer tracer; volatile String[] tracerLogInclude; volatile String[] tracerLogExclude; @@ -180,7 +182,8 @@ public TransportService( TransportInterceptor transportInterceptor, Function localNodeFactory, @Nullable ClusterSettings clusterSettings, - Set taskHeaders + Set taskHeaders, + Tracer tracer ) { this( settings, @@ -190,7 +193,8 @@ public TransportService( localNodeFactory, clusterSettings, taskHeaders, - new ClusterConnectionManager(settings, transport, threadPool.getThreadContext()) + new ClusterConnectionManager(settings, transport, threadPool.getThreadContext()), + tracer ); } @@ -202,18 +206,20 @@ public TransportService( Function localNodeFactory, @Nullable ClusterSettings clusterSettings, Set taskHeaders, - ConnectionManager connectionManager + ConnectionManager connectionManager, + Tracer tracer ) { this.transport = transport; transport.setSlowLogThreshold(TransportSettings.SLOW_OPERATION_THRESHOLD_SETTING.get(settings)); this.threadPool = threadPool; this.localNodeFactory = localNodeFactory; this.connectionManager 
= connectionManager; + this.tracer = tracer; this.clusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings); setTracerLogInclude(TransportSettings.TRACE_LOG_INCLUDE_SETTING.get(settings)); setTracerLogExclude(TransportSettings.TRACE_LOG_EXCLUDE_SETTING.get(settings)); tracerLog = Loggers.getLogger(logger, ".tracer"); - taskManager = createTaskManager(settings, threadPool, taskHeaders); + taskManager = createTaskManager(settings, threadPool, taskHeaders, tracer); this.interceptor = transportInterceptor; this.asyncSender = interceptor.interceptSender(this::sendRequestInternal); this.remoteClusterClient = DiscoveryNode.isRemoteClusterClient(settings); @@ -255,8 +261,8 @@ public TaskManager getTaskManager() { return taskManager; } - protected TaskManager createTaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders) { - return new TaskManager(settings, threadPool, taskHeaders); + protected TaskManager createTaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders, Tracer tracer) { + return new TaskManager(settings, threadPool, taskHeaders, tracer); } void setTracerLogInclude(List tracerLogInclude) { @@ -1045,7 +1051,8 @@ public void registerRequestHandler( handler, executor, false, - true + true, + tracer ); transport.registerRequestHandler(reg); } @@ -1078,7 +1085,8 @@ public void registerRequestHandler( handler, executor, forceExecution, - canTripCircuitBreaker + canTripCircuitBreaker, + tracer ); transport.registerRequestHandler(reg); } diff --git a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java index 7142adc106b74..0dd385a069208 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java @@ -115,7 +115,6 @@ public void testSetupRestHandlerContainsKnownBuiltin() { null, null, usageService, - null, null ); actionModule.initRestHandlers(null); @@ -172,7 +171,6 @@ public String getName() { null, null, usageService, - null, null ); Exception e = expectThrows(IllegalArgumentException.class, () -> actionModule.initRestHandlers(null)); @@ -222,7 +220,6 @@ public List getRestHandlers( null, null, usageService, - null, null ); actionModule.initRestHandlers(null); @@ -267,7 +264,6 @@ public void test3rdPartyHandlerIsNotInstalled() { null, null, usageService, - null, null ) ); diff --git a/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java b/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java index 74e1aaf689d03..fe9958b906b10 100644 --- a/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java +++ b/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java @@ -121,7 +121,7 @@ public Map> getHttpTransports( NetworkService networkService, HttpServerTransport.Dispatcher requestDispatcher, ClusterSettings clusterSettings, - List tracers + Tracer tracer ) { return Collections.singletonMap("custom", custom); } @@ -167,7 +167,7 @@ public Map> getHttpTransports( NetworkService networkService, HttpServerTransport.Dispatcher requestDispatcher, ClusterSettings clusterSettings, - List tracers + Tracer tracer ) { Map> supplierMap = new HashMap<>(); supplierMap.put("custom", custom); @@ -211,7 +211,7 @@ public Map> getHttpTransports( NetworkService networkService, HttpServerTransport.Dispatcher requestDispatcher, ClusterSettings clusterSettings, - List tracers + Tracer tracer ) { Map> supplierMap = new 
HashMap<>(); supplierMap.put("custom", custom); @@ -294,7 +294,7 @@ private NetworkModule newNetworkModule(Settings settings, NetworkPlugin... plugi null, new NullDispatcher(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), - List.of() + Tracer.NOOP ); } } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/service/FakeThreadPoolMasterService.java b/test/framework/src/main/java/org/elasticsearch/cluster/service/FakeThreadPoolMasterService.java index b9dfd40884f34..935eefd633d03 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/service/FakeThreadPoolMasterService.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/service/FakeThreadPoolMasterService.java @@ -22,6 +22,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.node.Node; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import java.util.ArrayList; import java.util.List; @@ -50,7 +51,8 @@ public FakeThreadPoolMasterService( super( Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), nodeName).build(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), - threadPool + threadPool, + Tracer.NOOP ); this.name = serviceName; this.onTaskAvailableToRun = onTaskAvailableToRun; diff --git a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java index df2103178d50a..3e3a93c224bdb 100644 --- a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java +++ b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java @@ -39,6 +39,7 @@ import org.elasticsearch.test.MockHttpTransport; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportInterceptor; import org.elasticsearch.transport.TransportService; @@ -178,16 +179,34 @@ protected TransportService newTransportService( TransportInterceptor interceptor, Function localNodeFactory, ClusterSettings clusterSettings, - Set taskHeaders + Set taskHeaders, + Tracer tracer ) { // we use the MockTransportService.TestPlugin class as a marker to create a network // module with this MockNetworkService. NetworkService is such an integral part of the systme // we don't allow to plug it in from plugins or anything. this is a test-only override and // can't be done in a production env. 
if (getPluginsService().filterPlugins(MockTransportService.TestPlugin.class).isEmpty()) { - return super.newTransportService(settings, transport, threadPool, interceptor, localNodeFactory, clusterSettings, taskHeaders); + return super.newTransportService( + settings, + transport, + threadPool, + interceptor, + localNodeFactory, + clusterSettings, + taskHeaders, + tracer + ); } else { - return new MockTransportService(settings, transport, threadPool, interceptor, localNodeFactory, clusterSettings, taskHeaders); + return new MockTransportService( + settings, + transport, + threadPool, + interceptor, + localNodeFactory, + clusterSettings, + taskHeaders + ); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index 624613304a6e0..96218ba9ea2fa 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -36,6 +36,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.tasks.MockTaskManager; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.ClusterConnectionManager; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.ConnectionProfile; @@ -232,7 +233,8 @@ private MockTransportService( localNodeFactory, clusterSettings, taskHeaders, - new StubbableConnectionManager(new ClusterConnectionManager(settings, transport, threadPool.getThreadContext())) + new StubbableConnectionManager(new ClusterConnectionManager(settings, transport, threadPool.getThreadContext())), + Tracer.NOOP ); this.original = transport.getDelegate(); } diff --git a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java index 7bf164e30f0eb..d52caf6286569 100644 --- a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java +++ b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -7,390 +7,390 @@ package org.elasticsearch.xpack.apm; -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.sdk.trace.data.SpanData; - -import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; -import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchAction; -import org.elasticsearch.action.search.SearchTransportService; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; -import org.elasticsearch.cluster.coordination.PublicationTransportHandler; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.CollectionUtils; -import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.plugins.PluginsService; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.tasks.TaskId; 
-import org.elasticsearch.tasks.TaskTracer; +//import io.opentelemetry.api.common.AttributeKey; +//import io.opentelemetry.sdk.trace.data.SpanData; +// +//import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; +//import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +//import org.elasticsearch.action.bulk.BulkRequestBuilder; +//import org.elasticsearch.action.index.IndexRequestBuilder; +//import org.elasticsearch.action.search.SearchAction; +//import org.elasticsearch.action.search.SearchTransportService; +//import org.elasticsearch.action.support.WriteRequest; +//import org.elasticsearch.client.Request; +//import org.elasticsearch.client.Response; +//import org.elasticsearch.cluster.coordination.PublicationTransportHandler; +//import org.elasticsearch.cluster.metadata.IndexMetadata; +//import org.elasticsearch.common.settings.SecureString; +//import org.elasticsearch.common.settings.Settings; +//import org.elasticsearch.common.util.CollectionUtils; +//import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; +//import org.elasticsearch.plugins.Plugin; +//import org.elasticsearch.plugins.PluginsService; +//import org.elasticsearch.tasks.Task; +//import org.elasticsearch.tasks.TaskId; +//import org.elasticsearch.tasks.TaskTracer; import org.elasticsearch.test.SecurityIntegTestCase; -import org.elasticsearch.test.SecuritySettingsSource; -import org.elasticsearch.test.SecuritySettingsSourceField; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; -import org.junit.After; - -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import static java.util.stream.Collectors.toList; -import static org.elasticsearch.cluster.service.MasterService.STATE_UPDATE_ACTION_NAME; -import static org.elasticsearch.indices.recovery.PeerRecoverySourceService.Actions.START_RECOVERY; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasItems; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.notNullValue; +//import org.elasticsearch.test.SecuritySettingsSource; +//import org.elasticsearch.test.SecuritySettingsSourceField; +//import org.elasticsearch.transport.TransportService; +//import org.elasticsearch.xcontent.XContentType; +//import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; +//import org.junit.After; +// +//import java.util.Collection; +//import java.util.Collections; +//import java.util.List; +//import java.util.concurrent.TimeUnit; +//import java.util.stream.Collectors; +// +//import static java.util.stream.Collectors.toList; +//import static org.elasticsearch.cluster.service.MasterService.STATE_UPDATE_ACTION_NAME; +//import static org.elasticsearch.indices.recovery.PeerRecoverySourceService.Actions.START_RECOVERY; +//import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +//import static org.hamcrest.Matchers.contains; +//import static org.hamcrest.Matchers.empty; +//import static org.hamcrest.Matchers.equalTo; +//import static org.hamcrest.Matchers.hasItems; +//import static org.hamcrest.Matchers.hasSize; 
+//import static org.hamcrest.Matchers.notNullValue; public class ApmIT extends SecurityIntegTestCase { - @Override - protected Collection> nodePlugins() { - return CollectionUtils.appendToCopy(super.nodePlugins(), APM.class); - } - - @Override - protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { - Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)); - // ((MockSecureSettings) builder.getSecureSettings()).setString( - // APMTracer.APM_ENDPOINT_SETTING.getKey(), - // System.getProperty("tests.apm.endpoint", "") - // ); - // ((MockSecureSettings) builder.getSecureSettings()).setString( - // APMTracer.APM_TOKEN_SETTING.getKey(), - // System.getProperty("tests.apm.token", "") - // ); - builder.put(APMTracer.APM_ENABLED_SETTING.getKey(), true).put("xpack.security.authz.tracing", true); - return builder.build(); - } - - @Override - protected boolean addMockHttpTransport() { - return false; - } - - @After - public void clearRecordedSpans() { - APMTracer.CAPTURING_SPAN_EXPORTER.clear(); - } - - public void testModule() { - List plugins = internalCluster().getAnyMasterNodeInstance(PluginsService.class).filterPlugins(APM.class); - assertThat(plugins, hasSize(1)); - - TransportService transportService = internalCluster().getInstance(TransportService.class); - final TaskTracer taskTracer = transportService.getTaskManager().getTaskTracer(); - assertThat(taskTracer, notNullValue()); - - final Task testTask = new Task(randomNonNegativeLong(), "test", "action", "", TaskId.EMPTY_TASK_ID, Collections.emptyMap()); - - APMTracer.CAPTURING_SPAN_EXPORTER.clear(); - - taskTracer.onTaskRegistered(transportService.getThreadPool().getThreadContext(), testTask); - taskTracer.onTaskUnregistered(testTask); - - final List capturedSpans = APMTracer.CAPTURING_SPAN_EXPORTER.getCapturedSpans(); - boolean found = false; - final Long targetId = testTask.getId(); - for (SpanData capturedSpan : capturedSpans) { - if (targetId.equals(capturedSpan.getAttributes().get(AttributeKey.longKey("es.task.id")))) { - found = true; - assertTrue(capturedSpan.hasEnded()); - } - } - assertTrue(found); - } - - public void testRecordsNestedSpans() { - - APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events - - client().admin().cluster().prepareListTasks().get(); - - var parentTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists").collect(toList()); - assertThat(parentTasks, hasSize(1)); - var parentTask = parentTasks.get(0); - assertThat(parentTask.getParentSpanId(), equalTo("0000000000000000")); - - var childrenTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists[n]").collect(toList()); - assertThat(childrenTasks, hasSize(internalCluster().size())); - for (SpanData childrenTask : childrenTasks) { - assertThat(childrenTask.getParentSpanId(), equalTo(parentTask.getSpanId())); - assertThat(childrenTask.getTraceId(), equalTo(parentTask.getTraceId())); - } - } - - public void testRecovery() throws Exception { - internalCluster().ensureAtLeastNumDataNodes(2); - - assertAcked( - client().admin() - .indices() - .prepareCreate("test-index") - .setSettings( - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - ) - ); - - ensureGreen("test-index"); - - indexRandom(true, true, client().prepareIndex("test-index").setSource("{}", XContentType.JSON)); - flushAndRefresh("test-index"); - - final APMTracer.CapturingSpanExporter spanExporter = 
APMTracer.CAPTURING_SPAN_EXPORTER; - spanExporter.clear(); - - assertAcked( - client().admin() - .indices() - .prepareUpdateSettings("test-index") - .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)) - ); - - ensureGreen("test-index"); - - final SpanData clusterUpdateSpan = spanExporter.findSpanByName(STATE_UPDATE_ACTION_NAME) - .findAny() - .orElseThrow(() -> new AssertionError("not found")); - - final List clusterUpdateChildActions = spanExporter.findSpan( - spanData -> spanData.getParentSpanId().equals(clusterUpdateSpan.getSpanId()) - ).map(SpanData::getName).collect(toList()); - - assertThat( - clusterUpdateChildActions, - hasItems(PublicationTransportHandler.PUBLISH_STATE_ACTION_NAME, PublicationTransportHandler.COMMIT_STATE_ACTION_NAME) - ); - - final SpanData recoverySpan = spanExporter.findSpanByName(START_RECOVERY) - .findAny() - .orElseThrow(() -> new AssertionError("not found")); - final List recoveryChildActions = spanExporter.findSpan( - spanData -> spanData.getParentSpanId().equals(recoverySpan.getSpanId()) - ).map(SpanData::getName).collect(toList()); - - assertThat( - recoveryChildActions, - hasItems( - PeerRecoveryTargetService.Actions.FILES_INFO, - PeerRecoveryTargetService.Actions.FILE_CHUNK, - PeerRecoveryTargetService.Actions.CLEAN_FILES, - PeerRecoveryTargetService.Actions.PREPARE_TRANSLOG, - PeerRecoveryTargetService.Actions.FINALIZE - ) - ); - - } - - public void testSearch() throws Exception { - - internalCluster().ensureAtLeastNumDataNodes(2); - final int nodeCount = internalCluster().numDataNodes(); - - assertAcked( - client().admin() - .indices() - .prepareCreate("test-matching") - .setMapping("{\"properties\":{\"message\":{\"type\":\"text\"},\"@timestamp\":{\"type\":\"date\"}}}") - .setSettings( - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, nodeCount * 6) - ) - ); - - assertAcked( - client().admin() - .indices() - .prepareCreate("test-notmatching") - .setMapping("{\"properties\":{\"message\":{\"type\":\"text\"},\"@timestamp\":{\"type\":\"date\"}}}") - .setSettings( - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, nodeCount * 6) - ) - ); - - ensureGreen("test-matching", "test-notmatching"); - - final String matchingDate = "2021-11-17"; - final String nonMatchingDate = "2021-01-01"; - - final BulkRequestBuilder bulkRequestBuilder = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - - for (int i = 0; i < 1000; i++) { - final boolean isMatching = randomBoolean(); - final IndexRequestBuilder indexRequestBuilder = client().prepareIndex(isMatching ? "test-matching" : "test-notmatching"); - indexRequestBuilder.setSource( - "{\"@timestamp\":\"" + (isMatching ? 
matchingDate : nonMatchingDate) + "\",\"message\":\"\"}", - XContentType.JSON - ); - bulkRequestBuilder.add(indexRequestBuilder); - } - - assertFalse(bulkRequestBuilder.execute().actionGet(10, TimeUnit.SECONDS).hasFailures()); - - final APMTracer.CapturingSpanExporter spanExporter = APMTracer.CAPTURING_SPAN_EXPORTER; - spanExporter.clear(); - - final Request searchRequest = new Request("GET", "_search"); - searchRequest.addParameter("search_type", "query_then_fetch"); - searchRequest.addParameter("pre_filter_shard_size", "1"); - searchRequest.setJsonEntity("{\"query\":{\"range\":{\"@timestamp\":{\"gt\":\"2021-11-01\"}}}}"); - searchRequest.setOptions( - searchRequest.getOptions() - .toBuilder() - .addHeader( - "Authorization", - UsernamePasswordToken.basicAuthHeaderValue( - SecuritySettingsSource.TEST_USER_NAME, - new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()) - ) - ) - ); - - final Response searchResponse = getRestClient().performRequest(searchRequest); - - assertTrue(spanExporter.findSpanByName(SearchAction.NAME).findAny().isPresent()); - assertTrue(spanExporter.findSpanByName(SearchTransportService.QUERY_CAN_MATCH_NODE_NAME).findAny().isPresent()); - } - - public void testDoesNotRecordSpansWhenDisabled() { - - client().admin() - .cluster() - .updateSettings( - new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder().put(APMTracer.APM_ENABLED_SETTING.getKey(), false).build() - ) - ) - .actionGet(); - - try { - APMTracer.CAPTURING_SPAN_EXPORTER.clear(); - - client().admin().cluster().prepareListTasks().get(); - - assertThat(APMTracer.CAPTURING_SPAN_EXPORTER.getCapturedSpans(), empty()); - } finally { - client().admin() - .cluster() - .updateSettings( - new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder().put(APMTracer.APM_ENABLED_SETTING.getKey(), (String) null).build() - ) - ) - .actionGet(); - } - } - - public void testFilterByNameGivenSingleCompleteMatch() { - - client().admin() - .cluster() - .updateSettings( - new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), "cluster:monitor/tasks/lists").build() - ) - ) - .actionGet(); - - APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events - - try { - client().admin().cluster().prepareListTasks().get(); - - var parentTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists").collect(toList()); - assertThat(parentTasks, hasSize(1)); - - var childrenTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists[n]").collect(toList()); - assertThat(childrenTasks, empty()); - } finally { - client().admin() - .cluster() - .updateSettings( - new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), (String) null).build() - ) - ) - .actionGet(); - } - } - - public void testFilterByNameGivenSinglePattern() { - - client().admin() - .cluster() - .updateSettings( - new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), "*/tasks/lists*").build() - ) - ) - .actionGet(); - - APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events - - try { - client().admin().cluster().prepareListTasks().get(); - - var parentTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists").collect(toList()); - assertThat(parentTasks, hasSize(1)); - - var 
childrenTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists[n]").collect(toList()); - assertThat(childrenTasks, hasSize(internalCluster().size())); - } finally { - client().admin() - .cluster() - .updateSettings( - new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), (String) null).build() - ) - ) - .actionGet(); - } - } - - public void testFilterByNameGivenTwoPatterns() { - - client().admin() - .cluster() - .updateSettings( - new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), "*/tasks/lists,*/nodes/stats").build() - ) - ) - .actionGet(); - - APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events - - try { - client().admin().cluster().prepareListTasks().get(); - client().admin().cluster().nodesStats(new NodesStatsRequest()).actionGet(); - - var spans = APMTracer.CAPTURING_SPAN_EXPORTER.getCapturedSpans().stream().map(SpanData::getName).collect(Collectors.toSet()); - assertThat(spans, contains("cluster:monitor/nodes/stats", "cluster:monitor/tasks/lists")); - } finally { - client().admin() - .cluster() - .updateSettings( - new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), (String) null).build() - ) - ) - .actionGet(); - } - } +// @Override +// protected Collection> nodePlugins() { +// return CollectionUtils.appendToCopy(super.nodePlugins(), APM.class); +// } +// +// @Override +// protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { +// Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)); +// // ((MockSecureSettings) builder.getSecureSettings()).setString( +// // APMTracer.APM_ENDPOINT_SETTING.getKey(), +// // System.getProperty("tests.apm.endpoint", "") +// // ); +// // ((MockSecureSettings) builder.getSecureSettings()).setString( +// // APMTracer.APM_TOKEN_SETTING.getKey(), +// // System.getProperty("tests.apm.token", "") +// // ); +// builder.put(APMTracer.APM_ENABLED_SETTING.getKey(), true).put("xpack.security.authz.tracing", true); +// return builder.build(); +// } +// +// @Override +// protected boolean addMockHttpTransport() { +// return false; +// } +// +// @After +// public void clearRecordedSpans() { +// APMTracer.CAPTURING_SPAN_EXPORTER.clear(); +// } +// +// public void testModule() { +// List plugins = internalCluster().getAnyMasterNodeInstance(PluginsService.class).filterPlugins(APM.class); +// assertThat(plugins, hasSize(1)); +// +// TransportService transportService = internalCluster().getInstance(TransportService.class); +// final TaskTracer taskTracer = transportService.getTaskManager().getTaskTracer(); +// assertThat(taskTracer, notNullValue()); +// +// final Task testTask = new Task(randomNonNegativeLong(), "test", "action", "", TaskId.EMPTY_TASK_ID, Collections.emptyMap()); +// +// APMTracer.CAPTURING_SPAN_EXPORTER.clear(); +// +// taskTracer.onTaskRegistered(transportService.getThreadPool().getThreadContext(), testTask); +// taskTracer.onTaskUnregistered(testTask); +// +// final List capturedSpans = APMTracer.CAPTURING_SPAN_EXPORTER.getCapturedSpans(); +// boolean found = false; +// final Long targetId = testTask.getId(); +// for (SpanData capturedSpan : capturedSpans) { +// if (targetId.equals(capturedSpan.getAttributes().get(AttributeKey.longKey("es.task.id")))) { +// found = true; +// 
assertTrue(capturedSpan.hasEnded()); +// } +// } +// assertTrue(found); +// } +// +// public void testRecordsNestedSpans() { +// +// APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events +// +// client().admin().cluster().prepareListTasks().get(); +// +// var parentTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists").collect(toList()); +// assertThat(parentTasks, hasSize(1)); +// var parentTask = parentTasks.get(0); +// assertThat(parentTask.getParentSpanId(), equalTo("0000000000000000")); +// +// var childrenTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists[n]").collect(toList()); +// assertThat(childrenTasks, hasSize(internalCluster().size())); +// for (SpanData childrenTask : childrenTasks) { +// assertThat(childrenTask.getParentSpanId(), equalTo(parentTask.getSpanId())); +// assertThat(childrenTask.getTraceId(), equalTo(parentTask.getTraceId())); +// } +// } +// +// public void testRecovery() throws Exception { +// internalCluster().ensureAtLeastNumDataNodes(2); +// +// assertAcked( +// client().admin() +// .indices() +// .prepareCreate("test-index") +// .setSettings( +// Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) +// ) +// ); +// +// ensureGreen("test-index"); +// +// indexRandom(true, true, client().prepareIndex("test-index").setSource("{}", XContentType.JSON)); +// flushAndRefresh("test-index"); +// +// final APMTracer.CapturingSpanExporter spanExporter = APMTracer.CAPTURING_SPAN_EXPORTER; +// spanExporter.clear(); +// +// assertAcked( +// client().admin() +// .indices() +// .prepareUpdateSettings("test-index") +// .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)) +// ); +// +// ensureGreen("test-index"); +// +// final SpanData clusterUpdateSpan = spanExporter.findSpanByName(STATE_UPDATE_ACTION_NAME) +// .findAny() +// .orElseThrow(() -> new AssertionError("not found")); +// +// final List clusterUpdateChildActions = spanExporter.findSpan( +// spanData -> spanData.getParentSpanId().equals(clusterUpdateSpan.getSpanId()) +// ).map(SpanData::getName).collect(toList()); +// +// assertThat( +// clusterUpdateChildActions, +// hasItems(PublicationTransportHandler.PUBLISH_STATE_ACTION_NAME, PublicationTransportHandler.COMMIT_STATE_ACTION_NAME) +// ); +// +// final SpanData recoverySpan = spanExporter.findSpanByName(START_RECOVERY) +// .findAny() +// .orElseThrow(() -> new AssertionError("not found")); +// final List recoveryChildActions = spanExporter.findSpan( +// spanData -> spanData.getParentSpanId().equals(recoverySpan.getSpanId()) +// ).map(SpanData::getName).collect(toList()); +// +// assertThat( +// recoveryChildActions, +// hasItems( +// PeerRecoveryTargetService.Actions.FILES_INFO, +// PeerRecoveryTargetService.Actions.FILE_CHUNK, +// PeerRecoveryTargetService.Actions.CLEAN_FILES, +// PeerRecoveryTargetService.Actions.PREPARE_TRANSLOG, +// PeerRecoveryTargetService.Actions.FINALIZE +// ) +// ); +// +// } +// +// public void testSearch() throws Exception { +// +// internalCluster().ensureAtLeastNumDataNodes(2); +// final int nodeCount = internalCluster().numDataNodes(); +// +// assertAcked( +// client().admin() +// .indices() +// .prepareCreate("test-matching") +// .setMapping("{\"properties\":{\"message\":{\"type\":\"text\"},\"@timestamp\":{\"type\":\"date\"}}}") +// .setSettings( +// Settings.builder() +// .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) +// 
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, nodeCount * 6) +// ) +// ); +// +// assertAcked( +// client().admin() +// .indices() +// .prepareCreate("test-notmatching") +// .setMapping("{\"properties\":{\"message\":{\"type\":\"text\"},\"@timestamp\":{\"type\":\"date\"}}}") +// .setSettings( +// Settings.builder() +// .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) +// .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, nodeCount * 6) +// ) +// ); +// +// ensureGreen("test-matching", "test-notmatching"); +// +// final String matchingDate = "2021-11-17"; +// final String nonMatchingDate = "2021-01-01"; +// +// final BulkRequestBuilder bulkRequestBuilder = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); +// +// for (int i = 0; i < 1000; i++) { +// final boolean isMatching = randomBoolean(); +// final IndexRequestBuilder indexRequestBuilder = client().prepareIndex(isMatching ? "test-matching" : "test-notmatching"); +// indexRequestBuilder.setSource( +// "{\"@timestamp\":\"" + (isMatching ? matchingDate : nonMatchingDate) + "\",\"message\":\"\"}", +// XContentType.JSON +// ); +// bulkRequestBuilder.add(indexRequestBuilder); +// } +// +// assertFalse(bulkRequestBuilder.execute().actionGet(10, TimeUnit.SECONDS).hasFailures()); +// +// final APMTracer.CapturingSpanExporter spanExporter = APMTracer.CAPTURING_SPAN_EXPORTER; +// spanExporter.clear(); +// +// final Request searchRequest = new Request("GET", "_search"); +// searchRequest.addParameter("search_type", "query_then_fetch"); +// searchRequest.addParameter("pre_filter_shard_size", "1"); +// searchRequest.setJsonEntity("{\"query\":{\"range\":{\"@timestamp\":{\"gt\":\"2021-11-01\"}}}}"); +// searchRequest.setOptions( +// searchRequest.getOptions() +// .toBuilder() +// .addHeader( +// "Authorization", +// UsernamePasswordToken.basicAuthHeaderValue( +// SecuritySettingsSource.TEST_USER_NAME, +// new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()) +// ) +// ) +// ); +// +// final Response searchResponse = getRestClient().performRequest(searchRequest); +// +// assertTrue(spanExporter.findSpanByName(SearchAction.NAME).findAny().isPresent()); +// assertTrue(spanExporter.findSpanByName(SearchTransportService.QUERY_CAN_MATCH_NODE_NAME).findAny().isPresent()); +// } +// +// public void testDoesNotRecordSpansWhenDisabled() { +// +// client().admin() +// .cluster() +// .updateSettings( +// new ClusterUpdateSettingsRequest().persistentSettings( +// Settings.builder().put(APMTracer.APM_ENABLED_SETTING.getKey(), false).build() +// ) +// ) +// .actionGet(); +// +// try { +// APMTracer.CAPTURING_SPAN_EXPORTER.clear(); +// +// client().admin().cluster().prepareListTasks().get(); +// +// assertThat(APMTracer.CAPTURING_SPAN_EXPORTER.getCapturedSpans(), empty()); +// } finally { +// client().admin() +// .cluster() +// .updateSettings( +// new ClusterUpdateSettingsRequest().persistentSettings( +// Settings.builder().put(APMTracer.APM_ENABLED_SETTING.getKey(), (String) null).build() +// ) +// ) +// .actionGet(); +// } +// } +// +// public void testFilterByNameGivenSingleCompleteMatch() { +// +// client().admin() +// .cluster() +// .updateSettings( +// new ClusterUpdateSettingsRequest().persistentSettings( +// Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), "cluster:monitor/tasks/lists").build() +// ) +// ) +// .actionGet(); +// +// APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events +// +// try { +// client().admin().cluster().prepareListTasks().get(); +// +// var 
parentTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists").collect(toList()); +// assertThat(parentTasks, hasSize(1)); +// +// var childrenTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists[n]").collect(toList()); +// assertThat(childrenTasks, empty()); +// } finally { +// client().admin() +// .cluster() +// .updateSettings( +// new ClusterUpdateSettingsRequest().persistentSettings( +// Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), (String) null).build() +// ) +// ) +// .actionGet(); +// } +// } +// +// public void testFilterByNameGivenSinglePattern() { +// +// client().admin() +// .cluster() +// .updateSettings( +// new ClusterUpdateSettingsRequest().persistentSettings( +// Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), "*/tasks/lists*").build() +// ) +// ) +// .actionGet(); +// +// APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events +// +// try { +// client().admin().cluster().prepareListTasks().get(); +// +// var parentTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists").collect(toList()); +// assertThat(parentTasks, hasSize(1)); +// +// var childrenTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists[n]").collect(toList()); +// assertThat(childrenTasks, hasSize(internalCluster().size())); +// } finally { +// client().admin() +// .cluster() +// .updateSettings( +// new ClusterUpdateSettingsRequest().persistentSettings( +// Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), (String) null).build() +// ) +// ) +// .actionGet(); +// } +// } +// +// public void testFilterByNameGivenTwoPatterns() { +// +// client().admin() +// .cluster() +// .updateSettings( +// new ClusterUpdateSettingsRequest().persistentSettings( +// Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), "*/tasks/lists,*/nodes/stats").build() +// ) +// ) +// .actionGet(); +// +// APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events +// +// try { +// client().admin().cluster().prepareListTasks().get(); +// client().admin().cluster().nodesStats(new NodesStatsRequest()).actionGet(); +// +// var spans = APMTracer.CAPTURING_SPAN_EXPORTER.getCapturedSpans().stream().map(SpanData::getName).collect(Collectors.toSet()); +// assertThat(spans, contains("cluster:monitor/nodes/stats", "cluster:monitor/tasks/lists")); +// } finally { +// client().admin() +// .cluster() +// .updateSettings( +// new ClusterUpdateSettingsRequest().persistentSettings( +// Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), (String) null).build() +// ) +// ) +// .actionGet(); +// } +// } } diff --git a/x-pack/plugin/apm-integration/src/main/config/elasticapm.properties b/x-pack/plugin/apm-integration/src/main/config/elasticapm.properties index 1a4cf53362d08..8f02ac380b111 100644 --- a/x-pack/plugin/apm-integration/src/main/config/elasticapm.properties +++ b/x-pack/plugin/apm-integration/src/main/config/elasticapm.properties @@ -13,6 +13,7 @@ enable_experimental_instrumentations: true # secret_token: service_name: elasticsearch +# service_node_name: node1 environment: dev diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java index f5ad73d1dabf7..283543546fc8f 100644 --- 
a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java @@ -20,19 +20,15 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; import java.util.Collection; import java.util.List; -import java.util.Set; import java.util.function.Supplier; public class APM extends Plugin implements NetworkPlugin { - public static final Set TRACE_HEADERS = Set.of(Task.TRACE_PARENT_HTTP_HEADER, Task.TRACE_STATE); - private final SetOnce tracer = new SetOnce<>(); private final Settings settings; @@ -54,7 +50,7 @@ public Collection createComponents( IndexNameExpressionResolver indexNameExpressionResolver, Supplier repositoriesServiceSupplier ) { - tracer.set(new APMTracer(settings, threadPool, clusterService)); + tracer.set(new APMTracer(settings, clusterService)); return List.of(tracer.get()); } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index cda1b91c3b44d..90332bea50c69 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -14,6 +14,7 @@ import io.opentelemetry.api.trace.SpanKind; import io.opentelemetry.api.trace.Tracer; import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; import io.opentelemetry.context.propagation.TextMapGetter; import org.apache.logging.log4j.LogManager; @@ -27,7 +28,6 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.tracing.Traceable; import java.security.AccessController; @@ -44,12 +44,13 @@ import static org.elasticsearch.common.settings.Setting.Property.Dynamic; import static org.elasticsearch.common.settings.Setting.Property.NodeScope; -import static org.elasticsearch.xpack.apm.APM.TRACE_HEADERS; public class APMTracer extends AbstractLifecycleComponent implements org.elasticsearch.tracing.Tracer { private static final Logger LOGGER = LogManager.getLogger(APMTracer.class); + private static final Set TRACE_HEADERS = Set.of(Task.TRACE_PARENT_HTTP_HEADER, Task.TRACE_STATE); + static final Setting APM_ENABLED_SETTING = Setting.boolSetting("xpack.apm.tracing.enabled", false, Dynamic, NodeScope); static final Setting> APM_TRACING_NAMES_INCLUDE_SETTING = Setting.listSetting( "xpack.apm.tracing.names.include", @@ -59,8 +60,18 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic NodeScope ); - private final Map spans = ConcurrentCollections.newConcurrentMap(); - private final ThreadPool threadPool; + private record ContextScope(Context context, Scope scope) { + Span span() { + return Span.fromContextOrNull(this.context); + } + + void close() { + this.span().end(); + this.scope.close(); + } + } + + private final Map spans = ConcurrentCollections.newConcurrentMap(); private final ClusterService clusterService; private volatile boolean enabled; @@ -73,8 
+84,7 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic */ private record APMServices(Tracer tracer, OpenTelemetry openTelemetry) {} - public APMTracer(Settings settings, ThreadPool threadPool, ClusterService clusterService) { - this.threadPool = Objects.requireNonNull(threadPool); + public APMTracer(Settings settings, ClusterService clusterService) { this.clusterService = Objects.requireNonNull(clusterService); this.enabled = APM_ENABLED_SETTING.get(settings); this.includeNames = APM_TRACING_NAMES_INCLUDE_SETTING.get(settings); @@ -143,94 +153,110 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { return; } - spans.computeIfAbsent(traceable.getSpanId(), spanId -> AccessController.doPrivileged((PrivilegedAction) () -> { + spans.computeIfAbsent(traceable.getSpanId(), spanId -> AccessController.doPrivileged((PrivilegedAction) () -> { final SpanBuilder spanBuilder = services.tracer.spanBuilder(traceable.getSpanName()); - Context parentContext = getParentSpanContext(); + + // https://github.com/open-telemetry/opentelemetry-java/discussions/2884#discussioncomment-381870 + // If you just want to propagate across threads within the same process, you don't need context propagators (extract/inject). + // You can just pass the Context object directly to another thread (it is immutable and thus thread-safe). + + // local parent first, remote parent as fallback + Context parentContext = getLocalParentContext(threadContext); + if (parentContext == null) { + parentContext = getRemoteParentContext(threadContext); + } if (parentContext != null) { spanBuilder.setParent(parentContext); } - for (Map.Entry entry : traceable.getAttributes().entrySet()) { - final Object value = entry.getValue(); - if (value instanceof String) { - spanBuilder.setAttribute(entry.getKey(), (String) value); - } else if (value instanceof Long) { - spanBuilder.setAttribute(entry.getKey(), (Long) value); - } else if (value instanceof Integer) { - spanBuilder.setAttribute(entry.getKey(), (Integer) value); - } else if (value instanceof Double) { - spanBuilder.setAttribute(entry.getKey(), (Double) value); - } else if (value instanceof Boolean) { - spanBuilder.setAttribute(entry.getKey(), (Boolean) value); - } else { - throw new IllegalArgumentException( - "span attributes do not support value type of [" + value.getClass().getCanonicalName() + "]" - ); - } - } - - final boolean isHttpSpan = traceable.getAttributes().keySet().stream().anyMatch(key -> key.startsWith("http.")); - spanBuilder.setSpanKind(isHttpSpan ? 
SpanKind.SERVER : SpanKind.INTERNAL); - - spanBuilder.setAttribute(Traceable.AttributeKeys.NODE_NAME, clusterService.getNodeName()); - spanBuilder.setAttribute(Traceable.AttributeKeys.CLUSTER_NAME, clusterService.getClusterName().value()); - - final String xOpaqueId = threadPool.getThreadContext().getHeader(Task.X_OPAQUE_ID_HTTP_HEADER); - if (xOpaqueId != null) { - spanBuilder.setAttribute("es.x-opaque-id", xOpaqueId); - } + setSpanAttributes(threadContext, traceable, spanBuilder); final Span span = spanBuilder.startSpan(); + final Context contextForNewSpan = Context.current().with(span); + final Scope scope = contextForNewSpan.makeCurrent(); final Map spanHeaders = new HashMap<>(); - final Context contextForNewSpan = Context.current().with(span); services.openTelemetry.getPropagators().getTextMapPropagator().inject(contextForNewSpan, spanHeaders, Map::put); spanHeaders.keySet().removeIf(k -> isSupportedContextKey(k) == false); + // The span context can be used as the parent context directly within the same Java process + threadContext.putTransient(Task.APM_TRACE_CONTEXT, contextForNewSpan); + // Whereas for tasks sent to other ES nodes, we need to put trace headers into the threadContext so that they can be + // propagated threadContext.putHeader(spanHeaders); -// logGraphviz(span); - - return span; + return new ContextScope(contextForNewSpan, scope); })); } + private void setSpanAttributes(ThreadContext threadContext, Traceable traceable, SpanBuilder spanBuilder) { + for (Map.Entry entry : traceable.getAttributes().entrySet()) { + final Object value = entry.getValue(); + if (value instanceof String) { + spanBuilder.setAttribute(entry.getKey(), (String) value); + } else if (value instanceof Long) { + spanBuilder.setAttribute(entry.getKey(), (Long) value); + } else if (value instanceof Integer) { + spanBuilder.setAttribute(entry.getKey(), (Integer) value); + } else if (value instanceof Double) { + spanBuilder.setAttribute(entry.getKey(), (Double) value); + } else if (value instanceof Boolean) { + spanBuilder.setAttribute(entry.getKey(), (Boolean) value); + } else { + throw new IllegalArgumentException( + "span attributes do not support value type of [" + value.getClass().getCanonicalName() + "]" + ); + } + } + + final boolean isHttpSpan = traceable.getAttributes().keySet().stream().anyMatch(key -> key.startsWith("http.")); + spanBuilder.setSpanKind(isHttpSpan ? 
SpanKind.SERVER : SpanKind.INTERNAL); + + spanBuilder.setAttribute(Traceable.AttributeKeys.NODE_NAME, clusterService.getNodeName()); + spanBuilder.setAttribute(Traceable.AttributeKeys.CLUSTER_NAME, clusterService.getClusterName().value()); + + final String xOpaqueId = threadContext.getHeader(Task.X_OPAQUE_ID_HTTP_HEADER); + if (xOpaqueId != null) { + spanBuilder.setAttribute("es.x-opaque-id", xOpaqueId); + } + } + @Override public void onTraceException(Traceable traceable, Throwable throwable) { - final var span = spans.get(traceable.getSpanId()); - if (span != null) { - span.recordException(throwable); + final var contextScope = spans.get(traceable.getSpanId()); + if (contextScope != null) { + contextScope.span().recordException(throwable); } } @Override public void setAttribute(Traceable traceable, String key, boolean value) { - final var span = spans.get(traceable.getSpanId()); - if (span != null) { - span.setAttribute(key, value); + final var contextScope = spans.get(traceable.getSpanId()); + if (contextScope != null) { + contextScope.span().setAttribute(key, value); } } @Override public void setAttribute(Traceable traceable, String key, double value) { - final var span = spans.get(traceable.getSpanId()); - if (span != null) { - span.setAttribute(key, value); + final var contextScope = spans.get(traceable.getSpanId()); + if (contextScope != null) { + contextScope.span().setAttribute(key, value); } } @Override public void setAttribute(Traceable traceable, String key, long value) { - final var span = spans.get(traceable.getSpanId()); - if (span != null) { - span.setAttribute(key, value); + final var contextScope = spans.get(traceable.getSpanId()); + if (contextScope != null) { + contextScope.span().setAttribute(key, value); } } @Override public void setAttribute(Traceable traceable, String key, String value) { - final var span = spans.get(traceable.getSpanId()); - if (span != null) { - span.setAttribute(key, value); + final var contextScope = spans.get(traceable.getSpanId()); + if (contextScope != null) { + contextScope.span().setAttribute(key, value); } } @@ -240,9 +266,11 @@ private boolean isSpanNameIncluded(String name) { return includeNames.isEmpty() || Regex.simpleMatch(includeNames, name); } - private Context getParentSpanContext() { - // Check for a parent context in the thread context. 
- final ThreadContext threadContext = threadPool.getThreadContext(); + private Context getLocalParentContext(ThreadContext threadContext) { + return threadContext.getTransient("parent_" + Task.APM_TRACE_CONTEXT); + } + + private Context getRemoteParentContext(ThreadContext threadContext) { final String traceParentHeader = threadContext.getTransient("parent_" + Task.TRACE_PARENT_HTTP_HEADER); final String traceStateHeader = threadContext.getTransient("parent_" + Task.TRACE_STATE); @@ -262,17 +290,17 @@ private Context getParentSpanContext() { @Override public void onTraceStopped(Traceable traceable) { - final var span = spans.remove(traceable.getSpanId()); - if (span != null) { - span.end(); + final var contextScope = spans.remove(traceable.getSpanId()); + if (contextScope != null) { + contextScope.close(); } } @Override public void onTraceEvent(Traceable traceable, String eventName) { - final var span = spans.get(traceable.getSpanId()); - if (span != null) { - span.addEvent(eventName); + final var contextScope = spans.get(traceable.getSpanId()); + if (contextScope != null) { + contextScope.span().addEvent(eventName); } } @@ -294,43 +322,4 @@ private static boolean isSupportedContextKey(String key) { } private static final Set GRAPHVIZ_CACHE = new HashSet<>(); - - private static void logGraphviz(Span span) { - final String spanStr = span.toString(); - - int i = spanStr.indexOf("spanId="); - int j = spanStr.indexOf(",", i); - String spanId = spanStr.substring(i + 7, j); - - String parentSpanId = null; - i = spanStr.indexOf("spanId=", j); - if (i > -1) { - j = spanStr.indexOf(",", i); - parentSpanId = spanStr.substring(i + 7, j); - } - - i = spanStr.indexOf("name=", j); - j = spanStr.indexOf(",", i); - String spanName = spanStr.substring(i + 5, j); - - if (spanName.startsWith("internal:") == false) { - if (GRAPHVIZ_CACHE.add(spanId)) { - Map attrs = new HashMap<>(); - attrs.put("label", spanName); - if (spanName.startsWith("internal:")) { - attrs.put("style", "filled"); - attrs.put("fillcolor", "pink"); - } - final String attrsString = attrs.entrySet() - .stream() - .map(each -> each.getKey() + "=\"" + each.getValue() + "\"") - .collect(Collectors.joining(",")); - LOGGER.warn("BADGER: __{} [{}]", spanId, attrsString); - } - - if (parentSpanId != null) { - LOGGER.warn("BADGER: __{} -> __{}", spanId, parentSpanId); - } - } - } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java index 40caa05cbba4d..26dccdac46829 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java @@ -402,7 +402,7 @@ public Map> getHttpTransports( NetworkService networkService, HttpServerTransport.Dispatcher dispatcher, ClusterSettings clusterSettings, - List tracers + Tracer tracer ) { Map> transports = new HashMap<>(); filterPlugins(NetworkPlugin.class).stream() @@ -418,7 +418,7 @@ public Map> getHttpTransports( networkService, dispatcher, clusterSettings, - null + tracer ) ) ); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/AuthorizationTracer.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/AuthorizationTracer.java deleted file mode 100644 index 9d1b9c8cf4ba3..0000000000000 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/AuthorizationTracer.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.security; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.tracing.Traceable; -import org.elasticsearch.tracing.Tracer; - -import java.util.List; -import java.util.concurrent.CopyOnWriteArrayList; - -public class AuthorizationTracer { - - private static final Logger logger = LogManager.getLogger(AuthorizationTracer.class); - - private final ThreadContext threadContext; - private final List tracers = new CopyOnWriteArrayList<>(); - - public AuthorizationTracer(ThreadContext threadContext) { - this.threadContext = threadContext; - } - - public void addTracer(Tracer tracer) { - if (tracer != null) { - tracers.add(tracer); - } - } - - public Runnable startTracing(Traceable traceable) { - for (Tracer tracer : tracers) { - try { - tracer.onTraceStarted(threadContext, traceable); - } catch (Exception e) { - assert false : e; - logger.warn( - new ParameterizedMessage( - "authorization tracing listener [{}] failed on starting tracing of [{}][{}]", - tracer, - traceable.getSpanId(), - traceable.getSpanName() - ), - e - ); - } - } - return () -> { - for (Tracer tracer : tracers) { - try { - tracer.onTraceStopped(traceable); - } catch (Exception e) { - assert false : e; - logger.warn( - new ParameterizedMessage( - "authorization tracing listener [{}] failed on stopping tracing of [{}][{}]", - tracer, - traceable.getSpanId(), - traceable.getSpanName() - ), - e - ); - } - } - }; - } -} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index e777292c9fea8..d3e75b5d5fd1c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -480,7 +480,6 @@ public class Security extends Plugin private final List securityExtensions = new ArrayList<>(); private final SetOnce transportReference = new SetOnce<>(); private final SetOnce scriptServiceReference = new SetOnce<>(); - private final SetOnce authorizationTracerReference = new SetOnce<>(); public Security(Settings settings) { this(settings, Collections.emptyList()); @@ -841,7 +840,6 @@ Collection createComponents( } requestInterceptors = Collections.unmodifiableSet(requestInterceptors); - authorizationTracerReference.set(new AuthorizationTracer(threadContext.get())); final AuthorizationService authzService = new AuthorizationService( settings, allRolesStore, @@ -855,8 +853,7 @@ Collection createComponents( getLicenseState(), expressionResolver, operatorPrivilegesService, - restrictedIndices, - authorizationTracerReference.get() + restrictedIndices ); components.add(nativeRolesStore); // used by roles actions @@ -1498,7 +1495,7 @@ public Map> getHttpTransports( NetworkService networkService, HttpServerTransport.Dispatcher dispatcher, ClusterSettings clusterSettings, - List tracers + Tracer tracer ) { if 
(enabled == false) { // don't register anything if we are not enabled return Collections.emptyMap(); @@ -1518,7 +1515,7 @@ public Map> getHttpTransports( dispatcher, clusterSettings, getNettySharedGroupFactory(settings), - tracers + tracer ) ); @@ -1641,15 +1638,6 @@ public void loadExtensions(ExtensionLoader loader) { securityExtensions.addAll(loader.loadExtensions(SecurityExtension.class)); } - @Override - public void onTracers(List tracers) { - if (authorizationTracerReference.get() == null) { - // security is disabled - return; - } - tracers.forEach(t -> authorizationTracerReference.get().addTracer(t)); - } - private synchronized SharedGroupFactory getNettySharedGroupFactory(Settings settings) { if (sharedGroupFactory.get() != null) { assert sharedGroupFactory.get().getSettings().equals(settings) : "Different settings than originally provided"; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 45137b7735abe..aa3f53161c8b3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -40,7 +40,6 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.tracing.Traceable; import org.elasticsearch.transport.TransportActionProxy; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.security.action.apikey.QueryApiKeyAction; @@ -71,7 +70,6 @@ import org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.xpack.core.security.user.User; -import org.elasticsearch.xpack.security.AuthorizationTracer; import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.audit.AuditLevel; import org.elasticsearch.xpack.security.audit.AuditTrail; @@ -111,7 +109,6 @@ public class AuthorizationService { true, Property.NodeScope ); - public static final Setting TRACE_AUTHORIZATION = Setting.boolSetting(setting("authz.tracing"), false, Property.NodeScope); private static final AuthorizationInfo SYSTEM_AUTHZ_INFO = () -> Collections.singletonMap( PRINCIPAL_ROLES_FIELD_NAME, new String[] { SystemUser.ROLE_NAME } @@ -137,8 +134,6 @@ public class AuthorizationService { private final boolean isAnonymousEnabled; private final boolean anonymousAuthzExceptionEnabled; - private final AuthorizationTracer authorizationTracer; - private final boolean tracingEnabled; public AuthorizationService( Settings settings, @@ -153,8 +148,7 @@ public AuthorizationService( XPackLicenseState licenseState, IndexNameExpressionResolver resolver, OperatorPrivilegesService operatorPrivilegesService, - RestrictedIndices restrictedIndices, - AuthorizationTracer authorizationTracer + RestrictedIndices restrictedIndices ) { this.clusterService = clusterService; this.auditTrailService = auditTrailService; @@ -175,8 +169,6 @@ public AuthorizationService( this.settings = settings; this.licenseState = licenseState; this.operatorPrivilegesService = operatorPrivilegesService; - this.authorizationTracer = authorizationTracer; - this.tracingEnabled = TRACE_AUTHORIZATION.get(settings); } public void checkPrivileges( @@ -230,51 +222,44 @@ public 
void authorize( * Therefore we begin by clearing the existing ones up, as they might already be set during the authorization of a * previous parent action that ran under the same thread context (also on the same node). * When the returned {@code StoredContext} is closed, ALL the original headers are restored. - * - * We also clear tracing-related headers */ try (ThreadContext.StoredContext ignore = threadContext.newStoredContext(false, ACTION_SCOPE_AUTHORIZATION_KEYS)) { - try (var ignored = maybeStartTracing(enclosingContext, authentication, action, originalRequest)) { - // this does not clear {@code AuthorizationServiceField.ORIGINATING_ACTION_KEY} - // prior to doing any authorization lets set the originating action in the thread context - // the originating action is the current action if no originating action has yet been set in the current thread context - // if there is already an original action, that stays put (eg. the current action is a child action) - putTransientIfNonExisting(ORIGINATING_ACTION_KEY, action); - - final String auditId; - try { - auditId = requireAuditId(authentication, action, originalRequest); - } catch (ElasticsearchSecurityException e) { - listener.onFailure(e); - return; - } + // this does not clear {@code AuthorizationServiceField.ORIGINATING_ACTION_KEY} + // prior to doing any authorization lets set the originating action in the thread context + // the originating action is the current action if no originating action has yet been set in the current thread context + // if there is already an original action, that stays put (eg. the current action is a child action) + putTransientIfNonExisting(ORIGINATING_ACTION_KEY, action); + + final String auditId; + try { + auditId = requireAuditId(authentication, action, originalRequest); + } catch (ElasticsearchSecurityException e) { + listener.onFailure(e); + return; + } - // sometimes a request might be wrapped within another, which is the case for proxied - // requests and concrete shard requests - final TransportRequest unwrappedRequest = maybeUnwrapRequest(authentication, originalRequest, action, auditId); + // sometimes a request might be wrapped within another, which is the case for proxied + // requests and concrete shard requests + final TransportRequest unwrappedRequest = maybeUnwrapRequest(authentication, originalRequest, action, auditId); - try { - checkOperatorPrivileges(authentication, action, originalRequest); - } catch (ElasticsearchException e) { - listener.onFailure(e); - return; - } + try { + checkOperatorPrivileges(authentication, action, originalRequest); + } catch (ElasticsearchException e) { + listener.onFailure(e); + return; + } - if (SystemUser.is(authentication.getUser())) { - // this never goes async so no need to wrap the listener - authorizeSystemUser(authentication, action, auditId, unwrappedRequest, listener); - } else { - final RequestInfo requestInfo = new RequestInfo(authentication, unwrappedRequest, action, enclosingContext); - final AuthorizationEngine engine = getAuthorizationEngine(authentication); - final ActionListener authzInfoListener = wrapPreservingContext( - ActionListener.wrap(authorizationInfo -> { - threadContext.putTransient(AUTHORIZATION_INFO_KEY, authorizationInfo); - maybeAuthorizeRunAs(requestInfo, auditId, authorizationInfo, listener); - }, listener::onFailure), - threadContext - ); - engine.resolveAuthorizationInfo(requestInfo, authzInfoListener); - } + if (SystemUser.is(authentication.getUser())) { + // this never goes async so no need to wrap the listener + 
authorizeSystemUser(authentication, action, auditId, unwrappedRequest, listener); + } else { + final RequestInfo requestInfo = new RequestInfo(authentication, unwrappedRequest, action, enclosingContext); + final AuthorizationEngine engine = getAuthorizationEngine(authentication); + final ActionListener authzInfoListener = wrapPreservingContext(ActionListener.wrap(authorizationInfo -> { + threadContext.putTransient(AUTHORIZATION_INFO_KEY, authorizationInfo); + maybeAuthorizeRunAs(requestInfo, auditId, authorizationInfo, listener); + }, listener::onFailure), threadContext); + engine.resolveAuthorizationInfo(requestInfo, authzInfoListener); } } } @@ -329,59 +314,6 @@ private static ElasticsearchSecurityException internalError(String message) { return new ElasticsearchSecurityException(message); } - // FIXME this isn't actually what we return but AutClose#close() declares an exception - private ThreadContext.StoredContext maybeStartTracing( - AuthorizationContext enclosingContext, - Authentication authentication, - String action, - TransportRequest originalRequest - ) { - // Not tracing system actions. Also if we're not tracing system actions, we mustn't start a new trace context. - if (shouldTrace(authentication) == false) { - return () -> {}; - } - - final ThreadContext.StoredContext context = threadContext.newTraceContext(); - final Runnable stopTracing = authorizationTracer.startTracing(new Traceable() { - @Override - public String getSpanId() { - return "authorize_" + System.identityHashCode(originalRequest); - } - - @Override - public String getSpanName() { - return "authorize(" + action + ")"; - } - - @Override - public Map getAttributes() { - final HashMap attributes = new HashMap<>( - Map.of( - "es.principal", - authentication.getUser().principal(), - "es.authentication.realm.name", - authentication.getAuthenticatedBy().getName(), - "es.node.name", - clusterService.getNodeName() - ) - ); - if (enclosingContext != null) { - attributes.put("originating_action", enclosingContext.getAction()); - } - return Map.copyOf(attributes); - } - }); - - return () -> { - context.restore(); - stopTracing.run(); - }; - } - - private boolean shouldTrace(Authentication authentication) { - return false == (false == tracingEnabled || SystemUser.is(authentication.getUser()) || threadContext.isSystemContext()); - } - private void checkOperatorPrivileges(Authentication authentication, String action, TransportRequest originalRequest) throws ElasticsearchSecurityException { // Check operator privileges @@ -1083,7 +1015,6 @@ public void getAsync(ActionListener listener) { public static void addSettings(List> settings) { settings.add(ANONYMOUS_AUTHORIZATION_EXCEPTION_SETTING); - settings.add(TRACE_AUTHORIZATION); settings.addAll(LoadAuthorizedIndicesTimeChecker.Factory.getSettings()); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java index 06ca3d46aaf60..2c14f35b08b9e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java @@ -52,9 +52,9 @@ public SecurityNetty4HttpServerTransport( Dispatcher dispatcher, ClusterSettings clusterSettings, SharedGroupFactory sharedGroupFactory, - List 
tracers + Tracer tracer ) { - super(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher, clusterSettings, sharedGroupFactory, tracers); + super(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher, clusterSettings, sharedGroupFactory, tracer); this.securityExceptionHandler = new SecurityHttpExceptionHandler(logger, lifecycle, (c, e) -> super.onException(c, e)); this.ipFilter = ipFilter; final boolean ssl = HTTP_SSL_ENABLED.get(settings); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalStateSecurity.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalStateSecurity.java index ce114d4605967..0c9b83727ebd5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalStateSecurity.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalStateSecurity.java @@ -122,7 +122,7 @@ protected Class> } @Override - public void onTracers(List tracers) { - plugins.forEach(plugin -> plugin.onTracers(tracers)); + public void onTracer(Tracer tracer) { + plugins.forEach(plugin -> plugin.onTracer(tracer)); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 99d33fbc72f4a..b633a7efe7035 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -704,7 +704,6 @@ public void testSecurityRestHandlerWrapperCanBeInstalled() throws IllegalAccessE null, null, usageService, - null, null ); actionModule.initRestHandlers(null); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 3050c465d0475..e7c9a605bb1b6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -154,7 +154,6 @@ import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.security.user.XPackSecurityUser; import org.elasticsearch.xpack.core.security.user.XPackUser; -import org.elasticsearch.xpack.security.AuthorizationTracer; import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.audit.AuditLevel; import org.elasticsearch.xpack.security.audit.AuditTrail; @@ -295,8 +294,7 @@ public void setup() { licenseState, TestIndexNameExpressionResolver.newInstance(), operatorPrivilegesService, - RESTRICTED_INDICES, - new AuthorizationTracer(threadContext) + RESTRICTED_INDICES ); } @@ -1570,8 +1568,7 @@ public void testDenialForAnonymousUser() throws IOException { new XPackLicenseState(() -> 0), TestIndexNameExpressionResolver.newInstance(), operatorPrivilegesService, - RESTRICTED_INDICES, - new AuthorizationTracer(threadContext) + RESTRICTED_INDICES ); RoleDescriptor role = new RoleDescriptor( @@ -1618,8 +1615,7 @@ public void testDenialForAnonymousUserAuthorizationExceptionDisabled() throws IO new XPackLicenseState(() -> 0), TestIndexNameExpressionResolver.newInstance(), operatorPrivilegesService, - RESTRICTED_INDICES, - new AuthorizationTracer(threadContext) + RESTRICTED_INDICES ); 
RoleDescriptor role = new RoleDescriptor( @@ -2767,8 +2763,7 @@ public void getUserPrivileges( licenseState, TestIndexNameExpressionResolver.newInstance(), operatorPrivilegesService, - RESTRICTED_INDICES, - new AuthorizationTracer(threadContext) + RESTRICTED_INDICES ); Authentication authentication; try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { From d2efa7e8778418585e163a87f9f0c95e9af558d0 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Mon, 18 Apr 2022 16:28:48 +0100 Subject: [PATCH 60/90] Fixes --- .../Netty4HttpServerPipeliningTests.java | 3 ++- .../Netty4HttpServerTransportTests.java | 15 ++++++------ .../common/util/concurrent/ThreadContext.java | 22 ----------------- .../node/tasks/TaskManagerTestCase.java | 8 ++++--- .../util/concurrent/ThreadContextTests.java | 24 ------------------- .../AbstractHttpServerTransportTests.java | 13 +++++----- .../service/FakeThreadPoolMasterService.java | 4 +--- .../test/ClusterServiceUtils.java | 3 ++- .../disruption/DisruptableMockTransport.java | 3 ++- .../test/tasks/MockTaskManager.java | 3 ++- .../test/transport/MockTransport.java | 4 +++- .../test/transport/MockTransportService.java | 4 ++-- .../action/RestTermsEnumActionTests.java | 3 ++- ...ecurityNetty4HttpServerTransportTests.java | 15 ++++++------ 14 files changed, 44 insertions(+), 80 deletions(-) diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java index 90b93ce1771b9..6de229bddf7dd 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.netty4.SharedGroupFactory; import org.junit.After; import org.junit.Before; @@ -112,7 +113,7 @@ class CustomNettyHttpServerTransport extends Netty4HttpServerTransport { new NullDispatcher(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), new SharedGroupFactory(settings), - List.of() + Tracer.NOOP ); } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java index d706a4f1631aa..e3c47e537a9b9 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java @@ -60,6 +60,7 @@ import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.netty4.NettyAllocator; import org.elasticsearch.transport.netty4.SharedGroupFactory; import org.junit.After; @@ -176,7 +177,7 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, dispatcher, clusterSettings, new SharedGroupFactory(settings), - List.of() + Tracer.NOOP ) ) { transport.start(); @@ -226,7 +227,7 @@ public void testBindUnavailableAddress() { new 
NullDispatcher(), clusterSettings, new SharedGroupFactory(Settings.EMPTY), - List.of() + Tracer.NOOP ) ) { transport.start(); @@ -245,7 +246,7 @@ public void testBindUnavailableAddress() { new NullDispatcher(), clusterSettings, new SharedGroupFactory(settings), - List.of() + Tracer.NOOP ) ) { BindHttpException bindHttpException = expectThrows(BindHttpException.class, otherTransport::start); @@ -298,7 +299,7 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th dispatcher, clusterSettings, new SharedGroupFactory(settings), - List.of() + Tracer.NOOP ) ) { transport.start(); @@ -361,7 +362,7 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th dispatcher, clusterSettings, new SharedGroupFactory(Settings.EMPTY), - List.of() + Tracer.NOOP ) ) { transport.start(); @@ -431,7 +432,7 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th dispatcher, randomClusterSettings(), new SharedGroupFactory(settings), - List.of() + Tracer.NOOP ) ) { transport.start(); @@ -505,7 +506,7 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th dispatcher, randomClusterSettings(), new SharedGroupFactory(settings), - List.of() + Tracer.NOOP ) ) { transport.start(); diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index 49d6b487b5bbf..8cb58b4f452e0 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -265,28 +265,6 @@ public StoredContext stashAndMergeHeaders(Map headers) { return () -> threadLocal.set(context); } - /** - * Removes the current context and resets a new context that is a copy of the current one except that the request - * headers do not contain the given headers to remove. The removed context can be restored when closing the returned - * {@link StoredContext}. - * @param headersToRemove the request headers to remove - */ - public StoredContext removeRequestHeaders(Set headersToRemove) { - final ThreadContextStruct context = threadLocal.get(); - Map newRequestHeaders = new HashMap<>(context.requestHeaders); - newRequestHeaders.keySet().removeAll(headersToRemove); - threadLocal.set( - new ThreadContextStruct( - newRequestHeaders, - context.responseHeaders, - context.transientHeaders, - context.isSystemContext, - context.warningHeadersSize - ) - ); - return () -> threadLocal.set(context); - } - /** * Just like {@link #stashContext()} but no default context is set. * @param preserveResponseHeaders if set to true the response headers of the restore thread will be preserved. 
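
The hunks above and below this point all make the same mechanical change: constructors that previously accepted a list of tracers (and were handed an empty List.of() in tests) now take a single Tracer, with tests passing Tracer.NOOP, and TaskManager/TransportService gain a Tracer parameter. As a rough orientation for readers outside the codebase, a do-nothing tracer consistent with the two callbacks that the removed AuthorizationTracer invoked (onTraceStarted and onTraceStopped) could be sketched as below. This is only an illustrative assumption based on the calls visible in this patch series; the actual org.elasticsearch.tracing.Tracer interface and its NOOP constant may declare different or additional methods.

// Illustrative sketch only -- not the real org.elasticsearch.tracing.Tracer.
// It mirrors the two callbacks used by the AuthorizationTracer removed earlier in
// this series and shows how a shared no-op instance (cf. Tracer.NOOP) can be exposed.
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.tracing.Traceable;

public interface SketchTracer {

    /** Invoked when a traced unit of work (a task, an authorization check, ...) begins. */
    void onTraceStarted(ThreadContext threadContext, Traceable traceable);

    /** Invoked when the same unit of work completes. */
    void onTraceStopped(Traceable traceable);

    /** Shared do-nothing instance, analogous to the Tracer.NOOP passed in the test hunks. */
    SketchTracer NOOP = new SketchTracer() {
        @Override
        public void onTraceStarted(ThreadContext threadContext, Traceable traceable) {
            // no-op: no span is started
        }

        @Override
        public void onTraceStopped(Traceable traceable) {
            // no-op: nothing to end
        }
    };
}

Passing one shared no-op object rather than an empty list keeps the tracer argument non-null everywhere and lets callers invoke it unconditionally, which appears to be the motivation for the list-to-single-Tracer change in this commit.
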
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index 8a5a290b9e5ad..d2d4e0a19c2c7 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -39,6 +39,7 @@ import org.elasticsearch.test.tasks.MockTaskManager; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.AbstractSimpleTransportTestCase; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportService; @@ -194,14 +195,15 @@ public TestNode(String name, ThreadPool threadPool, Settings settings) { TransportService.NOOP_TRANSPORT_INTERCEPTOR, boundTransportAddressDiscoveryNodeFunction, null, - Collections.emptySet() + Collections.emptySet(), + Tracer.NOOP ) { @Override - protected TaskManager createTaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders) { + protected TaskManager createTaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders, Tracer tracer) { if (MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.get(settings)) { return new MockTaskManager(settings, threadPool, taskHeaders); } else { - return super.createTaskManager(settings, threadPool, taskHeaders); + return super.createTaskManager(settings, threadPool, taskHeaders, tracer); } } }; diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java index f4bff2c8f0518..34474b110f1e8 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java @@ -181,30 +181,6 @@ public void testStashAndMerge() { assertEquals("1", threadContext.getHeader("default")); } - public void testRemoveHeaders() { - Settings build = Settings.builder().put("request.headers.default", "1").build(); - ThreadContext threadContext = new ThreadContext(build); - threadContext.putHeader("h_1", "h_1_value"); - threadContext.putHeader("h_2", "h_2_value"); - threadContext.putHeader("h_3", "h_3_value"); - - threadContext.putTransient("ctx.transient_1", 1); - threadContext.addResponseHeader("resp.header", "baaaam"); - try (ThreadContext.StoredContext ctx = threadContext.removeRequestHeaders(Set.of("h_1", "h_3"))) { - assertThat(threadContext.getHeaders(), equalTo(Map.of("default", "1", "h_2", "h_2_value"))); - assertEquals(Integer.valueOf(1), threadContext.getTransient("ctx.transient_1")); - assertEquals("1", threadContext.getHeader("default")); - assertEquals(1, threadContext.getResponseHeaders().get("resp.header").size()); - assertEquals("baaaam", threadContext.getResponseHeaders().get("resp.header").get(0)); - } - - assertThat(threadContext.getHeaders(), equalTo(Map.of("default", "1", "h_1", "h_1_value", "h_2", "h_2_value", "h_3", "h_3_value"))); - assertEquals(Integer.valueOf(1), threadContext.getTransient("ctx.transient_1")); - assertEquals("1", threadContext.getHeader("default")); - assertEquals(1, threadContext.getResponseHeaders().get("resp.header").size()); - assertEquals("baaaam", threadContext.getResponseHeaders().get("resp.header").get(0)); - } - public void 
testStoreContext() { Settings build = Settings.builder().put("request.headers.default", "1").build(); ThreadContext threadContext = new ThreadContext(build); diff --git a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java index f7e93bb69a61e..c7a0eb9c8702a 100644 --- a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java +++ b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.TransportSettings; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.junit.After; @@ -162,7 +163,7 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th xContentRegistry(), dispatcher, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), - List.of() + Tracer.NOOP ) { @Override @@ -277,7 +278,7 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, } }, clusterSettings, - List.of() + Tracer.NOOP ) { @Override protected HttpServerChannel bind(InetSocketAddress hostAddress) { @@ -328,7 +329,7 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, } }, clusterSettings, - List.of() + Tracer.NOOP ) { @Override protected HttpServerChannel bind(InetSocketAddress hostAddress) { @@ -482,7 +483,7 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, } }, clusterSettings, - List.of() + Tracer.NOOP ) { @Override protected HttpServerChannel bind(InetSocketAddress hostAddress) { @@ -539,7 +540,7 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, } }, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), - List.of() + Tracer.NOOP ) { @Override @@ -615,7 +616,7 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, } }, clusterSettings, - List.of() + Tracer.NOOP ) { @Override diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/service/FakeThreadPoolMasterService.java b/test/framework/src/main/java/org/elasticsearch/cluster/service/FakeThreadPoolMasterService.java index 935eefd633d03..b9dfd40884f34 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/service/FakeThreadPoolMasterService.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/service/FakeThreadPoolMasterService.java @@ -22,7 +22,6 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.node.Node; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.tracing.Tracer; import java.util.ArrayList; import java.util.List; @@ -51,8 +50,7 @@ public FakeThreadPoolMasterService( super( Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), nodeName).build(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), - threadPool, - Tracer.NOOP + threadPool ); this.name = serviceName; this.onTaskAvailableToRun = onTaskAvailableToRun; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java index 6fafad6b9c7b2..ed853363b0bc3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java @@ -35,6 +35,7 @@ import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import java.util.Collections; import java.util.concurrent.CountDownLatch; @@ -127,7 +128,7 @@ public static ClusterService createClusterService(ThreadPool threadPool, Discove clusterService.getClusterApplierService().setInitialState(initialClusterState); clusterService.getMasterService().setClusterStatePublisher(createClusterStatePublisher(clusterService.getClusterApplierService())); clusterService.getMasterService().setClusterStateSupplier(clusterService.getClusterApplierService()::state); - clusterService.setTaskManager(new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())); + clusterService.setTaskManager(new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet(), Tracer.NOOP)); clusterService.start(); return clusterService; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/DisruptableMockTransport.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/DisruptableMockTransport.java index 57ee3f376c3d0..6b8c2249ca5a1 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/DisruptableMockTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/DisruptableMockTransport.java @@ -20,6 +20,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.test.transport.MockTransport; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.CloseableConnection; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.ConnectionProfile; @@ -76,7 +77,7 @@ public TransportService createTransportService( @Nullable ClusterSettings clusterSettings, Set taskHeaders ) { - return new TransportService(settings, this, threadPool, interceptor, localNodeFactory, clusterSettings, taskHeaders); + return new TransportService(settings, this, threadPool, interceptor, localNodeFactory, clusterSettings, taskHeaders, Tracer.NOOP); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java index 86ea9cae12fbe..eadb9bdd9bcde 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java +++ b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java @@ -19,6 +19,7 @@ import org.elasticsearch.tasks.TaskAwareRequest; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import java.util.Collection; import java.util.Set; @@ -40,7 +41,7 @@ public class MockTaskManager extends TaskManager { private final Collection listeners = new CopyOnWriteArrayList<>(); public MockTaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders) { - super(settings, threadPool, taskHeaders); + super(settings, threadPool, taskHeaders, Tracer.NOOP); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java index b19c989d5bdc3..3a9bf8d8129c8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java @@ -20,6 +20,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.CloseableConnection; import org.elasticsearch.transport.ClusterConnectionManager; import org.elasticsearch.transport.RemoteTransportException; @@ -70,7 +71,8 @@ public TransportService createTransportService( localNodeFactory, clusterSettings, taskHeaders, - connectionManager + connectionManager, + Tracer.NOOP ); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index 96218ba9ea2fa..26f78e25762da 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -248,11 +248,11 @@ private static TransportAddress[] extractTransportAddresses(TransportService tra } @Override - protected TaskManager createTaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders) { + protected TaskManager createTaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders, Tracer tracer) { if (MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.get(settings)) { return new MockTaskManager(settings, threadPool, taskHeaders); } else { - return super.createTaskManager(settings, threadPool, taskHeaders); + return super.createTaskManager(settings, threadPool, taskHeaders, tracer); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java index cc67e727aa6e1..ef55bc02e05d9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.Transport; import org.elasticsearch.usage.UsageService; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -62,7 +63,7 @@ public class RestTermsEnumActionTests extends ESTestCase { */ @BeforeClass public static void stubTermEnumAction() { - final TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()); + final TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet(), Tracer.NOOP); final TransportAction transportAction = new TransportAction<>( TermsEnumAction.NAME, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java index 4f5aa6e93983b..e4dce91484e42 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.http.AbstractHttpServerTransportTestCase; import org.elasticsearch.http.NullDispatcher; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.netty4.SharedGroupFactory; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.ssl.SSLService; @@ -78,7 +79,7 @@ public void testDefaultClientAuth() throws Exception { new NullDispatcher(), randomClusterSettings(), new SharedGroupFactory(settings), - List.of() + Tracer.NOOP ); ChannelHandler handler = transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); @@ -105,7 +106,7 @@ public void testOptionalClientAuth() throws Exception { new NullDispatcher(), randomClusterSettings(), new SharedGroupFactory(settings), - List.of() + Tracer.NOOP ); ChannelHandler handler = transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); @@ -132,7 +133,7 @@ public void testRequiredClientAuth() throws Exception { new NullDispatcher(), randomClusterSettings(), new SharedGroupFactory(settings), - List.of() + Tracer.NOOP ); ChannelHandler handler = transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); @@ -159,7 +160,7 @@ public void testNoClientAuth() throws Exception { new NullDispatcher(), randomClusterSettings(), new SharedGroupFactory(settings), - List.of() + Tracer.NOOP ); ChannelHandler handler = transport.configureServerChannelHandler(); final EmbeddedChannel ch = new EmbeddedChannel(handler); @@ -181,7 +182,7 @@ public void testCustomSSLConfiguration() throws Exception { new NullDispatcher(), randomClusterSettings(), new SharedGroupFactory(settings), - List.of() + Tracer.NOOP ); ChannelHandler handler = transport.configureServerChannelHandler(); EmbeddedChannel ch = new EmbeddedChannel(handler); @@ -204,7 +205,7 @@ public void testCustomSSLConfiguration() throws Exception { new NullDispatcher(), randomClusterSettings(), new SharedGroupFactory(settings), - List.of() + Tracer.NOOP ); handler = transport.configureServerChannelHandler(); ch = new EmbeddedChannel(handler); @@ -236,7 +237,7 @@ public void testNoExceptionWhenConfiguredWithoutSslKeySSLDisabled() throws Excep new NullDispatcher(), randomClusterSettings(), new SharedGroupFactory(settings), - List.of() + Tracer.NOOP ); assertNotNull(transport.configureServerChannelHandler()); } From 6bb80b6f820acb9b81edeb8c527c15d6b0204139 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Mon, 18 Apr 2022 16:31:38 +0100 Subject: [PATCH 61/90] Formatting --- .../tools/launchers/JvmOptionsParser.java | 2 +- .../Netty4HttpServerTransportTests.java | 1 - .../cluster/service/ClusterService.java | 1 - .../common/util/concurrent/ThreadContext.java | 8 +- .../java/org/elasticsearch/node/Node.java | 6 +- .../util/concurrent/ThreadContextTests.java | 1 - .../java/org/elasticsearch/node/MockNode.java | 10 +- .../org/elasticsearch/xpack/apm/ApmIT.java | 672 +++++++++--------- .../xpack/apm/TestOpenTelemetry.java | 8 +- .../TransportSubmitAsyncSearchAction.java | 6 +- .../action/InternalExecutePolicyAction.java | 20 +- .../SecurityNetty4HttpServerTransport.java | 2 - ...ecurityNetty4HttpServerTransportTests.java | 1 - 13 files changed, 356 insertions(+), 382 deletions(-) diff --git 
a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java index d3dd246783269..142c8bf7071c7 100644 --- a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java +++ b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java @@ -81,7 +81,7 @@ public static void main(final String[] args) throws InterruptedException, IOExce if (environmentPathConf != null) { substitutions.put("ES_PATH_CONF", environmentPathConf); } else { - substitutions.put("ES_PATH_CONF", System.getenv("ES_HOME") + File.separator + "config"); + substitutions.put("ES_PATH_CONF", System.getenv("ES_HOME") + File.separator + "config"); } try { diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java index e3c47e537a9b9..bc2217272953f 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java @@ -70,7 +70,6 @@ import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.Collections; -import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java index ad10b7f6e9e8b..557498a31fa27 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java @@ -28,7 +28,6 @@ import org.elasticsearch.node.Node; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.tracing.Tracer; public class ClusterService extends AbstractLifecycleComponent { private final MasterService masterService; diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index 8cb58b4f452e0..4cee978b74435 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -734,13 +734,7 @@ private ThreadContextStruct putResponse( return this; } } - return new ThreadContextStruct( - requestHeaders, - newResponseHeaders, - transientHeaders, - isSystemContext, - newWarningHeaderSize - ); + return new ThreadContextStruct(requestHeaders, newResponseHeaders, transientHeaders, isSystemContext, newWarningHeaderSize); } private ThreadContextStruct putTransient(String key, Object value) { diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 294f755dfc4a6..5e0ff12f75daa 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -214,6 +214,7 @@ import java.util.function.UnaryOperator; import java.util.stream.Collectors; import java.util.stream.Stream; + import javax.net.ssl.SNIHostName; import static 
java.util.stream.Collectors.toList; @@ -1049,10 +1050,7 @@ protected Node( } private Tracer getTracer(Collection pluginComponents) { - final List tracers = pluginComponents.stream() - .map(c -> c instanceof Tracer t ? t : null) - .filter(Objects::nonNull) - .toList(); + final List tracers = pluginComponents.stream().map(c -> c instanceof Tracer t ? t : null).filter(Objects::nonNull).toList(); if (tracers.size() > 1) { throw new IllegalStateException("A single Tracer was expected but got: " + tracers); diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java index 34474b110f1e8..d38ecff84168c 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java @@ -19,7 +19,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; diff --git a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java index 3e3a93c224bdb..90332b9cbe1b9 100644 --- a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java +++ b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java @@ -198,15 +198,7 @@ protected TransportService newTransportService( tracer ); } else { - return new MockTransportService( - settings, - transport, - threadPool, - interceptor, - localNodeFactory, - clusterSettings, - taskHeaders - ); + return new MockTransportService(settings, transport, threadPool, interceptor, localNodeFactory, clusterSettings, taskHeaders); } } diff --git a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java index d52caf6286569..73c18d2953521 100644 --- a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java +++ b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -57,340 +57,340 @@ public class ApmIT extends SecurityIntegTestCase { -// @Override -// protected Collection> nodePlugins() { -// return CollectionUtils.appendToCopy(super.nodePlugins(), APM.class); -// } -// -// @Override -// protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { -// Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)); -// // ((MockSecureSettings) builder.getSecureSettings()).setString( -// // APMTracer.APM_ENDPOINT_SETTING.getKey(), -// // System.getProperty("tests.apm.endpoint", "") -// // ); -// // ((MockSecureSettings) builder.getSecureSettings()).setString( -// // APMTracer.APM_TOKEN_SETTING.getKey(), -// // System.getProperty("tests.apm.token", "") -// // ); -// builder.put(APMTracer.APM_ENABLED_SETTING.getKey(), true).put("xpack.security.authz.tracing", true); -// return builder.build(); -// } -// -// @Override -// protected boolean addMockHttpTransport() { -// return false; -// } -// -// @After -// public void clearRecordedSpans() { -// APMTracer.CAPTURING_SPAN_EXPORTER.clear(); -// } -// -// public void testModule() { -// List plugins = internalCluster().getAnyMasterNodeInstance(PluginsService.class).filterPlugins(APM.class); -// assertThat(plugins, hasSize(1)); 
-// -// TransportService transportService = internalCluster().getInstance(TransportService.class); -// final TaskTracer taskTracer = transportService.getTaskManager().getTaskTracer(); -// assertThat(taskTracer, notNullValue()); -// -// final Task testTask = new Task(randomNonNegativeLong(), "test", "action", "", TaskId.EMPTY_TASK_ID, Collections.emptyMap()); -// -// APMTracer.CAPTURING_SPAN_EXPORTER.clear(); -// -// taskTracer.onTaskRegistered(transportService.getThreadPool().getThreadContext(), testTask); -// taskTracer.onTaskUnregistered(testTask); -// -// final List capturedSpans = APMTracer.CAPTURING_SPAN_EXPORTER.getCapturedSpans(); -// boolean found = false; -// final Long targetId = testTask.getId(); -// for (SpanData capturedSpan : capturedSpans) { -// if (targetId.equals(capturedSpan.getAttributes().get(AttributeKey.longKey("es.task.id")))) { -// found = true; -// assertTrue(capturedSpan.hasEnded()); -// } -// } -// assertTrue(found); -// } -// -// public void testRecordsNestedSpans() { -// -// APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events -// -// client().admin().cluster().prepareListTasks().get(); -// -// var parentTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists").collect(toList()); -// assertThat(parentTasks, hasSize(1)); -// var parentTask = parentTasks.get(0); -// assertThat(parentTask.getParentSpanId(), equalTo("0000000000000000")); -// -// var childrenTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists[n]").collect(toList()); -// assertThat(childrenTasks, hasSize(internalCluster().size())); -// for (SpanData childrenTask : childrenTasks) { -// assertThat(childrenTask.getParentSpanId(), equalTo(parentTask.getSpanId())); -// assertThat(childrenTask.getTraceId(), equalTo(parentTask.getTraceId())); -// } -// } -// -// public void testRecovery() throws Exception { -// internalCluster().ensureAtLeastNumDataNodes(2); -// -// assertAcked( -// client().admin() -// .indices() -// .prepareCreate("test-index") -// .setSettings( -// Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) -// ) -// ); -// -// ensureGreen("test-index"); -// -// indexRandom(true, true, client().prepareIndex("test-index").setSource("{}", XContentType.JSON)); -// flushAndRefresh("test-index"); -// -// final APMTracer.CapturingSpanExporter spanExporter = APMTracer.CAPTURING_SPAN_EXPORTER; -// spanExporter.clear(); -// -// assertAcked( -// client().admin() -// .indices() -// .prepareUpdateSettings("test-index") -// .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)) -// ); -// -// ensureGreen("test-index"); -// -// final SpanData clusterUpdateSpan = spanExporter.findSpanByName(STATE_UPDATE_ACTION_NAME) -// .findAny() -// .orElseThrow(() -> new AssertionError("not found")); -// -// final List clusterUpdateChildActions = spanExporter.findSpan( -// spanData -> spanData.getParentSpanId().equals(clusterUpdateSpan.getSpanId()) -// ).map(SpanData::getName).collect(toList()); -// -// assertThat( -// clusterUpdateChildActions, -// hasItems(PublicationTransportHandler.PUBLISH_STATE_ACTION_NAME, PublicationTransportHandler.COMMIT_STATE_ACTION_NAME) -// ); -// -// final SpanData recoverySpan = spanExporter.findSpanByName(START_RECOVERY) -// .findAny() -// .orElseThrow(() -> new AssertionError("not found")); -// final List recoveryChildActions = spanExporter.findSpan( -// spanData -> 
spanData.getParentSpanId().equals(recoverySpan.getSpanId()) -// ).map(SpanData::getName).collect(toList()); -// -// assertThat( -// recoveryChildActions, -// hasItems( -// PeerRecoveryTargetService.Actions.FILES_INFO, -// PeerRecoveryTargetService.Actions.FILE_CHUNK, -// PeerRecoveryTargetService.Actions.CLEAN_FILES, -// PeerRecoveryTargetService.Actions.PREPARE_TRANSLOG, -// PeerRecoveryTargetService.Actions.FINALIZE -// ) -// ); -// -// } -// -// public void testSearch() throws Exception { -// -// internalCluster().ensureAtLeastNumDataNodes(2); -// final int nodeCount = internalCluster().numDataNodes(); -// -// assertAcked( -// client().admin() -// .indices() -// .prepareCreate("test-matching") -// .setMapping("{\"properties\":{\"message\":{\"type\":\"text\"},\"@timestamp\":{\"type\":\"date\"}}}") -// .setSettings( -// Settings.builder() -// .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) -// .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, nodeCount * 6) -// ) -// ); -// -// assertAcked( -// client().admin() -// .indices() -// .prepareCreate("test-notmatching") -// .setMapping("{\"properties\":{\"message\":{\"type\":\"text\"},\"@timestamp\":{\"type\":\"date\"}}}") -// .setSettings( -// Settings.builder() -// .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) -// .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, nodeCount * 6) -// ) -// ); -// -// ensureGreen("test-matching", "test-notmatching"); -// -// final String matchingDate = "2021-11-17"; -// final String nonMatchingDate = "2021-01-01"; -// -// final BulkRequestBuilder bulkRequestBuilder = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); -// -// for (int i = 0; i < 1000; i++) { -// final boolean isMatching = randomBoolean(); -// final IndexRequestBuilder indexRequestBuilder = client().prepareIndex(isMatching ? "test-matching" : "test-notmatching"); -// indexRequestBuilder.setSource( -// "{\"@timestamp\":\"" + (isMatching ? 
matchingDate : nonMatchingDate) + "\",\"message\":\"\"}", -// XContentType.JSON -// ); -// bulkRequestBuilder.add(indexRequestBuilder); -// } -// -// assertFalse(bulkRequestBuilder.execute().actionGet(10, TimeUnit.SECONDS).hasFailures()); -// -// final APMTracer.CapturingSpanExporter spanExporter = APMTracer.CAPTURING_SPAN_EXPORTER; -// spanExporter.clear(); -// -// final Request searchRequest = new Request("GET", "_search"); -// searchRequest.addParameter("search_type", "query_then_fetch"); -// searchRequest.addParameter("pre_filter_shard_size", "1"); -// searchRequest.setJsonEntity("{\"query\":{\"range\":{\"@timestamp\":{\"gt\":\"2021-11-01\"}}}}"); -// searchRequest.setOptions( -// searchRequest.getOptions() -// .toBuilder() -// .addHeader( -// "Authorization", -// UsernamePasswordToken.basicAuthHeaderValue( -// SecuritySettingsSource.TEST_USER_NAME, -// new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()) -// ) -// ) -// ); -// -// final Response searchResponse = getRestClient().performRequest(searchRequest); -// -// assertTrue(spanExporter.findSpanByName(SearchAction.NAME).findAny().isPresent()); -// assertTrue(spanExporter.findSpanByName(SearchTransportService.QUERY_CAN_MATCH_NODE_NAME).findAny().isPresent()); -// } -// -// public void testDoesNotRecordSpansWhenDisabled() { -// -// client().admin() -// .cluster() -// .updateSettings( -// new ClusterUpdateSettingsRequest().persistentSettings( -// Settings.builder().put(APMTracer.APM_ENABLED_SETTING.getKey(), false).build() -// ) -// ) -// .actionGet(); -// -// try { -// APMTracer.CAPTURING_SPAN_EXPORTER.clear(); -// -// client().admin().cluster().prepareListTasks().get(); -// -// assertThat(APMTracer.CAPTURING_SPAN_EXPORTER.getCapturedSpans(), empty()); -// } finally { -// client().admin() -// .cluster() -// .updateSettings( -// new ClusterUpdateSettingsRequest().persistentSettings( -// Settings.builder().put(APMTracer.APM_ENABLED_SETTING.getKey(), (String) null).build() -// ) -// ) -// .actionGet(); -// } -// } -// -// public void testFilterByNameGivenSingleCompleteMatch() { -// -// client().admin() -// .cluster() -// .updateSettings( -// new ClusterUpdateSettingsRequest().persistentSettings( -// Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), "cluster:monitor/tasks/lists").build() -// ) -// ) -// .actionGet(); -// -// APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events -// -// try { -// client().admin().cluster().prepareListTasks().get(); -// -// var parentTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists").collect(toList()); -// assertThat(parentTasks, hasSize(1)); -// -// var childrenTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists[n]").collect(toList()); -// assertThat(childrenTasks, empty()); -// } finally { -// client().admin() -// .cluster() -// .updateSettings( -// new ClusterUpdateSettingsRequest().persistentSettings( -// Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), (String) null).build() -// ) -// ) -// .actionGet(); -// } -// } -// -// public void testFilterByNameGivenSinglePattern() { -// -// client().admin() -// .cluster() -// .updateSettings( -// new ClusterUpdateSettingsRequest().persistentSettings( -// Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), "*/tasks/lists*").build() -// ) -// ) -// .actionGet(); -// -// APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events -// -// try { -// 
client().admin().cluster().prepareListTasks().get(); -// -// var parentTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists").collect(toList()); -// assertThat(parentTasks, hasSize(1)); -// -// var childrenTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists[n]").collect(toList()); -// assertThat(childrenTasks, hasSize(internalCluster().size())); -// } finally { -// client().admin() -// .cluster() -// .updateSettings( -// new ClusterUpdateSettingsRequest().persistentSettings( -// Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), (String) null).build() -// ) -// ) -// .actionGet(); -// } -// } -// -// public void testFilterByNameGivenTwoPatterns() { -// -// client().admin() -// .cluster() -// .updateSettings( -// new ClusterUpdateSettingsRequest().persistentSettings( -// Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), "*/tasks/lists,*/nodes/stats").build() -// ) -// ) -// .actionGet(); -// -// APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events -// -// try { -// client().admin().cluster().prepareListTasks().get(); -// client().admin().cluster().nodesStats(new NodesStatsRequest()).actionGet(); -// -// var spans = APMTracer.CAPTURING_SPAN_EXPORTER.getCapturedSpans().stream().map(SpanData::getName).collect(Collectors.toSet()); -// assertThat(spans, contains("cluster:monitor/nodes/stats", "cluster:monitor/tasks/lists")); -// } finally { -// client().admin() -// .cluster() -// .updateSettings( -// new ClusterUpdateSettingsRequest().persistentSettings( -// Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), (String) null).build() -// ) -// ) -// .actionGet(); -// } -// } + // @Override + // protected Collection> nodePlugins() { + // return CollectionUtils.appendToCopy(super.nodePlugins(), APM.class); + // } + // + // @Override + // protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + // Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)); + // // ((MockSecureSettings) builder.getSecureSettings()).setString( + // // APMTracer.APM_ENDPOINT_SETTING.getKey(), + // // System.getProperty("tests.apm.endpoint", "") + // // ); + // // ((MockSecureSettings) builder.getSecureSettings()).setString( + // // APMTracer.APM_TOKEN_SETTING.getKey(), + // // System.getProperty("tests.apm.token", "") + // // ); + // builder.put(APMTracer.APM_ENABLED_SETTING.getKey(), true).put("xpack.security.authz.tracing", true); + // return builder.build(); + // } + // + // @Override + // protected boolean addMockHttpTransport() { + // return false; + // } + // + // @After + // public void clearRecordedSpans() { + // APMTracer.CAPTURING_SPAN_EXPORTER.clear(); + // } + // + // public void testModule() { + // List plugins = internalCluster().getAnyMasterNodeInstance(PluginsService.class).filterPlugins(APM.class); + // assertThat(plugins, hasSize(1)); + // + // TransportService transportService = internalCluster().getInstance(TransportService.class); + // final TaskTracer taskTracer = transportService.getTaskManager().getTaskTracer(); + // assertThat(taskTracer, notNullValue()); + // + // final Task testTask = new Task(randomNonNegativeLong(), "test", "action", "", TaskId.EMPTY_TASK_ID, Collections.emptyMap()); + // + // APMTracer.CAPTURING_SPAN_EXPORTER.clear(); + // + // taskTracer.onTaskRegistered(transportService.getThreadPool().getThreadContext(), testTask); + // 
taskTracer.onTaskUnregistered(testTask); + // + // final List capturedSpans = APMTracer.CAPTURING_SPAN_EXPORTER.getCapturedSpans(); + // boolean found = false; + // final Long targetId = testTask.getId(); + // for (SpanData capturedSpan : capturedSpans) { + // if (targetId.equals(capturedSpan.getAttributes().get(AttributeKey.longKey("es.task.id")))) { + // found = true; + // assertTrue(capturedSpan.hasEnded()); + // } + // } + // assertTrue(found); + // } + // + // public void testRecordsNestedSpans() { + // + // APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events + // + // client().admin().cluster().prepareListTasks().get(); + // + // var parentTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists").collect(toList()); + // assertThat(parentTasks, hasSize(1)); + // var parentTask = parentTasks.get(0); + // assertThat(parentTask.getParentSpanId(), equalTo("0000000000000000")); + // + // var childrenTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists[n]").collect(toList()); + // assertThat(childrenTasks, hasSize(internalCluster().size())); + // for (SpanData childrenTask : childrenTasks) { + // assertThat(childrenTask.getParentSpanId(), equalTo(parentTask.getSpanId())); + // assertThat(childrenTask.getTraceId(), equalTo(parentTask.getTraceId())); + // } + // } + // + // public void testRecovery() throws Exception { + // internalCluster().ensureAtLeastNumDataNodes(2); + // + // assertAcked( + // client().admin() + // .indices() + // .prepareCreate("test-index") + // .setSettings( + // Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + // ) + // ); + // + // ensureGreen("test-index"); + // + // indexRandom(true, true, client().prepareIndex("test-index").setSource("{}", XContentType.JSON)); + // flushAndRefresh("test-index"); + // + // final APMTracer.CapturingSpanExporter spanExporter = APMTracer.CAPTURING_SPAN_EXPORTER; + // spanExporter.clear(); + // + // assertAcked( + // client().admin() + // .indices() + // .prepareUpdateSettings("test-index") + // .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)) + // ); + // + // ensureGreen("test-index"); + // + // final SpanData clusterUpdateSpan = spanExporter.findSpanByName(STATE_UPDATE_ACTION_NAME) + // .findAny() + // .orElseThrow(() -> new AssertionError("not found")); + // + // final List clusterUpdateChildActions = spanExporter.findSpan( + // spanData -> spanData.getParentSpanId().equals(clusterUpdateSpan.getSpanId()) + // ).map(SpanData::getName).collect(toList()); + // + // assertThat( + // clusterUpdateChildActions, + // hasItems(PublicationTransportHandler.PUBLISH_STATE_ACTION_NAME, PublicationTransportHandler.COMMIT_STATE_ACTION_NAME) + // ); + // + // final SpanData recoverySpan = spanExporter.findSpanByName(START_RECOVERY) + // .findAny() + // .orElseThrow(() -> new AssertionError("not found")); + // final List recoveryChildActions = spanExporter.findSpan( + // spanData -> spanData.getParentSpanId().equals(recoverySpan.getSpanId()) + // ).map(SpanData::getName).collect(toList()); + // + // assertThat( + // recoveryChildActions, + // hasItems( + // PeerRecoveryTargetService.Actions.FILES_INFO, + // PeerRecoveryTargetService.Actions.FILE_CHUNK, + // PeerRecoveryTargetService.Actions.CLEAN_FILES, + // PeerRecoveryTargetService.Actions.PREPARE_TRANSLOG, + // PeerRecoveryTargetService.Actions.FINALIZE + // ) + // ); + // + // } + // + // public void 
testSearch() throws Exception { + // + // internalCluster().ensureAtLeastNumDataNodes(2); + // final int nodeCount = internalCluster().numDataNodes(); + // + // assertAcked( + // client().admin() + // .indices() + // .prepareCreate("test-matching") + // .setMapping("{\"properties\":{\"message\":{\"type\":\"text\"},\"@timestamp\":{\"type\":\"date\"}}}") + // .setSettings( + // Settings.builder() + // .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) + // .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, nodeCount * 6) + // ) + // ); + // + // assertAcked( + // client().admin() + // .indices() + // .prepareCreate("test-notmatching") + // .setMapping("{\"properties\":{\"message\":{\"type\":\"text\"},\"@timestamp\":{\"type\":\"date\"}}}") + // .setSettings( + // Settings.builder() + // .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) + // .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, nodeCount * 6) + // ) + // ); + // + // ensureGreen("test-matching", "test-notmatching"); + // + // final String matchingDate = "2021-11-17"; + // final String nonMatchingDate = "2021-01-01"; + // + // final BulkRequestBuilder bulkRequestBuilder = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + // + // for (int i = 0; i < 1000; i++) { + // final boolean isMatching = randomBoolean(); + // final IndexRequestBuilder indexRequestBuilder = client().prepareIndex(isMatching ? "test-matching" : "test-notmatching"); + // indexRequestBuilder.setSource( + // "{\"@timestamp\":\"" + (isMatching ? matchingDate : nonMatchingDate) + "\",\"message\":\"\"}", + // XContentType.JSON + // ); + // bulkRequestBuilder.add(indexRequestBuilder); + // } + // + // assertFalse(bulkRequestBuilder.execute().actionGet(10, TimeUnit.SECONDS).hasFailures()); + // + // final APMTracer.CapturingSpanExporter spanExporter = APMTracer.CAPTURING_SPAN_EXPORTER; + // spanExporter.clear(); + // + // final Request searchRequest = new Request("GET", "_search"); + // searchRequest.addParameter("search_type", "query_then_fetch"); + // searchRequest.addParameter("pre_filter_shard_size", "1"); + // searchRequest.setJsonEntity("{\"query\":{\"range\":{\"@timestamp\":{\"gt\":\"2021-11-01\"}}}}"); + // searchRequest.setOptions( + // searchRequest.getOptions() + // .toBuilder() + // .addHeader( + // "Authorization", + // UsernamePasswordToken.basicAuthHeaderValue( + // SecuritySettingsSource.TEST_USER_NAME, + // new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()) + // ) + // ) + // ); + // + // final Response searchResponse = getRestClient().performRequest(searchRequest); + // + // assertTrue(spanExporter.findSpanByName(SearchAction.NAME).findAny().isPresent()); + // assertTrue(spanExporter.findSpanByName(SearchTransportService.QUERY_CAN_MATCH_NODE_NAME).findAny().isPresent()); + // } + // + // public void testDoesNotRecordSpansWhenDisabled() { + // + // client().admin() + // .cluster() + // .updateSettings( + // new ClusterUpdateSettingsRequest().persistentSettings( + // Settings.builder().put(APMTracer.APM_ENABLED_SETTING.getKey(), false).build() + // ) + // ) + // .actionGet(); + // + // try { + // APMTracer.CAPTURING_SPAN_EXPORTER.clear(); + // + // client().admin().cluster().prepareListTasks().get(); + // + // assertThat(APMTracer.CAPTURING_SPAN_EXPORTER.getCapturedSpans(), empty()); + // } finally { + // client().admin() + // .cluster() + // .updateSettings( + // new ClusterUpdateSettingsRequest().persistentSettings( + // Settings.builder().put(APMTracer.APM_ENABLED_SETTING.getKey(), (String) null).build() + // ) 
+ // ) + // .actionGet(); + // } + // } + // + // public void testFilterByNameGivenSingleCompleteMatch() { + // + // client().admin() + // .cluster() + // .updateSettings( + // new ClusterUpdateSettingsRequest().persistentSettings( + // Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), "cluster:monitor/tasks/lists").build() + // ) + // ) + // .actionGet(); + // + // APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events + // + // try { + // client().admin().cluster().prepareListTasks().get(); + // + // var parentTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists").collect(toList()); + // assertThat(parentTasks, hasSize(1)); + // + // var childrenTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists[n]").collect(toList()); + // assertThat(childrenTasks, empty()); + // } finally { + // client().admin() + // .cluster() + // .updateSettings( + // new ClusterUpdateSettingsRequest().persistentSettings( + // Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), (String) null).build() + // ) + // ) + // .actionGet(); + // } + // } + // + // public void testFilterByNameGivenSinglePattern() { + // + // client().admin() + // .cluster() + // .updateSettings( + // new ClusterUpdateSettingsRequest().persistentSettings( + // Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), "*/tasks/lists*").build() + // ) + // ) + // .actionGet(); + // + // APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events + // + // try { + // client().admin().cluster().prepareListTasks().get(); + // + // var parentTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists").collect(toList()); + // assertThat(parentTasks, hasSize(1)); + // + // var childrenTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists[n]").collect(toList()); + // assertThat(childrenTasks, hasSize(internalCluster().size())); + // } finally { + // client().admin() + // .cluster() + // .updateSettings( + // new ClusterUpdateSettingsRequest().persistentSettings( + // Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), (String) null).build() + // ) + // ) + // .actionGet(); + // } + // } + // + // public void testFilterByNameGivenTwoPatterns() { + // + // client().admin() + // .cluster() + // .updateSettings( + // new ClusterUpdateSettingsRequest().persistentSettings( + // Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), "*/tasks/lists,*/nodes/stats").build() + // ) + // ) + // .actionGet(); + // + // APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events + // + // try { + // client().admin().cluster().prepareListTasks().get(); + // client().admin().cluster().nodesStats(new NodesStatsRequest()).actionGet(); + // + // var spans = APMTracer.CAPTURING_SPAN_EXPORTER.getCapturedSpans().stream().map(SpanData::getName).collect(Collectors.toSet()); + // assertThat(spans, contains("cluster:monitor/nodes/stats", "cluster:monitor/tasks/lists")); + // } finally { + // client().admin() + // .cluster() + // .updateSettings( + // new ClusterUpdateSettingsRequest().persistentSettings( + // Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), (String) null).build() + // ) + // ) + // .actionGet(); + // } + // } } diff --git a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/TestOpenTelemetry.java 
b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/TestOpenTelemetry.java index 1a76d549919e5..afda3ae4b9892 100644 --- a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/TestOpenTelemetry.java +++ b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/TestOpenTelemetry.java @@ -158,13 +158,7 @@ public Span startSpan() { if (this.startTimestamp == null) { this.startTimestamp = System.currentTimeMillis(); } - return new TestSpan( - spanName, - parentContext, - attributes, - spanKind, - startTimestamp - ); + return new TestSpan(spanName, parentContext, attributes, spanKind, startTimestamp); } } diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java index d3cd7693ff323..0f7fd1867de8e 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java @@ -84,11 +84,7 @@ public TransportSubmitAsyncSearchAction( protected void doExecute(Task submitTask, SubmitAsyncSearchRequest request, ActionListener submitListener) { final SearchRequest searchRequest = createSearchRequest(request, submitTask, request.getKeepAlive()); try (var ignored = threadContext.newTraceContext()) { - AsyncSearchTask searchTask = (AsyncSearchTask) taskManager.register( - "transport", - SearchAction.INSTANCE.name(), - searchRequest - ); + AsyncSearchTask searchTask = (AsyncSearchTask) taskManager.register("transport", SearchAction.INSTANCE.name(), searchRequest); searchAction.execute(searchTask, searchRequest, searchTask.getSearchProgressActionListener()); searchTask.addCompletionListener(new ActionListener<>() { @Override diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java index 747aae64cf3cf..5eee0cc296573 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java @@ -114,15 +114,21 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, try { ActionListener listener; if (request.isWaitForCompletion()) { - listener = ActionListener.wrap(result -> actionListener.onResponse(new Response(result)), actionListener::onFailure); + listener = ActionListener.wrap( + result -> actionListener.onResponse(new Response(result)), + actionListener::onFailure + ); } else { - listener = ActionListener.wrap(result -> LOGGER.debug("successfully executed policy [{}]", request.getName()), e -> { - if (e instanceof TaskCancelledException) { - LOGGER.info(e.getMessage()); - } else { - LOGGER.error("failed to execute policy [" + request.getName() + "]", e); + listener = ActionListener.wrap( + result -> LOGGER.debug("successfully executed policy [{}]", request.getName()), + e -> { + if (e instanceof TaskCancelledException) { + LOGGER.info(e.getMessage()); + } else { + LOGGER.error("failed to execute policy [" + request.getName() + "]", e); + } } - }); + ); } policyExecutor.runPolicyLocally(task, request.getName(), 
ActionListener.wrap(result -> { taskManager.unregister(task); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java index 2c14f35b08b9e..bf25cdbf54237 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java @@ -27,8 +27,6 @@ import org.elasticsearch.xpack.security.transport.SecurityHttpExceptionHandler; import org.elasticsearch.xpack.security.transport.filter.IPFilter; -import java.util.List; - import javax.net.ssl.SSLEngine; import static org.elasticsearch.xpack.core.XPackSettings.HTTP_SSL_ENABLED; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java index e4dce91484e42..97d97840513e1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java @@ -30,7 +30,6 @@ import java.nio.file.Path; import java.util.Collections; -import java.util.List; import javax.net.ssl.SSLEngine; From eefb53a0236a9735e18999c9dafb1df50cbea819 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 20 Apr 2022 21:29:44 +0100 Subject: [PATCH 62/90] Beginnings of an end-to-end APM test --- qa/apm/build.gradle | 48 ++++++ qa/apm/config/elasticsearch/roles.yml | 34 ++++ qa/apm/config/elasticsearch/service_tokens | 2 + qa/apm/config/elasticsearch/users | 9 + qa/apm/config/elasticsearch/users_roles | 13 ++ qa/apm/config/kibana/kibana-8.yml | 78 +++++++++ qa/apm/docker-compose.yml | 157 ++++++++++++++++++ qa/apm/entrypoint.sh | 25 +++ qa/apm/scripts/tls/apm-server/cert.crt | 27 +++ qa/apm/scripts/tls/apm-server/key.pem | 52 ++++++ .../org/elasticsearch/xpack/apm/ApmIT.java | 88 ++++++++++ 11 files changed, 533 insertions(+) create mode 100644 qa/apm/build.gradle create mode 100644 qa/apm/config/elasticsearch/roles.yml create mode 100644 qa/apm/config/elasticsearch/service_tokens create mode 100644 qa/apm/config/elasticsearch/users create mode 100644 qa/apm/config/elasticsearch/users_roles create mode 100644 qa/apm/config/kibana/kibana-8.yml create mode 100644 qa/apm/docker-compose.yml create mode 100755 qa/apm/entrypoint.sh create mode 100644 qa/apm/scripts/tls/apm-server/cert.crt create mode 100644 qa/apm/scripts/tls/apm-server/key.pem create mode 100644 qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java diff --git a/qa/apm/build.gradle b/qa/apm/build.gradle new file mode 100644 index 0000000000000..245f13422c1ef --- /dev/null +++ b/qa/apm/build.gradle @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +import org.elasticsearch.gradle.Architecture +import org.elasticsearch.gradle.VersionProperties +import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER; + +apply plugin: 'elasticsearch.standalone-rest-test' +apply plugin: 'elasticsearch.test.fixtures' +apply plugin: 'elasticsearch.internal-distribution-download' + +testFixtures.useFixture() + +dependencies { + testImplementation project(':client:rest-high-level') +} + +dockerCompose { + environment.put 'STACK_VERSION', VersionProperties.elasticsearch +} + +elasticsearch_distributions { + docker { + type = DOCKER + architecture = Architecture.current() + version = VersionProperties.getElasticsearch() + failIfUnavailable = false // This ensures we skip this testing if Docker is unavailable + } +} + +tasks.named("preProcessFixture").configure { + dependsOn elasticsearch_distributions.matching { it.architecture == Architecture.current() } +} + +tasks.register("integTest", Test) { + outputs.doNotCacheIf('Build cache is disabled for Docker tests') { true } + maxParallelForks = '1' + include '**/*IT.class' +} + +tasks.named("check").configure { + dependsOn "integTest" +} diff --git a/qa/apm/config/elasticsearch/roles.yml b/qa/apm/config/elasticsearch/roles.yml new file mode 100644 index 0000000000000..91277fa8dd65d --- /dev/null +++ b/qa/apm/config/elasticsearch/roles.yml @@ -0,0 +1,34 @@ +--- +apm_server: + cluster: ['manage_ilm', 'manage_security', 'manage_api_key'] + indices: + - names: ['apm-*', 'logs-apm*', 'metrics-apm*', 'traces-apm*'] + privileges: ['write', 'create_index', 'manage', 'manage_ilm'] + applications: + - application: 'apm' + privileges: ['sourcemap:write', 'event:write', 'config_agent:read'] + resources: '*' +beats: + cluster: ['manage_index_templates', 'monitor', 'manage_ingest_pipelines', 'manage_ilm', 'manage_security', 'manage_api_key'] + indices: + - names: ['filebeat-*', 'shrink-filebeat-*'] + privileges: ['all'] +filebeat: + cluster: ['manage_index_templates', 'monitor', 'manage_ingest_pipelines', 'manage_ilm'] + indices: + - names: ['filebeat-*', 'shrink-filebeat-*'] + privileges: ['all'] +heartbeat: + cluster: ['manage_index_templates', 'monitor', 'manage_ingest_pipelines', 'manage_ilm'] + indices: + - names: ['heartbeat-*', 'shrink-heartbeat-*'] + privileges: ['all'] +metricbeat: + cluster: ['manage_index_templates', 'monitor', 'manage_ingest_pipelines', 'manage_ilm'] + indices: + - names: ['metricbeat-*', 'shrink-metricbeat-*'] + privileges: ['all'] +opbeans: + indices: + - names: ['opbeans-*'] + privileges: ['write', 'read'] diff --git a/qa/apm/config/elasticsearch/service_tokens b/qa/apm/config/elasticsearch/service_tokens new file mode 100644 index 0000000000000..02c39a69bc9bf --- /dev/null +++ b/qa/apm/config/elasticsearch/service_tokens @@ -0,0 +1,2 @@ +elastic/fleet-server/elastic-package-fleet-server-token:{PBKDF2_STRETCH}10000$PNiVyY96dHwRfoDszBvYPAz+mSLbC+NhtPh63dblDZU=$dAY1tXX1U5rXB+2Lt7m0L2LUNSb1q5nRaIqPNZTBxb8= +elastic/kibana/elastic-package-kibana-token:{PBKDF2_STRETCH}10000$wIEFHIIIZ2ap0D0iQsyw0MfB7YuFA1bHnXAmlCoL4Gg=$YxvIJnasjLZyDQZpmFBiJHdR/CGXd5BnVm013Jty6p0= diff --git a/qa/apm/config/elasticsearch/users b/qa/apm/config/elasticsearch/users new file mode 100644 index 0000000000000..4cc30a99d92f1 --- /dev/null +++ b/qa/apm/config/elasticsearch/users @@ -0,0 +1,9 @@ +admin:$2a$10$xiY0ZzOKmDDN1p3if4t4muUBwh2.bFHADoMRAWQgSClm4ZJ4132Y. 
+apm_server_user:$2a$10$iTy29qZaCSVn4FXlIjertuO8YfYVLCbvoUAJ3idaXfLRclg9GXdGG +apm_user_ro:$2a$10$hQfy2o2u33SapUClsx8NCuRMpQyHP9b2l4t3QqrBA.5xXN2S.nT4u +beats_user:$2a$10$LRpKi4/Q3Qo4oIbiu26rH.FNIL4aOH4aj2Kwi58FkMo1z9FgJONn2 +filebeat_user:$2a$10$sFxIEX8tKyOYgsbJLbUhTup76ssvSD3L4T0H6Raaxg4ewuNr.lUFC +heartbeat_user:$2a$10$nKUGDr/V5ClfliglJhfy8.oEkjrDtklGQfhd9r9NoFqQeoNxr7uUK +kibana_system_user:$2a$10$nN6sRtQl2KX9Gn8kV/.NpOLSk6Jwn8TehEDnZ7aaAgzyl/dy5PYzW +metricbeat_user:$2a$10$5PyTd121U2ZXnFk9NyqxPuLxdptKbB8nK5egt6M5/4xrKUkk.GReG +opbeans_user:$2a$10$iTy29qZaCSVn4FXlIjertuO8YfYVLCbvoUAJ3idaXfLRclg9GXdGG diff --git a/qa/apm/config/elasticsearch/users_roles b/qa/apm/config/elasticsearch/users_roles new file mode 100644 index 0000000000000..629fe7392c12f --- /dev/null +++ b/qa/apm/config/elasticsearch/users_roles @@ -0,0 +1,13 @@ +apm_server:apm_server_user +apm_system:apm_server_user +apm_user:apm_server_user,apm_user_ro +beats:beats_user +beats_system:beats_user,filebeat_user,heartbeat_user,metricbeat_user +filebeat:filebeat_user +heartbeat:heartbeat_user +ingest_admin:apm_server_user +kibana_system:kibana_system_user +kibana_user:apm_server_user,apm_user_ro,beats_user,filebeat_user,heartbeat_user,metricbeat_user,opbeans_user +metricbeat:metricbeat_user +opbeans:opbeans_user +superuser:admin diff --git a/qa/apm/config/kibana/kibana-8.yml b/qa/apm/config/kibana/kibana-8.yml new file mode 100644 index 0000000000000..4b3add76282d8 --- /dev/null +++ b/qa/apm/config/kibana/kibana-8.yml @@ -0,0 +1,78 @@ +xpack.fleet.packages: + - name: system + version: latest + - name: elastic_agent + version: latest + - name: apm + version: latest + - name: fleet_server + version: latest + +xpack.fleet.agentPolicies: + - name: Fleet Server + APM policy + id: fleet-server-apm-policy + description: Fleet server policy with APM and System logs and metrics enabled + namespace: default + is_default_fleet_server: true + is_managed: false + monitoring_enabled: + - logs + - metrics + package_policies: + - name: system-1 + package: + name: system + - name: apm-1 + package: + name: apm + inputs: + - type: apm + keep_enabled: true + vars: + - name: host + value: 0.0.0.0:8200 + frozen: true + - name: url + value: "${ELASTIC_APM_SERVER_URL}" + frozen: true + - name: enable_rum + value: true + frozen: true + - name: read_timeout + value: 1m + frozen: true + - name: shutdown_timeout + value: 2m + frozen: true + - name: write_timeout + value: 1m + frozen: true + - name: rum_allow_headers + value: + - x-custom-header + frozen: true + - name: secret_token + value: "${ELASTIC_APM_SECRET_TOKEN}" + frozen: true + - name: tls_enabled + value: ${ELASTIC_APM_TLS} + frozen: true + - name: tls_certificate + value: /usr/share/apmserver/config/certs/tls.crt + frozen: true + - name: tls_key + value: /usr/share/apmserver/config/certs/tls.key + frozen: true + - name: Fleet Server + package: + name: fleet_server + inputs: + - type: fleet-server + keep_enabled: true + vars: + - name: host + value: 0.0.0.0 + frozen: true + - name: port + value: 8220 + frozen: true diff --git a/qa/apm/docker-compose.yml b/qa/apm/docker-compose.yml new file mode 100644 index 0000000000000..2dfd629d377fe --- /dev/null +++ b/qa/apm/docker-compose.yml @@ -0,0 +1,157 @@ +version: "2.4" + +networks: + default: + name: apm-integration-testing + +services: + apmserver: + # Referenced in the APM agent config in the ES container + # container_name: apmserver + depends_on: + kibana: + condition: service_healthy + environment: + FLEET_ELASTICSEARCH_HOST: null + 
FLEET_SERVER_ELASTICSEARCH_INSECURE: "1" + FLEET_SERVER_ENABLE: "1" + FLEET_SERVER_HOST: 0.0.0.0 + FLEET_SERVER_INSECURE_HTTP: "1" + FLEET_SERVER_POLICY_ID: fleet-server-apm-policy + FLEET_SERVER_PORT: "8220" + FLEET_SERVER_SERVICE_TOKEN: AAEAAWVsYXN0aWMvZmxlZXQtc2VydmVyL2VsYXN0aWMtcGFja2FnZS1mbGVldC1zZXJ2ZXItdG9rZW46bmgtcFhoQzRRQ2FXbms2U0JySGlWQQ + KIBANA_FLEET_HOST: null + KIBANA_FLEET_SERVICE_TOKEN: AAEAAWVsYXN0aWMvZmxlZXQtc2VydmVyL2VsYXN0aWMtcGFja2FnZS1mbGVldC1zZXJ2ZXItdG9rZW46bmgtcFhoQzRRQ2FXbms2U0JySGlWQQ + KIBANA_FLEET_SETUP: "1" + healthcheck: + test: + - CMD + - /bin/true + image: docker.elastic.co/beats/elastic-agent:${STACK_VERSION} + labels: + - co.elastic.apm.stack-version=${STACK_VERSION} + logging: + driver: json-file + options: + max-file: "5" + max-size: 2m + # ports: + # - 127.0.0.1:8220:8220 + # - 127.0.0.1:8200:8200 + volumes: + - /var/run/docker.sock:/var/run/docker.sock + - ./scripts/tls/apmserver/cert.crt:/usr/share/apmserver/config/certs/tls.crt + - ./scripts/tls/apmserver/key.pem:/usr/share/apmserver/config/certs/tls.key + + elasticsearch: + entrypoint: /entrypoint.sh + environment: + - action.destructive_requires_name=false + - bootstrap.memory_lock=true + - cluster.name=docker-cluster + - cluster.routing.allocation.disk.threshold_enabled=false + - discovery.type=single-node + - ES_JAVA_OPTS=-Xms1g -Xmx1g + - indices.id_field_data.enabled=true + - path.repo=/usr/share/elasticsearch/data/backups + - STACK_VERSION=${STACK_VERSION} + - xpack.apm.tracing.enabled=true + - xpack.license.self_generated.type=trial + - xpack.monitoring.collection.enabled=true + - xpack.security.authc.anonymous.roles=remote_monitoring_collector + - xpack.security.authc.api_key.enabled=true + - xpack.security.authc.realms.file.file1.order=0 + - xpack.security.authc.realms.native.native1.order=1 + - xpack.security.authc.token.enabled=true + - xpack.security.enabled=true + healthcheck: + interval: 20s + retries: 10 + test: + - CMD-SHELL + - curl -s -k http://localhost:9200/_cluster/health | grep -vq '"status":"red"' + image: elasticsearch:test + labels: + - co.elastic.apm.stack-version=${STACK_VERSION} + - co.elastic.metrics/module=elasticsearch + - co.elastic.metrics/metricsets=node,node_stats + - co.elastic.metrics/hosts=http://$${data.host}:9200 + logging: + driver: json-file + options: + max-file: "5" + max-size: 2m + ports: + # - 127.0.0.1:9200:9200 + - "9200" + ulimits: + memlock: + hard: -1 + soft: -1 + volumes: + - ./entrypoint.sh:/entrypoint.sh + - ./config/elasticsearch/roles.yml:/usr/share/elasticsearch/config/roles.yml + - ./config/elasticsearch/users:/usr/share/elasticsearch/config/users + - ./config/elasticsearch/users_roles:/usr/share/elasticsearch/config/users_roles + - ./config/elasticsearch/service_tokens:/usr/share/elasticsearch/config/service_tokens + kibana: + depends_on: + elasticsearch: + condition: service_healthy + environment: + ELASTICSEARCH_HOSTS: http://elasticsearch:9200 + ELASTICSEARCH_PASSWORD: changeme + ELASTICSEARCH_USERNAME: kibana_system_user + ELASTIC_APM_SECRET_TOKEN: "" + ELASTIC_APM_SERVER_URL: http://apmserver:8200 + ELASTIC_APM_TLS: "false" + ENTERPRISESEARCH_HOST: http://enterprise-search:3002 + SERVER_HOST: 0.0.0.0 + SERVER_NAME: kibana.example.org + STATUS_ALLOWANONYMOUS: "true" + TELEMETRY_ENABLED: "false" + XPACK_APM_SERVICEMAPENABLED: "true" + XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY: fhjskloppd678ehkdfdlliverpoolfcr + XPACK_FLEET_AGENTS_ELASTICSEARCH_HOSTS: '["http://elasticsearch:9200"]' + XPACK_FLEET_REGISTRYURL: 
https://epr-snapshot.elastic.co + XPACK_MONITORING_ENABLED: "true" + XPACK_REPORTING_ROLES_ENABLED: "false" + XPACK_SECURITY_ENCRYPTIONKEY: fhjskloppd678ehkdfdlliverpoolfcr + XPACK_SECURITY_LOGINASSISTANCEMESSAGE: Login details: `admin/changeme`. Further details [here](https://github.com/elastic/apm-integration-testing#logging-in). + XPACK_SECURITY_SESSION_IDLETIMEOUT: 1M + XPACK_SECURITY_SESSION_LIFESPAN: 3M + XPACK_XPACK_MAIN_TELEMETRY_ENABLED: "false" + healthcheck: + interval: 10s + retries: 30 + start_period: 10s + test: + - CMD-SHELL + - curl -s -k http://kibana:5601/api/status | grep -q 'All services are available' + image: docker.elastic.co/kibana/kibana:${STACK_VERSION} + labels: + - co.elastic.apm.stack-version=${STACK_VERSION} + logging: + driver: json-file + options: + max-file: "5" + max-size: 2m + # ports: + # - 127.0.0.1:5601:5601 + volumes: + - ./config/kibana/kibana-8.yml:/usr/share/kibana/config/kibana.yml + + wait-service: + container_name: wait + depends_on: + apmserver: + condition: service_healthy + elasticsearch: + condition: service_healthy + kibana: + condition: service_healthy + image: busybox + +volumes: + esdata: + driver: local diff --git a/qa/apm/entrypoint.sh b/qa/apm/entrypoint.sh new file mode 100755 index 0000000000000..f9973254e3923 --- /dev/null +++ b/qa/apm/entrypoint.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +# Custom entrypoint to generate the `elasticapm.properties` file. This is a +# script instead of a static file so that the `service_version` can be set +# correctly, although for the purposes of this test, that may be pointless. + +set -eo pipefail + +cd /usr/share/elasticsearch/ + +cat > config/elasticapm.properties < { + final Request tracesSearchRequest = new Request("GET", "/traces-apm-default/_search"); + tracesSearchRequest.setJsonEntity(""" + { + "query": { + "match": { + "transaction.name": "GET /_nodes/stats" + } + } + }"""); + final Response tracesSearchResponse = performRequestTolerantly(tracesSearchRequest); + assertOK(tracesSearchResponse); + + final List> documents = getDocuments(nodesResponse); + assertThat(documents, hasSize(1)); + }); + } + + @SuppressWarnings("unchecked") + private List> getDocuments(Response response) throws IOException { + final Map stringObjectMap = ESRestTestCase.entityAsMap(response); + return (List>) XContentMapValues.extractValue( + "hits.hits._source", + stringObjectMap + ); + } + + private Response performRequestTolerantly(Request request) { + try { + return client().performRequest(request); + } catch (Exception e) { + throw new AssertionError(e); + } + } + + private String getProperty(String key) { + String value = System.getProperty(key); + if (value == null) { + throw new IllegalStateException( + "Could not find system properties from test.fixtures. " + + "This test expects to run with the elasticsearch.test.fixtures Gradle plugin" + ); + } + return value; + } + + @Override + protected Settings restClientSettings() { + String token = basicAuthHeaderValue("admin", new SecureString("changeme".toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + @Override + protected String getTestRestCluster() { + return "localhost:" + getProperty("test.fixtures.elasticsearch.tcp.9200"); + } +} From e9695d27c7148cad385181a8fc185ee5262413e0 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Thu, 21 Apr 2022 15:53:59 +0100 Subject: [PATCH 63/90] Get the APM integration test working Also disable using Scope in APMTracer for now, it seems to break things? 
--- qa/apm/build.gradle | 1 + .../org/elasticsearch/xpack/apm/ApmIT.java | 85 ++++++++++++------- .../elasticsearch/xpack/apm/APMTracer.java | 7 +- 3 files changed, 58 insertions(+), 35 deletions(-) diff --git a/qa/apm/build.gradle b/qa/apm/build.gradle index 245f13422c1ef..ed90572ac13dc 100644 --- a/qa/apm/build.gradle +++ b/qa/apm/build.gradle @@ -22,6 +22,7 @@ dependencies { dockerCompose { environment.put 'STACK_VERSION', VersionProperties.elasticsearch + removeContainers = false } elasticsearch_distributions { diff --git a/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java b/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java index 330f2a0ad1495..7097051e8732a 100644 --- a/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java +++ b/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -14,48 +14,57 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.CheckedRunnable; import org.elasticsearch.test.rest.ESRestTestCase; import java.io.IOException; import java.util.List; import java.util.Map; +import java.util.concurrent.TimeUnit; -import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.not; public class ApmIT extends ESRestTestCase { - public void testName() throws Exception { - final Request nodesRequest = new Request("GET", "/_nodes/stats"); - final Response nodesResponse = client().performRequest(nodesRequest); - assertOK(nodesResponse); + /** + * Check that if we send HTTP traffic to Elasticsearch, then traces are captured in APM server. + */ + public void testCapturesTracesForHttpTraffic() throws Exception { + for (int i = 0; i < 20; i++) { + final Request nodesRequest = new Request("GET", "/_nodes/stats"); + final Response nodesResponse = client().performRequest(nodesRequest); + assertOK(nodesResponse); + } assertBusy(() -> { + logger.error("Looping..."); final Request tracesSearchRequest = new Request("GET", "/traces-apm-default/_search"); tracesSearchRequest.setJsonEntity(""" - { - "query": { - "match": { - "transaction.name": "GET /_nodes/stats" + { + "query": { + "match": { "transaction.name": "GET /_nodes/stats" } } - } - }"""); + }"""); final Response tracesSearchResponse = performRequestTolerantly(tracesSearchRequest); assertOK(tracesSearchResponse); - final List> documents = getDocuments(nodesResponse); - assertThat(documents, hasSize(1)); - }); + final List> documents = getDocuments(tracesSearchResponse); + assertThat(documents, not(empty())); + }, 1, TimeUnit.MINUTES); } - @SuppressWarnings("unchecked") - private List> getDocuments(Response response) throws IOException { - final Map stringObjectMap = ESRestTestCase.entityAsMap(response); - return (List>) XContentMapValues.extractValue( - "hits.hits._source", - stringObjectMap - ); + /** + * We don't need to clean up the cluster, particularly as we have Kibana and APM server using ES. + */ + @Override + protected boolean preserveClusterUponCompletion() { + return true; } + /** + * Turns exceptions into assertion failures so that {@link #assertBusy(CheckedRunnable)} can still retry. 
+ */ private Response performRequestTolerantly(Request request) { try { return client().performRequest(request); @@ -64,25 +73,37 @@ private Response performRequestTolerantly(Request request) { } } - private String getProperty(String key) { - String value = System.getProperty(key); - if (value == null) { - throw new IllegalStateException( - "Could not find system properties from test.fixtures. " - + "This test expects to run with the elasticsearch.test.fixtures Gradle plugin" - ); - } - return value; - } - + /** + * Customizes the client settings to use the same username / password that is configured in Docke.r + */ @Override protected Settings restClientSettings() { String token = basicAuthHeaderValue("admin", new SecureString("changeme".toCharArray())); return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); } + /** + * Constructs the correct cluster address by looking up the dynamic port that Elasticsearch is exposed on. + */ @Override protected String getTestRestCluster() { return "localhost:" + getProperty("test.fixtures.elasticsearch.tcp.9200"); } + + @SuppressWarnings("unchecked") + private List> getDocuments(Response response) throws IOException { + final Map stringObjectMap = ESRestTestCase.entityAsMap(response); + return (List>) XContentMapValues.extractValue("hits.hits._source", stringObjectMap); + } + + private String getProperty(String key) { + String value = System.getProperty(key); + if (value == null) { + throw new IllegalStateException( + "Could not find system properties from test.fixtures. " + + "This test expects to run with the elasticsearch.test.fixtures Gradle plugin" + ); + } + return value; + } } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 90332bea50c69..96f124fcbd8fd 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -67,7 +67,7 @@ Span span() { void close() { this.span().end(); - this.scope.close(); +// this.scope.close(); } } @@ -172,7 +172,7 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { setSpanAttributes(threadContext, traceable, spanBuilder); final Span span = spanBuilder.startSpan(); final Context contextForNewSpan = Context.current().with(span); - final Scope scope = contextForNewSpan.makeCurrent(); +// final Scope scope = contextForNewSpan.makeCurrent(); final Map spanHeaders = new HashMap<>(); services.openTelemetry.getPropagators().getTextMapPropagator().inject(contextForNewSpan, spanHeaders, Map::put); @@ -184,7 +184,8 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { // propagated threadContext.putHeader(spanHeaders); - return new ContextScope(contextForNewSpan, scope); +// return new ContextScope(contextForNewSpan, scope); + return new ContextScope(contextForNewSpan, null); })); } From 7164dd80512de8bbcad17ca6c3bdbd2121dcdc05 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Tue, 26 Apr 2022 12:34:12 +0100 Subject: [PATCH 64/90] Test fixes --- .../http/netty4/Netty4BadRequestTests.java | 4 +-- .../org/elasticsearch/tasks/TaskManager.java | 5 ++++ .../transport/TransportService.java | 12 ++++++++ .../cluster/coordination/JoinHelperTests.java | 4 ++- .../discovery/PeerFinderTests.java | 4 ++- .../indices/cluster/ClusterStateChanges.java | 4 ++- 
.../elasticsearch/tasks/TaskManagerTests.java | 30 +++++++++++++++++-- .../transport/InboundHandlerTests.java | 7 +++-- 8 files changed, 60 insertions(+), 10 deletions(-) diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4BadRequestTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4BadRequestTests.java index d3ea7c5bfd7c0..f6c9f0d6979bd 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4BadRequestTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4BadRequestTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.netty4.SharedGroupFactory; import org.junit.After; import org.junit.Before; @@ -37,7 +38,6 @@ import java.io.UncheckedIOException; import java.util.Collection; import java.util.Collections; -import java.util.List; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -90,7 +90,7 @@ public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, dispatcher, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), new SharedGroupFactory(Settings.EMPTY), - List.of() + Tracer.NOOP ) ) { httpServerTransport.start(); diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskManager.java b/server/src/main/java/org/elasticsearch/tasks/TaskManager.java index 99ece2e064de1..e4ada1b79be1e 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskManager.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskManager.java @@ -100,6 +100,11 @@ public class TaskManager implements ClusterStateApplier { private final Map channelPendingTaskTrackers = ConcurrentCollections.newConcurrentMap(); private final SetOnce cancellationService = new SetOnce<>(); + // For testing + public TaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders) { + this(settings, threadPool, taskHeaders, Tracer.NOOP); + } + public TaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders, Tracer tracer) { this.threadPool = threadPool; this.taskHeaders = taskHeaders.toArray(Strings.EMPTY_ARRAY); diff --git a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java index 5fc20f8b84e23..f97007ba16610 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java @@ -169,6 +169,18 @@ public String toString() { } }; + public TransportService( + Settings settings, + Transport transport, + ThreadPool threadPool, + TransportInterceptor transportInterceptor, + Function localNodeFactory, + @Nullable ClusterSettings clusterSettings, + Set taskHeaders + ) { + this(settings, transport, threadPool, transportInterceptor, localNodeFactory, clusterSettings, taskHeaders, Tracer.NOOP); + } + /** * Build the service. 
* diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java index 5bf587451d4e2..4d1b141648066 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.AbstractSimpleTransportTestCase; import org.elasticsearch.transport.ClusterConnectionManager; import org.elasticsearch.transport.RemoteTransportException; @@ -80,7 +81,8 @@ public void testJoinDeduplication() { x -> localNode, null, Collections.emptySet(), - new ClusterConnectionManager(Settings.EMPTY, capturingTransport, threadPool.getThreadContext()) + new ClusterConnectionManager(Settings.EMPTY, capturingTransport, threadPool.getThreadContext()), + Tracer.NOOP ); JoinHelper joinHelper = new JoinHelper( Settings.EMPTY, diff --git a/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java b/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java index 53c163172fce5..7aa3bc3c09bae 100644 --- a/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.test.transport.CapturingTransport.CapturedRequest; import org.elasticsearch.test.transport.StubbableConnectionManager; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.ClusterConnectionManager; import org.elasticsearch.transport.ConnectionManager; import org.elasticsearch.transport.TransportException; @@ -234,7 +235,8 @@ public void setup() { boundTransportAddress -> localNode, null, emptySet(), - connectionManager + connectionManager, + Tracer.NOOP ); transportService.start(); diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java index 7b3e0cae1638c..e5ca541e9f6c3 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java @@ -89,6 +89,7 @@ import org.elasticsearch.snapshots.EmptySnapshotsInfoService; import org.elasticsearch.test.gateway.TestGatewayAllocator; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportService; @@ -196,7 +197,8 @@ public ClusterStateChanges(NamedXContentRegistry xContentRegistry, ThreadPool th TransportService.NOOP_TRANSPORT_INTERCEPTOR, boundAddress -> DiscoveryNode.createLocal(SETTINGS, boundAddress.publishAddress(), UUIDs.randomBase64UUID()), clusterSettings, - Collections.emptySet() + Collections.emptySet(), + Tracer.NOOP ); IndexMetadataVerifier indexMetadataVerifier = new IndexMetadataVerifier(SETTINGS, xContentRegistry, null, null, null) { // metadata upgrader should do nothing diff --git a/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java b/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java 
index 6e40e9434141e..0088227955455 100644 --- a/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java +++ b/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.FakeTcpChannel; import org.elasticsearch.transport.TcpChannel; import org.elasticsearch.transport.TcpTransportChannel; @@ -28,6 +29,7 @@ import org.elasticsearch.transport.TransportService; import org.junit.After; import org.junit.Before; +import org.mockito.Mockito; import java.util.ArrayList; import java.util.Collections; @@ -45,7 +47,10 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.in; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class TaskManagerTests extends ESTestCase { @@ -75,7 +80,7 @@ public void testResultsServiceRetryTotalTime() { } public void testTrackingChannelTask() throws Exception { - final TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Set.of()); + final TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Set.of(), Tracer.NOOP); Set cancelledTasks = ConcurrentCollections.newConcurrentSet(); final var transportServiceMock = mock(TransportService.class); when(transportServiceMock.getThreadPool()).thenReturn(threadPool); @@ -125,7 +130,7 @@ void cancelTaskAndDescendants(CancellableTask task, String reason, boolean waitF } public void testTrackingTaskAndCloseChannelConcurrently() throws Exception { - final TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Set.of()); + final TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Set.of(), Tracer.NOOP); Set cancelledTasks = ConcurrentCollections.newConcurrentSet(); final var transportServiceMock = mock(TransportService.class); when(transportServiceMock.getThreadPool()).thenReturn(threadPool); @@ -184,7 +189,7 @@ void cancelTaskAndDescendants(CancellableTask task, String reason, boolean waitF } public void testRemoveBansOnChannelDisconnects() throws Exception { - final TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Set.of()); + final TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Set.of(), Tracer.NOOP); final var transportServiceMock = mock(TransportService.class); when(transportServiceMock.getThreadPool()).thenReturn(threadPool); taskManager.setTaskCancellationService(new TaskCancellationService(transportServiceMock) { @@ -230,6 +235,25 @@ void cancelTaskAndDescendants(CancellableTask task, String reason, boolean waitF assertThat(taskManager.numberOfChannelPendingTaskTrackers(), equalTo(0)); } + public void testRegisterTaskStartsTracing() { + final Tracer mockTracer = Mockito.mock(Tracer.class); + final TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Set.of(), mockTracer); + + final Task task = taskManager.register("testType", "testAction", new TaskAwareRequest() { + + @Override + public void setParentTask(TaskId taskId) { + } + + @Override + public TaskId getParentTask() { + return TaskId.EMPTY_TASK_ID; + } + }); + + verify(mockTracer).onTraceStarted(any(), eq(task)); + } + static class CancellableRequest 
extends TransportRequest { private final String requestId; diff --git a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java index d7ba04afef70b..8b586a826a21e 100644 --- a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.junit.After; import org.junit.Before; @@ -106,7 +107,8 @@ public void testPing() throws Exception { (request, channel, task) -> channelCaptor.set(channel), ThreadPool.Names.SAME, false, - true + true, + Tracer.NOOP ); requestHandlers.registerHandler(registry); @@ -154,7 +156,8 @@ public TestResponse read(StreamInput in) throws IOException { }, ThreadPool.Names.SAME, false, - true + true, + Tracer.NOOP ); requestHandlers.registerHandler(registry); String requestValue = randomAlphaOfLength(10); From e56427b8b3fd29fb3d462b83cd0ece2d4ec4f549 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 27 Apr 2022 11:42:00 +0100 Subject: [PATCH 65/90] Add support for opening Scope via the Tracer --- TRACING.md | 77 ++++++++--- .../http/DefaultRestChannel.java | 124 ++++++++++-------- .../org/elasticsearch/http/HttpTracer.java | 5 + .../java/org/elasticsearch/node/Node.java | 1 + .../org/elasticsearch/tracing/Tracer.java | 15 ++- .../src/main/config/elasticapm.properties | 25 +++- .../elasticsearch/xpack/apm/APMTracer.java | 64 ++++----- 7 files changed, 198 insertions(+), 113 deletions(-) diff --git a/TRACING.md b/TRACING.md index 7c8f6999ef0e1..02cd11115e88e 100644 --- a/TRACING.md +++ b/TRACING.md @@ -5,24 +5,25 @@ us to gather traces and analyze what Elasticsearch is doing. ## How is tracing implemented? -The Elasticsearch server code contains a -[`tracing`](./server/src/main/java/org/elasticsearch/tracing/) package, which is +The Elasticsearch server code contains a [`tracing`][tracing] package, which is an abstraction over the OpenTelemetry API. All locations in the code that -performing instrumentation and tracing must use these abstractions. +perform instrumentation and tracing must use these abstractions. Separately, there is the [`apm-integration`](./x-pack/plugins/apm-integration/) -module, which works with the OpenTelemetry API directly to manipulate spans. +module, which works with the OpenTelemetry API directly to record trace data. ## Where is tracing data sent? You need to have an OpenTelemetry server running somewhere. For example, you can -create a deployment in Elastic Cloud, and use Elastic's APM integration. +create a deployment in Elastic Cloud with Elastic's APM integration. ## How is tracing data sent? -This branch uses the OpenTelemetry SDK, which is a reference implementation of -the API. Work is underway to use the Elastic APM agent for Java, which attaches -at runtime and removes the need for Elasticsearch to hard-code the use of an SDK. +We use Elastic's APM agent for Java, which attaches at runtime to the +Elasticsearch JVM, which removes the need for Elasticsearch to hard-code the use +of an SDK. The agent is configured using a configuration file at +"config/elasticapm.properties". By default, the agent is disabled, so it is +present as a Java agent, but will do nothing. ## What do we trace? @@ -33,33 +34,69 @@ task in the tracing system. 
We also instrument REST requests, which are not (at present) modelled by tasks. A span can be associated with a parent span, which allows all spans in, for -example, a REST request to be grouped together. Spans can track the -Elasticsearch supports the [W3c -Trace Context](https://www.w3.org/TR/trace-context/) headers. It also uses these +example, a REST request to be grouped together. Spans can track work across +different Elasticsearch nodes. + +Elasticsearch also supports distributed tracing via [W3c Trace Context][w3c] +headers. If clients of Elasticsearch send these headers with their requests, +then that data will be forwarded to the APM server in order to yield a trace +across systems. ## Thread contexts and nested spans When a span is started, Elasticsearch tracks information about that span in the -current [thread -context](./server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java). -When a nested span is started, a new thread context is created, and the current -span information is moved so that it becomes the parent span information. +current [thread context][thread-context]. If a new thread context is created, +then current span information is propagated but renamed, so that (1) it doesn't +interfere when new trace information is set in the context, and (2) the previous +trace information is available to establish a parent / child span relationship. Sometimes we need to detach new spans from their parent. For example, creating an index starts some related background tasks, but these shouldn't be associated with the REST request, otherwise all the background task spans will be associated with the REST request for as long as Elasticsearch is running. +`ThreadContext` provides the `clearTraceContext`() method for this purpose. ## How to I trace something that isn't a task? First work out if you can turn it into a task. No, really. -If you can't do that, you'll need to ensure that your class can access the -`Node`'s tracers, then call the appropriate methods on the tracers when a span -should start and end. +If you can't do that, you'll need to ensure that your class can get access to a +`Tracer` instance (this is available to inject, or you'll need to pass it when +your class is created). Then you need to call the appropriate methods on the +tracers when a span should start and end. + +## What additional attributes should I set? + +That's up to you. Be careful about capture anything that could leak sensitive +or personal information. + +## What is "scope" and when should I used it? + +Usually you won't need to. + +That said, sometimes you may want more details to be captured about a particular +section of code. You can think of "scope" as representing the currently active +tracing context. Using scope allows the APM agent to do the following: + +* Enables automatic correlation between the "active span" and logging, where + logs have also been captured. +* Enables capturing any exceptions thrown when the span is active, and linking + those exceptions to the span +* Allows the sampling profiler to be used as it allows samples to be linked to + the active span (if any), so the agent can automatically get extra spans + without manual instrumentation. + +In the OpenTelemetry documentation, spans, scope and context are fairly +straightforward to use, since `Scope` is an `AutoCloseable` and so can be +easily created and cleaned up use try-with-resources blocks. 
Unfortunately, +Elasticsearch is a complex piece of software, and also extremely asynchronous, +so the typical OpenTelemetry examples do not work. -## What attributes should I set? +Nonetheless, it is possible to manually use scope where we need more detail by +explicitly opening a scope via the `Tracer`. -TODO. [otel]: https://opentelemetry.io/ +[thread-context]: ./server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java). +[w3c]: https://www.w3.org/TR/trace-context/ +[tracing]: ./server/src/main/java/org/elasticsearch/tracing/ diff --git a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java index c5010ef07a05d..dbb3cb299861a 100644 --- a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java +++ b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java @@ -79,78 +79,88 @@ protected BytesStreamOutput newBytesOutput() { @Override public void sendResponse(RestResponse restResponse) { - // We're sending a response so we know we won't be needing the request content again and release it - httpRequest.release(); + try (var ignored = tracer.withScope(this)) { + // We're sending a response so we know we won't be needing the request content again and release it + httpRequest.release(); - final ArrayList toClose = new ArrayList<>(3); - if (HttpUtils.shouldCloseConnection(httpRequest)) { - toClose.add(() -> CloseableChannel.closeChannel(httpChannel)); - } - - boolean success = false; - String opaque = null; - String contentLength = null; - final AtomicBoolean traceStopped = new AtomicBoolean(false); - final Runnable onFinish = () -> { - Releasables.close(toClose); - if (traceStopped.compareAndSet(false, true)) { - tracer.onTraceStopped(this); + final ArrayList toClose = new ArrayList<>(3); + if (HttpUtils.shouldCloseConnection(httpRequest)) { + toClose.add(() -> CloseableChannel.closeChannel(httpChannel)); } - }; - try { - final BytesReference content = restResponse.content(); - if (content instanceof Releasable) { - toClose.add((Releasable) content); - } - toClose.add(this::releaseOutputBuffer); + boolean success = false; + String opaque = null; + String contentLength = null; + final AtomicBoolean traceStopped = new AtomicBoolean(false); + final Runnable onFinish = () -> { + Releasables.close(toClose); + if (traceStopped.compareAndSet(false, true)) { + tracer.onTraceStopped(this); + } + }; - BytesReference finalContent = content; try { - if (request.method() == RestRequest.Method.HEAD) { - finalContent = BytesArray.EMPTY; + final BytesReference content = restResponse.content(); + if (content instanceof Releasable) { + toClose.add((Releasable) content); + } + toClose.add(this::releaseOutputBuffer); + + BytesReference finalContent = content; + try { + if (request.method() == RestRequest.Method.HEAD) { + finalContent = BytesArray.EMPTY; + } + } catch (IllegalArgumentException ignoredException) { + assert restResponse.status() == RestStatus.METHOD_NOT_ALLOWED + : "request HTTP method is unsupported but HTTP status is not METHOD_NOT_ALLOWED(405)"; } - } catch (IllegalArgumentException ignored) { - assert restResponse.status() == RestStatus.METHOD_NOT_ALLOWED - : "request HTTP method is unsupported but HTTP status is not METHOD_NOT_ALLOWED(405)"; - } - final HttpResponse httpResponse = httpRequest.createResponse(restResponse.status(), finalContent); + final HttpResponse httpResponse = httpRequest.createResponse(restResponse.status(), finalContent); - 
corsHandler.setCorsResponseHeaders(httpRequest, httpResponse); + corsHandler.setCorsResponseHeaders(httpRequest, httpResponse); - opaque = request.header(X_OPAQUE_ID_HTTP_HEADER); - if (opaque != null) { - setHeaderField(httpResponse, X_OPAQUE_ID_HTTP_HEADER, opaque); - } + opaque = request.header(X_OPAQUE_ID_HTTP_HEADER); + if (opaque != null) { + setHeaderField(httpResponse, X_OPAQUE_ID_HTTP_HEADER, opaque); + } - // Add all custom headers - addCustomHeaders(httpResponse, restResponse.getHeaders()); - addCustomHeaders(httpResponse, restResponse.filterHeaders(threadContext.getResponseHeaders())); + // Add all custom headers + addCustomHeaders(httpResponse, restResponse.getHeaders()); + addCustomHeaders(httpResponse, restResponse.filterHeaders(threadContext.getResponseHeaders())); - // If our response doesn't specify a content-type header, set one - setHeaderField(httpResponse, CONTENT_TYPE, restResponse.contentType(), false); - // If our response has no content-length, calculate and set one - contentLength = String.valueOf(restResponse.content().length()); - setHeaderField(httpResponse, CONTENT_LENGTH, contentLength, false); + // If our response doesn't specify a content-type header, set one + setHeaderField(httpResponse, CONTENT_TYPE, restResponse.contentType(), false); + // If our response has no content-length, calculate and set one + contentLength = String.valueOf(restResponse.content().length()); + setHeaderField(httpResponse, CONTENT_LENGTH, contentLength, false); - addCookies(httpResponse); + addCookies(httpResponse); - tracer.setAttribute(this, "http.status_code", restResponse.status().getStatus()); - restResponse.getHeaders() - .forEach((key, values) -> tracer.setAttribute(this, "http.response.headers." + key, String.join("; ", values))); + tracer.setAttribute(this, "http.status_code", restResponse.status().getStatus()); + restResponse.getHeaders() + .forEach((key, values) -> tracer.setAttribute(this, "http.response.headers." 
+ key, String.join("; ", values))); - ActionListener listener = ActionListener.wrap(onFinish); - tracer.onTraceEvent(this, "startResponse"); - try (ThreadContext.StoredContext existing = threadContext.stashContext()) { - httpChannel.sendResponse(httpResponse, listener); - } - success = true; - } finally { - if (success == false) { - onFinish.run(); + ActionListener listener = ActionListener.wrap(onFinish); + tracer.onTraceEvent(this, "startResponse"); + try (ThreadContext.StoredContext existing = threadContext.stashContext()) { + httpChannel.sendResponse(httpResponse, listener); + } + success = true; + } finally { + if (success == false) { + onFinish.run(); + } + tracer.maybeLogResponse( + httpRequest.uri(), + restResponse, + httpChannel, + contentLength, + opaque, + request.getRequestId(), + success + ); } - tracer.maybeLogResponse(httpRequest.uri(), restResponse, httpChannel, contentLength, opaque, request.getRequestId(), success); } } diff --git a/server/src/main/java/org/elasticsearch/http/HttpTracer.java b/server/src/main/java/org/elasticsearch/http/HttpTracer.java index a66d27a4d1fac..e0721b3f09f1e 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpTracer.java +++ b/server/src/main/java/org/elasticsearch/http/HttpTracer.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Releasable; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -79,6 +80,10 @@ void setAttribute(Traceable traceable, String key, String value) { this.tracer.setAttribute(traceable, key, value); } + Releasable withScope(Traceable traceable) { + return tracer.withScope(traceable); + } + /** * Logs the given request if request tracing is enabled and the request uri matches the current include and exclude patterns defined * in {@link HttpTransportSettings#SETTING_HTTP_TRACE_LOG_INCLUDE} and {@link HttpTransportSettings#SETTING_HTTP_TRACE_LOG_EXCLUDE}. diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 442c8acc7d659..622221f04a5bb 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -994,6 +994,7 @@ protected Node( b.bind(DesiredNodesSettingsValidator.class).toInstance(desiredNodesSettingsValidator); b.bind(HealthService.class).toInstance(healthService); b.bind(StatsRequestLimiter.class).toInstance(statsRequestLimiter); + b.bind(Tracer.class).toInstance(tracer); }); if (ReadinessService.enabled(environment)) { diff --git a/server/src/main/java/org/elasticsearch/tracing/Tracer.java b/server/src/main/java/org/elasticsearch/tracing/Tracer.java index 36ce1f3fa9428..0afa37e1d49a5 100644 --- a/server/src/main/java/org/elasticsearch/tracing/Tracer.java +++ b/server/src/main/java/org/elasticsearch/tracing/Tracer.java @@ -9,6 +9,7 @@ package org.elasticsearch.tracing; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Releasable; /** * Represents a distributed tracing system that keeps track of the start and end of various activities in the cluster. @@ -17,11 +18,14 @@ public interface Tracer { /** * Called when the {@link Traceable} activity starts. + * @param threadContext the current context. Required for tracing parent/child span activity. 
+ * @param traceable the thing to start tracing */ void onTraceStarted(ThreadContext threadContext, Traceable traceable); /** * Called when the {@link Traceable} activity ends. + * @param traceable the thing to stop tracing */ void onTraceStopped(Traceable traceable); @@ -37,11 +41,11 @@ public interface Tracer { void setAttribute(Traceable traceable, String key, String value); + Releasable withScope(Traceable traceable); + Tracer NOOP = new Tracer() { @Override - public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { - return; - } + public void onTraceStarted(ThreadContext threadContext, Traceable traceable) {} @Override public void onTraceStopped(Traceable traceable) {} @@ -63,5 +67,10 @@ public void setAttribute(Traceable traceable, String key, long value) {} @Override public void setAttribute(Traceable traceable, String key, String value) {} + + @Override + public Releasable withScope(Traceable traceable) { + return () -> {}; + } }; } diff --git a/x-pack/plugin/apm-integration/src/main/config/elasticapm.properties b/x-pack/plugin/apm-integration/src/main/config/elasticapm.properties index 8f02ac380b111..0d11ce1d34be1 100644 --- a/x-pack/plugin/apm-integration/src/main/config/elasticapm.properties +++ b/x-pack/plugin/apm-integration/src/main/config/elasticapm.properties @@ -1,21 +1,44 @@ # Set to `true` to enable the APM agent enabled: false +# Identifies the version of Elasticsearch in the captured trace data. service_version: @es.version@ -# ES does not use auto-instrumentation +# ES does not use auto-instrumentation. instrument: false # Required for OpenTelemetry support enable_experimental_instrumentations: true +# Configures the APM server to which traces will be sent. # server_url: + +# Configures the token to present to the server for authn / authz. # secret_token: +# This is used to keep all the errors and transactions of your service +# together and is the primary filter in the Elastic APM user interface. service_name: elasticsearch + +# If set, this name is used to distinguish between different nodes of a +# service, therefore it should be unique for each JVM within a service. If +# not set, data aggregations will be done based on a container ID (where +# valid) or on the reported hostname (automatically discovered or manually +# configured through hostname). +# # service_node_name: node1 +# An arbitrary string that identifies this deployment environment. For +# example, "dev", "staging" or "prod". Can be anything you like, but must +# have the same value across different systems in the same deployment +# environment. environment: dev +# Logging configuration. Unless you need detailed logs about what the APM +# is doing, leave this value alone. log_level: error + +# Configures a log file to write to. `_AGENT_HOME_` is a placeholder used +# by the agent. Don't disable writing to a log file, as the agent will then +# require extra Security Manager permissions and it's just painful. 
log_file: _AGENT_HOME_/../../logs/apm.log diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 96f124fcbd8fd..f89534d3d7ffd 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -14,7 +14,6 @@ import io.opentelemetry.api.trace.SpanKind; import io.opentelemetry.api.trace.Tracer; import io.opentelemetry.context.Context; -import io.opentelemetry.context.Scope; import io.opentelemetry.context.propagation.TextMapGetter; import org.apache.logging.log4j.LogManager; @@ -27,6 +26,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Releasable; import org.elasticsearch.tasks.Task; import org.elasticsearch.tracing.Traceable; @@ -34,7 +34,6 @@ import java.security.PrivilegedAction; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; @@ -60,18 +59,17 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic NodeScope ); - private record ContextScope(Context context, Scope scope) { + private record ContextWrapper(Context context) { Span span() { return Span.fromContextOrNull(this.context); } void close() { this.span().end(); -// this.scope.close(); } } - private final Map spans = ConcurrentCollections.newConcurrentMap(); + private final Map spans = ConcurrentCollections.newConcurrentMap(); private final ClusterService clusterService; private volatile boolean enabled; @@ -153,7 +151,7 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { return; } - spans.computeIfAbsent(traceable.getSpanId(), spanId -> AccessController.doPrivileged((PrivilegedAction) () -> { + spans.computeIfAbsent(traceable.getSpanId(), spanId -> AccessController.doPrivileged((PrivilegedAction) () -> { final SpanBuilder spanBuilder = services.tracer.spanBuilder(traceable.getSpanName()); // https://github.com/open-telemetry/opentelemetry-java/discussions/2884#discussioncomment-381870 @@ -172,7 +170,6 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { setSpanAttributes(threadContext, traceable, spanBuilder); final Span span = spanBuilder.startSpan(); final Context contextForNewSpan = Context.current().with(span); -// final Scope scope = contextForNewSpan.makeCurrent(); final Map spanHeaders = new HashMap<>(); services.openTelemetry.getPropagators().getTextMapPropagator().inject(contextForNewSpan, spanHeaders, Map::put); @@ -184,11 +181,16 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { // propagated threadContext.putHeader(spanHeaders); -// return new ContextScope(contextForNewSpan, scope); - return new ContextScope(contextForNewSpan, null); + return new ContextWrapper(contextForNewSpan); })); } + @Override + public Releasable withScope(Traceable traceable) { + var scope = spans.get(traceable.getSpanId()).context.makeCurrent(); + return scope::close; + } + private void setSpanAttributes(ThreadContext threadContext, Traceable traceable, SpanBuilder spanBuilder) { for (Map.Entry entry : traceable.getAttributes().entrySet()) { final Object value = entry.getValue(); @@ -223,41 
+225,41 @@ private void setSpanAttributes(ThreadContext threadContext, Traceable traceable, @Override public void onTraceException(Traceable traceable, Throwable throwable) { - final var contextScope = spans.get(traceable.getSpanId()); - if (contextScope != null) { - contextScope.span().recordException(throwable); + final var context = spans.get(traceable.getSpanId()); + if (context != null) { + context.span().recordException(throwable); } } @Override public void setAttribute(Traceable traceable, String key, boolean value) { - final var contextScope = spans.get(traceable.getSpanId()); - if (contextScope != null) { - contextScope.span().setAttribute(key, value); + final var context = spans.get(traceable.getSpanId()); + if (context != null) { + context.span().setAttribute(key, value); } } @Override public void setAttribute(Traceable traceable, String key, double value) { - final var contextScope = spans.get(traceable.getSpanId()); - if (contextScope != null) { - contextScope.span().setAttribute(key, value); + final var context = spans.get(traceable.getSpanId()); + if (context != null) { + context.span().setAttribute(key, value); } } @Override public void setAttribute(Traceable traceable, String key, long value) { - final var contextScope = spans.get(traceable.getSpanId()); - if (contextScope != null) { - contextScope.span().setAttribute(key, value); + final var context = spans.get(traceable.getSpanId()); + if (context != null) { + context.span().setAttribute(key, value); } } @Override public void setAttribute(Traceable traceable, String key, String value) { - final var contextScope = spans.get(traceable.getSpanId()); - if (contextScope != null) { - contextScope.span().setAttribute(key, value); + final var context = spans.get(traceable.getSpanId()); + if (context != null) { + context.span().setAttribute(key, value); } } @@ -291,17 +293,17 @@ private Context getRemoteParentContext(ThreadContext threadContext) { @Override public void onTraceStopped(Traceable traceable) { - final var contextScope = spans.remove(traceable.getSpanId()); - if (contextScope != null) { - contextScope.close(); + final var context = spans.remove(traceable.getSpanId()); + if (context != null) { + context.close(); } } @Override public void onTraceEvent(Traceable traceable, String eventName) { - final var contextScope = spans.get(traceable.getSpanId()); - if (contextScope != null) { - contextScope.span().addEvent(eventName); + final var context = spans.get(traceable.getSpanId()); + if (context != null) { + context.span().addEvent(eventName); } } @@ -321,6 +323,4 @@ public String get(Map carrier, String key) { private static boolean isSupportedContextKey(String key) { return TRACE_HEADERS.contains(key); } - - private static final Set GRAPHVIZ_CACHE = new HashSet<>(); } From aad7f4c3e9da88a17d0b8098d3804f780b27ac5d Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Fri, 29 Apr 2022 10:50:19 +0100 Subject: [PATCH 66/90] Make it possible to configure APM agent via settings API --- TRACING.md | 23 ++ .../testclusters/ElasticsearchNode.java | 53 +++- qa/apm/build.gradle | 2 +- qa/apm/docker-compose.yml | 1 - qa/apm/entrypoint.sh | 19 +- .../org/elasticsearch/xpack/apm/ApmIT.java | 53 +++- .../common/settings/Setting.java | 2 +- .../src/main/config/elasticapm.properties | 15 +- .../java/org/elasticsearch/xpack/apm/APM.java | 2 +- .../xpack/apm/APMAgentSettings.java | 266 ++++++++++++++++++ .../elasticsearch/xpack/apm/APMTracer.java | 60 ++-- .../plugin-metadata/plugin-security.policy | 1 + 12 files changed, 426 insertions(+), 71 
deletions(-) create mode 100644 x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java diff --git a/TRACING.md b/TRACING.md index 02cd11115e88e..547e3869849aa 100644 --- a/TRACING.md +++ b/TRACING.md @@ -12,6 +12,27 @@ perform instrumentation and tracing must use these abstractions. Separately, there is the [`apm-integration`](./x-pack/plugins/apm-integration/) module, which works with the OpenTelemetry API directly to record trace data. +## How is tracing configured? + +We have a config file in [`config/elasticapm.properties`][config], which +configures settings that are not dynamic, or should not be changed at runtime. +Other settings can be configured at runtime by using the cluster settings API, +and setting `xpack.apm.tracing.agent.<key>` with a string value, where `<key>` +is the APM agent key that you want to configure. + +### More details about configuration + +The APM agent pulls configuration from [multiple sources][agent-config], with a +hierarchy that means, for example, that options set in the config file cannot be +overridden via system properties. This is a little unfortunate, since it means +that Elasticsearch cannot ship with sensible defaults for dynamic settings in +the config file, and override them via system properties. + +Instead, static or sensitive config values are put in the config file, and +dynamic settings are left entirely to the system properties. The Elasticsearch +APM plugin has appropriate security access to set the APM-related system +properties. + ## Where is tracing data sent? You need to have an OpenTelemetry server running somewhere. For example, you can @@ -100,3 +121,5 @@ explicitly opening a scope via the `Tracer`. [thread-context]: ./server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java).
[w3c]: https://www.w3.org/TR/trace-context/ [tracing]: ./server/src/main/java/org/elasticsearch/tracing/ +[config]: ./x-pack/plugin/apm-integration/src/main/config/elasticapm.properties +[agent-config]: https://www.elastic.co/guide/en/apm/agent/java/master/configuration.html diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index c1fb2d0d8cdf0..738bfb4dda91c 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -65,11 +65,14 @@ import java.io.UncheckedIOException; import java.net.URL; import java.nio.charset.StandardCharsets; +import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; import java.nio.file.StandardCopyOption; import java.nio.file.StandardOpenOption; +import java.nio.file.attribute.BasicFileAttributes; import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; @@ -1356,19 +1359,12 @@ private void createConfiguration() { StandardOpenOption.CREATE ); - final List configFiles; - try (Stream stream = Files.list(getDistroDir().resolve("config"))) { - configFiles = stream.collect(Collectors.toList()); - } - logToProcessStdout("Copying additional config files from distro " + configFiles); - for (Path file : configFiles) { - Path dest = configFile.getParent().resolve(file.getFileName()); - if (Files.exists(dest) == false) { - Files.copy(file, dest); - } - } + final Path distConfigDir = getDistroDir().resolve("config"); + final RecursiveCopyFileVisitor visitor = new RecursiveCopyFileVisitor(distConfigDir); + Files.walkFileTree(distConfigDir, visitor); + logToProcessStdout("Copied additional config files from distro: " + visitor.getCopiedFiles()); } catch (IOException e) { - throw new UncheckedIOException("Could not write config file: " + configFile, e); + throw new UncheckedIOException("Could not write config file: " + e.getMessage(), e); } tweakJvmOptions(configFileRoot); @@ -1686,4 +1682,37 @@ private static class LinkCreationException extends UncheckedIOException { super(message, cause); } } + + private class RecursiveCopyFileVisitor extends SimpleFileVisitor { + private final Path sourceDir; + private final List copiedFiles; + + RecursiveCopyFileVisitor(Path sourceDir) { + this.sourceDir = sourceDir; + this.copiedFiles = new ArrayList<>(); + } + + public List getCopiedFiles() { + return copiedFiles; + } + + @Override + public FileVisitResult preVisitDirectory(Path sourceDir, BasicFileAttributes attrs) throws IOException { + final Path relativePath = this.sourceDir.relativize(sourceDir); + final Path destPath = configFile.getParent().resolve(relativePath); + if (Files.notExists(destPath)) { + Files.createDirectory(destPath); + } + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult visitFile(Path sourcePath, BasicFileAttributes attrs) throws IOException { + final Path relativePath = sourceDir.relativize(sourcePath); + final Path destPath = configFile.getParent().resolve(relativePath); + Files.copy(sourcePath, destPath, StandardCopyOption.REPLACE_EXISTING); + copiedFiles.add(sourcePath); + return FileVisitResult.CONTINUE; + } + } } diff --git a/qa/apm/build.gradle b/qa/apm/build.gradle index ed90572ac13dc..fed016fe6b127 100644 --- 
a/qa/apm/build.gradle +++ b/qa/apm/build.gradle @@ -22,7 +22,7 @@ dependencies { dockerCompose { environment.put 'STACK_VERSION', VersionProperties.elasticsearch - removeContainers = false + // removeContainers = false } elasticsearch_distributions { diff --git a/qa/apm/docker-compose.yml b/qa/apm/docker-compose.yml index 2dfd629d377fe..620c8aa69ace7 100644 --- a/qa/apm/docker-compose.yml +++ b/qa/apm/docker-compose.yml @@ -54,7 +54,6 @@ services: - ES_JAVA_OPTS=-Xms1g -Xmx1g - indices.id_field_data.enabled=true - path.repo=/usr/share/elasticsearch/data/backups - - STACK_VERSION=${STACK_VERSION} - xpack.apm.tracing.enabled=true - xpack.license.self_generated.type=trial - xpack.monitoring.collection.enabled=true diff --git a/qa/apm/entrypoint.sh b/qa/apm/entrypoint.sh index f9973254e3923..24fb4316fb91c 100755 --- a/qa/apm/entrypoint.sh +++ b/qa/apm/entrypoint.sh @@ -8,18 +8,11 @@ set -eo pipefail cd /usr/share/elasticsearch/ -cat > config/elasticapm.properties < { logger.error("Looping..."); final Request tracesSearchRequest = new Request("GET", "/traces-apm-default/_search"); tracesSearchRequest.setJsonEntity(""" - { - "query": { - "match": { "transaction.name": "GET /_nodes/stats" } - } - }"""); + { + "query": { + "match": { "transaction.name": "GET /_nodes/stats" } + } + }"""); final Response tracesSearchResponse = performRequestTolerantly(tracesSearchRequest); assertOK(tracesSearchResponse); @@ -54,6 +79,14 @@ public void testCapturesTracesForHttpTraffic() throws Exception { }, 1, TimeUnit.MINUTES); } + private void generateTraces() throws IOException { + for (int i = 0; i < 20; i++) { + final Request nodesRequest = new Request("GET", "/_nodes/stats"); + final Response nodesResponse = client().performRequest(nodesRequest); + assertOK(nodesResponse); + } + } + /** * We don't need to clean up the cluster, particularly as we have Kibana and APM server using ES. */ diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index 46b3d7174ce8c..8bf63f16b6cf7 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -1002,7 +1002,7 @@ public Set getNamespaces(Settings settings) { } /** - * Returns a map of all namespaces to it's values give the provided settings + * Returns a map of all namespaces to its values given the provided settings */ public Map getAsMap(Settings settings) { Map map = new HashMap<>(); diff --git a/x-pack/plugin/apm-integration/src/main/config/elasticapm.properties b/x-pack/plugin/apm-integration/src/main/config/elasticapm.properties index 0d11ce1d34be1..b88785bbba3a1 100644 --- a/x-pack/plugin/apm-integration/src/main/config/elasticapm.properties +++ b/x-pack/plugin/apm-integration/src/main/config/elasticapm.properties @@ -1,4 +1,6 @@ -# Set to `true` to enable the APM agent +# Set to `true` to enable the APM agent. Setting it to `false` completely +# disables the agent. You can toggle it on and off at runtime using the +# `recording` setting. enabled: false # Identifies the version of Elasticsearch in the captured trace data. @@ -26,7 +28,7 @@ service_name: elasticsearch # valid) or on the reported hostname (automatically discovered or manually # configured through hostname). # -# service_node_name: node1 +# service_node_name: node1 # An arbitrary string that identifies this deployment environment. For # example, "dev", "staging" or "prod". 
Can be anything you like, but must @@ -42,3 +44,12 @@ log_level: error # by the agent. Don't disable writing to a log file, as the agent will then # require extra Security Manager permissions and it's just painful. log_file: _AGENT_HOME_/../../logs/apm.log + +application_packages: org.elasticsearch,org.apache.lucene + +# Enable if you want APM to poll the APM server for updated configuration +central_config: false + +metrics_interval: 120s + +breakdown_metrics: false diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java index 283543546fc8f..93c14583b6772 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java @@ -56,6 +56,6 @@ public Collection createComponents( @Override public List> getSettings() { - return List.of(APMTracer.APM_ENABLED_SETTING, APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING); + return List.of(APMAgentSettings.APM_ENABLED_SETTING, APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING, APMAgentSettings.APM_AGENT_SETTINGS); } } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java new file mode 100644 index 0000000000000..d91ff63a20fe9 --- /dev/null +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java @@ -0,0 +1,266 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.apm; + +import org.elasticsearch.common.settings.Setting; + +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.function.Function; + +import static org.elasticsearch.common.settings.Setting.Property.NodeScope; +import static org.elasticsearch.common.settings.Setting.Property.OperatorDynamic; + +abstract class APMAgentSettings { + + /** + * Sensible defaults that Elasticsearch configures. This cannot be done via the APM agent + * config file, as then their values cannot be overridden dynamically via system properties. + */ + // tag::noformat + static Map APM_AGENT_DEFAULT_SETTINGS = Map.of( + "transaction_sample_rate", "0.5" + ); + // end::noformat + + static void setAgentSetting(String key, String value) { + final String completeKey = "elastic.apm." + Objects.requireNonNull(key); + AccessController.doPrivileged((PrivilegedAction) () -> { + if (value == null || value.isEmpty()) { + System.clearProperty(completeKey); + } else { + System.setProperty(completeKey, value); + } + return null; + }); + } + + /** + * Lists all known APM agent configuration keys. 
+ */ + private static final List AGENT_KEYS = List.of( + // Circuit-Breaker configuration options + "circuit_breaker_enabled", + "stress_monitoring_interval", + "stress_monitor_gc_stress_threshold", + "stress_monitor_gc_relief_threshold", + "stress_monitor_cpu_duration_threshold", + "stress_monitor_system_cpu_stress_threshold", + "stress_monitor_system_cpu_relief_threshold", + + // Core configuration options + "recording", + "enabled", + "instrument", + "service_name", + "service_node_name", + "service_version", + "hostname", + "environment", + "transaction_sample_rate", + "transaction_max_spans", + "sanitize_field_names", + "enable_instrumentations", + "disable_instrumentations", + "enable_experimental_instrumentations", + "unnest_exceptions", + "ignore_exceptions", + "capture_body", + "capture_headers", + "global_labels", + "classes_excluded_from_instrumentation", + "trace_methods", + "trace_methods_duration_threshold", + "central_config", + "breakdown_metrics", + "config_file", + "plugins_dir", + "use_elastic_traceparent_header", + "span_min_duration", + "cloud_provider", + "enable_public_api_annotation_inheritance", + + // HTTP configuration options + "capture_body_content_types", + "transaction_ignore_urls", + "transaction_ignore_user_agents", + "use_path_as_transaction_name", + "url_groups", + + // Huge Traces configuration options + "span_compression_enabled", + "span_compression_exact_match_max_duration", + "span_compression_same_kind_max_duration", + "exit_span_min_duration", + + // JAX-RS configuration options + "enable_jaxrs_annotation_inheritance", + "use_jaxrs_path_as_transaction_name", + + // JMX configuration options + "capture_jmx_metrics", + + // Logging configuration options + "log_level", + "log_file", + "log_ecs_reformatting", + "log_ecs_reformatting_additional_fields", + "log_ecs_formatter_allow_list", + "log_ecs_reformatting_dir", + "log_file_size", + "log_format_sout", + "log_format_file", + + // Messaging configuration options + "ignore_message_queues", + + // Metrics configuration options + "dedot_custom_metrics", + + // Profiling configuration options + "profiling_inferred_spans_enabled", + "profiling_inferred_spans_sampling_interval", + "profiling_inferred_spans_min_duration", + "profiling_inferred_spans_included_classes", + "profiling_inferred_spans_excluded_classes", + "profiling_inferred_spans_lib_directory", + + // Reporter configuration options + "secret_token", + "api_key", + "server_url", + "server_urls", + "disable_send", + "server_timeout", + "verify_server_cert", + "max_queue_size", + "include_process_args", + "api_request_time", + "api_request_size", + "metrics_interval", + "disable_metrics", + + // Serverless configuration options + "aws_lambda_handler", + "data_flush_timeout", + + // Stacktrace configuration options + "application_packages", + "stack_trace_limit", + "span_stack_trace_min_duration" + ); + + /** + * Lists all APM configuration keys that are not dynamic and must be configured via the config file. 
+ */ + private static final List STATIC_AGENT_KEYS = List.of( + "enabled", + "service_name", + "service_node_name", + "service_version", + "hostname", + "environment", + "global_labels", + "trace_methods_duration_threshold", + "breakdown_metrics", + "plugins_dir", + "cloud_provider", + "stress_monitoring_interval", + "log_ecs_reformatting_additional_fields", + "log_ecs_formatter_allow_list", + "log_ecs_reformatting_dir", + "log_file_size", + "log_format_sout", + "log_format_file", + "profiling_inferred_spans_lib_directory", + "secret_token", + "api_key", + "verify_server_cert", + "max_queue_size", + "include_process_args", + "metrics_interval", + "disable_metrics", + "data_flush_timeout" + ); + + /** + * Lists APM agent configuration keys that cannot be configured via the cluster settings REST API. + * This may be because the setting's value must not be changed at runtime, or because it relates + * to a feature that is not required for tracing with Elasticsearch, but which configuring could + * impact performance. + */ + private static final List PROHIBITED_AGENT_KEYS = List.of( + // ES doesn't use dynamic instrumentation + "instrument", + "enable_instrumentations", + "disable_instrumentations", + "classes_excluded_from_instrumentation", + "enable_public_api_annotation_inheritance", + + // We don't use JAX-RS + "enable_jaxrs_annotation_inheritance", + "use_jaxrs_path_as_transaction_name", + + // Must be enabled to use OpenTelemetry + "enable_experimental_instrumentations", + + // For now, we don't use central config + "central_config", + + // Config file path can't be changed + "config_file", + + // The use case for capturing traces but not sending them doesn't apply to ES + "disable_send", + + // We don't run ES in an AWS Lambda + "aws_lambda_handler" + ); + + static final String SETTING_PREFIX = "xpack.apm.tracing."; + + static final Setting.AffixSetting APM_AGENT_SETTINGS = Setting.prefixKeySetting( + SETTING_PREFIX + "agent.", + (qualifiedKey) -> { + final String[] parts = qualifiedKey.split("\\."); + final String key = parts[parts.length - 1]; + final String defaultValue = APM_AGENT_DEFAULT_SETTINGS.getOrDefault(key, ""); + return new Setting<>(qualifiedKey, defaultValue, (value) -> { + if (AGENT_KEYS.contains(key) == false) { + throw new IllegalArgumentException("Unknown APM configuration key: [" + qualifiedKey + "]"); + } + if (STATIC_AGENT_KEYS.contains(key)) { + throw new IllegalArgumentException( + "Cannot set [" + + qualifiedKey + + "] as it is not a dynamic setting - configure it via [config/elasticapm.properties] instead" + ); + } + if (PROHIBITED_AGENT_KEYS.contains(key)) { + throw new IllegalArgumentException("Configuring [" + qualifiedKey + "] is prohibited with Elasticsearch"); + } + + return value; + }, Setting.Property.NodeScope, Setting.Property.OperatorDynamic); + } + ); + + static final Setting> APM_TRACING_NAMES_INCLUDE_SETTING = Setting.listSetting( + SETTING_PREFIX + "names.include", + Collections.emptyList(), + Function.identity(), + OperatorDynamic, + NodeScope + ); + + static final Setting APM_ENABLED_SETTING = Setting.boolSetting(SETTING_PREFIX + "enabled", false, OperatorDynamic, NodeScope); +} diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index f89534d3d7ffd..f1a378f7c4f7d 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ 
b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -16,13 +16,11 @@ import io.opentelemetry.context.Context; import io.opentelemetry.context.propagation.TextMapGetter; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -32,33 +30,20 @@ import java.security.AccessController; import java.security.PrivilegedAction; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Set; -import java.util.function.Function; import java.util.stream.Collectors; -import static org.elasticsearch.common.settings.Setting.Property.Dynamic; -import static org.elasticsearch.common.settings.Setting.Property.NodeScope; +import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_AGENT_DEFAULT_SETTINGS; +import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_AGENT_SETTINGS; +import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_ENABLED_SETTING; +import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING; +import static org.elasticsearch.xpack.apm.APMAgentSettings.SETTING_PREFIX; public class APMTracer extends AbstractLifecycleComponent implements org.elasticsearch.tracing.Tracer { - private static final Logger LOGGER = LogManager.getLogger(APMTracer.class); - - private static final Set TRACE_HEADERS = Set.of(Task.TRACE_PARENT_HTTP_HEADER, Task.TRACE_STATE); - - static final Setting APM_ENABLED_SETTING = Setting.boolSetting("xpack.apm.tracing.enabled", false, Dynamic, NodeScope); - static final Setting> APM_TRACING_NAMES_INCLUDE_SETTING = Setting.listSetting( - "xpack.apm.tracing.names.include", - Collections.emptyList(), - Function.identity(), - Dynamic, - NodeScope - ); - private record ContextWrapper(Context context) { Span span() { return Span.fromContextOrNull(this.context); @@ -86,16 +71,32 @@ public APMTracer(Settings settings, ClusterService clusterService) { this.clusterService = Objects.requireNonNull(clusterService); this.enabled = APM_ENABLED_SETTING.get(settings); this.includeNames = APM_TRACING_NAMES_INCLUDE_SETTING.get(settings); - clusterService.getClusterSettings().addSettingsUpdateConsumer(APM_ENABLED_SETTING, this::setEnabled); - clusterService.getClusterSettings().addSettingsUpdateConsumer(APM_TRACING_NAMES_INCLUDE_SETTING, this::setIncludeNames); - } - public boolean isEnabled() { - return enabled; + // Apply default values for some system properties. Although we configure + // the settings in APM_AGENT_DEFAULT_SETTINGS to defer to the default values, they won't + // do anything if those settings are never configured. + APM_AGENT_DEFAULT_SETTINGS.keySet() + .forEach( + key -> APMAgentSettings.setAgentSetting( + key, + APM_AGENT_SETTINGS.getConcreteSetting(SETTING_PREFIX + "agent." 
+ key).get(settings) + ) + ); + + // Then apply values from the settings in the cluster state + APM_AGENT_SETTINGS.getAsMap(settings).forEach(APMAgentSettings::setAgentSetting); + + final ClusterSettings clusterSettings = clusterService.getClusterSettings(); + clusterSettings.addSettingsUpdateConsumer(APM_ENABLED_SETTING, this::setEnabled); + clusterSettings.addSettingsUpdateConsumer(APM_TRACING_NAMES_INCLUDE_SETTING, this::setIncludeNames); + clusterSettings.addAffixMapUpdateConsumer(APM_AGENT_SETTINGS, map -> map.forEach(APMAgentSettings::setAgentSetting), (x, y) -> {}); } private void setEnabled(boolean enabled) { this.enabled = enabled; + // The agent records data other than spans, e.g. JVM metrics, so we toggle this setting in order to + // minimise its impact to a running Elasticsearch. + APMAgentSettings.setAgentSetting("recording", Boolean.toString(enabled)); if (enabled) { createApmServices(); } else { @@ -120,9 +121,7 @@ protected void doStop() { } @Override - protected void doClose() { - - } + protected void doClose() {} private void createApmServices() { assert this.enabled; @@ -131,6 +130,7 @@ private void createApmServices() { this.services = AccessController.doPrivileged((PrivilegedAction) () -> { var openTelemetry = GlobalOpenTelemetry.get(); var tracer = openTelemetry.getTracer("elasticsearch", Version.CURRENT.toString()); + return new APMServices(tracer, openTelemetry); }); } @@ -321,6 +321,6 @@ public String get(Map carrier, String key) { } private static boolean isSupportedContextKey(String key) { - return TRACE_HEADERS.contains(key); + return Task.TRACE_PARENT_HTTP_HEADER.equals(key) || Task.TRACE_STATE.equals(key); } } diff --git a/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy b/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy index be49209da37db..c2c58659d6f5e 100644 --- a/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy +++ b/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy @@ -10,6 +10,7 @@ grant { permission java.lang.RuntimePermission "accessSystemModules"; permission java.lang.RuntimePermission "createClassLoader"; permission java.lang.RuntimePermission "getClassLoader"; + permission java.util.PropertyPermission "elastic.apm.*", "write"; }; grant codeBase "${codebase.elastic-apm-agent}" { From 08da3a34893e8496096abfaccf735c8f24d3eed9 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Fri, 29 Apr 2022 14:22:59 +0100 Subject: [PATCH 67/90] Fix apm settings to work under assertions --- .../java/org/elasticsearch/xpack/apm/APM.java | 6 ++- .../xpack/apm/APMAgentSettings.java | 53 ++++++++++--------- 2 files changed, 34 insertions(+), 25 deletions(-) diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java index 93c14583b6772..eda3d48470c00 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java @@ -56,6 +56,10 @@ public Collection createComponents( @Override public List> getSettings() { - return List.of(APMAgentSettings.APM_ENABLED_SETTING, APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING, APMAgentSettings.APM_AGENT_SETTINGS); + return List.of( + APMAgentSettings.APM_ENABLED_SETTING, + APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING, + APMAgentSettings.APM_AGENT_SETTINGS + ); } } diff --git 
a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java index d91ff63a20fe9..7025d8a8dddef 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.apm; +import org.elasticsearch.Assertions; import org.elasticsearch.common.settings.Setting; import java.security.AccessController; @@ -228,31 +229,35 @@ static void setAgentSetting(String key, String value) { static final String SETTING_PREFIX = "xpack.apm.tracing."; - static final Setting.AffixSetting APM_AGENT_SETTINGS = Setting.prefixKeySetting( - SETTING_PREFIX + "agent.", - (qualifiedKey) -> { - final String[] parts = qualifiedKey.split("\\."); - final String key = parts[parts.length - 1]; - final String defaultValue = APM_AGENT_DEFAULT_SETTINGS.getOrDefault(key, ""); - return new Setting<>(qualifiedKey, defaultValue, (value) -> { - if (AGENT_KEYS.contains(key) == false) { - throw new IllegalArgumentException("Unknown APM configuration key: [" + qualifiedKey + "]"); - } - if (STATIC_AGENT_KEYS.contains(key)) { - throw new IllegalArgumentException( - "Cannot set [" - + qualifiedKey - + "] as it is not a dynamic setting - configure it via [config/elasticapm.properties] instead" - ); - } - if (PROHIBITED_AGENT_KEYS.contains(key)) { - throw new IllegalArgumentException("Configuring [" + qualifiedKey + "] is prohibited with Elasticsearch"); - } - + static final Setting.AffixSetting APM_AGENT_SETTINGS = Setting.prefixKeySetting(SETTING_PREFIX + "agent.", (qualifiedKey) -> { + final String[] parts = qualifiedKey.split("\\."); + final String key = parts[parts.length - 1]; + final String defaultValue = APM_AGENT_DEFAULT_SETTINGS.getOrDefault(key, ""); + return new Setting<>(qualifiedKey, defaultValue, (value) -> { + // The `Setting` constructor asserts that a setting's parser doesn't return null when called with the default + // value. This makes less sense for prefix settings, but is particularly problematic here since we validate + // the setting name and reject unknown keys. Thus, if assertions are enabled, we have to tolerate the "_na_" key, + // which comes from `Setting#prefixKeySetting()`. 
+ if (Assertions.ENABLED && qualifiedKey.equals("_na_")) { return value; - }, Setting.Property.NodeScope, Setting.Property.OperatorDynamic); - } - ); + } + if (AGENT_KEYS.contains(key) == false) { + throw new IllegalArgumentException("Unknown APM configuration key: [" + qualifiedKey + "]"); + } + if (STATIC_AGENT_KEYS.contains(key)) { + throw new IllegalArgumentException( + "Cannot set [" + + qualifiedKey + + "] as it is not a dynamic setting - configure it via [config/elasticapm.properties] instead" + ); + } + if (PROHIBITED_AGENT_KEYS.contains(key)) { + throw new IllegalArgumentException("Configuring [" + qualifiedKey + "] is prohibited with Elasticsearch"); + } + + return value; + }, Setting.Property.NodeScope, Setting.Property.OperatorDynamic); + }); static final Setting> APM_TRACING_NAMES_INCLUDE_SETTING = Setting.listSetting( SETTING_PREFIX + "names.include", From f63a154c1c0b10ac6def13e2f59aa97ec567aa43 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Fri, 29 Apr 2022 14:37:33 +0100 Subject: [PATCH 68/90] Updates to TRACING.md --- TRACING.md | 30 +++++++++++++++++++----------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/TRACING.md b/TRACING.md index 547e3869849aa..dc6da19480ecd 100644 --- a/TRACING.md +++ b/TRACING.md @@ -11,14 +11,28 @@ perform instrumentation and tracing must use these abstractions. Separately, there is the [`apm-integration`](./x-pack/plugins/apm-integration/) module, which works with the OpenTelemetry API directly to record trace data. +Underneath the OTel API, we use Elastic's [APM agent for Java][agent], which +attaches at runtime to the Elasticsearch JVM and removes the need for +Elasticsearch to hard-code the use of an SDK. ## How is tracing configured? + * The `xpack.apm.tracing.enabled` setting must be set to `true` + * The APM agent must be both enabled and configured with server credentials. + See below. + We have a config file in [`config/elasticapm.properties`][config], which configures settings that are not dynamic, or should not be changed at runtime. Other settings can be configured at runtime by using the cluster settings API, and setting `xpack.apm.tracing.agent.` with a string value, where `` -is the APM agent key that you want to configure. +is the APM agent key that you want to configure. For example, to change the +sampling rate: + + curl -XPUT \ + -H "Content-type: application/json" \ + -u "$USERNAME:$PASSWORD" \ + -d '{ "persistent": { "xpack.apm.tracing.agent.transaction_sample_rate": "0.75" } }' \ + https://localhost:9200/_cluster/settings ### More details about configuration @@ -31,21 +45,14 @@ the config file, and override them via system properties. Instead, static or sensitive config values are put in the config file, and dynamic settings are left entirely to the system properties. The Elasticsearch APM plugin has appropriate security access to set the APM-related system -properties. +properties. Calls to the ES settings REST API are translated into system +property writes, which the agent later picks up and applies. ## Where is tracing data sent? -You need to have an OpenTelemetry server running somewhere. For example, you can +You need to have an APM server running somewhere. For example, you can create a deployment in Elastic Cloud with Elastic's APM integration. -## How is tracing data sent? - -We use Elastic's APM agent for Java, which attaches at runtime to the -Elasticsearch JVM, which removes the need for Elasticsearch to hard-code the use -of an SDK. 
The agent is configured using a configuration file at -"config/elasticapm.properties". By default, the agent is disabled, so it is -present as a Java agent, but will do nothing. - ## What do we trace? We primarily trace "tasks". The tasks framework in Elasticsearch allows work to @@ -123,3 +130,4 @@ explicitly opening a scope via the `Tracer`. [tracing]: ./server/src/main/java/org/elasticsearch/tracing/ [config]: ./x-pack/plugin/apm-integration/src/main/config/elasticapm.properties [agent-config]: https://www.elastic.co/guide/en/apm/agent/java/master/configuration.html +[agent]: https://www.elastic.co/guide/en/apm/agent/java/current/index.html From 31ff299a832ecce085ac1e52eb4d7673f058ecf5 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Fri, 29 Apr 2022 14:38:19 +0100 Subject: [PATCH 69/90] Tweaks --- run.sh | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/run.sh b/run.sh index 290a3e23b067c..1caaedf3c09fd 100755 --- a/run.sh +++ b/run.sh @@ -35,11 +35,15 @@ fi # 2. Set the server URL # 3. Set the secret token perl -p -i -e " s|enabled: false|enabled: true| ; s|# server_url.*|server_url: $SERVER_URL| ; s|# secret_token.*|secret_token: $SECRET_TOKEN|" config/elasticapm.properties +# perl -p -i -e " s|log_level: error|log_level: debug| " config/elasticapm.properties - +# Require a debugger on 5007 in order to run: # export ES_JAVA_OPTS="-agentlib:jdwp=transport=dt_socket,server=n,suspend=y,address=*:5007 " + +# Just run but expose a debugging server on 5007 # export ES_JAVA_OPTS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5007 " +# Hardcore security manager debugging # export ES_JAVA_OPTS="-Djava.security.debug=failure" # export ES_JAVA_OPTS="-Djava.security.debug=access,failure" From fd6a9a900045e36e97524f2f0af617ca074eba2d Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Fri, 29 Apr 2022 16:49:28 +0100 Subject: [PATCH 70/90] More testing --- .../org/elasticsearch/xpack/apm/ApmIT.java | 8 +- .../elasticsearch/rest/RestController.java | 2 +- .../http/DefaultRestChannelTests.java | 41 +++++++-- .../rest/RestControllerTests.java | 89 ++++++++++--------- 4 files changed, 87 insertions(+), 53 deletions(-) diff --git a/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java b/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java index 38cba01fd4224..a83c099c6c9c2 100644 --- a/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java +++ b/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -39,11 +39,11 @@ public void configureTracing() throws IOException { final Request request = new Request("PUT", "/_cluster/settings"); request.setOptions(requestOptions); + // The default sample rate is lower, meaning the traces that want to record might be skipped. request.setJsonEntity(""" { "persistent": { "xpack.apm.tracing.agent.transaction_sample_rate": "1.0" } } """); - final Response response = client().performRequest(request); - assertOK(response); + assertOK(client().performRequest(request)); } /** @@ -63,7 +63,6 @@ private void checkTracesDataStream() throws IOException { private void assertTracesExist() throws Exception { assertBusy(() -> { - logger.error("Looping..."); final Request tracesSearchRequest = new Request("GET", "/traces-apm-default/_search"); tracesSearchRequest.setJsonEntity(""" { @@ -88,7 +87,8 @@ private void generateTraces() throws IOException { } /** - * We don't need to clean up the cluster, particularly as we have Kibana and APM server using ES. 
+ * We don't need to clean up the cluster, particularly as we have Kibana and APM server using ES as well as our test, so declare + * that we need to preserve the cluster in order to prevent the usual cleanup logic from running (and inevitably failing). */ @Override protected boolean preserveClusterUponCompletion() { diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index b9bbc513bb0b5..99faa630eb85f 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -301,7 +301,7 @@ public void registerHandler(final RestHandler handler) { @Override public void dispatchRequest(RestRequest request, RestChannel channel, ThreadContext threadContext) { threadContext.addResponseHeader(ELASTIC_PRODUCT_HTTP_HEADER, ELASTIC_PRODUCT_HTTP_HEADER_VALUE); - try (var ignored = threadContext.newTraceContext()) { + try { tryAllHandlers(request, channel, threadContext); } catch (Exception e) { try { diff --git a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java index 354df43465469..0d06dbdd0d314 100644 --- a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java +++ b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ByteArray; @@ -27,9 +28,11 @@ import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ActionListenerUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; import org.junit.After; @@ -43,7 +46,10 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; @@ -51,22 +57,27 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.atLeastOnce; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class DefaultRestChannelTests extends ESTestCase { private ThreadPool threadPool; private MockBigArrays bigArrays; private HttpChannel httpChannel; + private HttpTracer httpTracer; @Before public void setup() { httpChannel = mock(HttpChannel.class); threadPool = new TestThreadPool("test"); bigArrays = new MockBigArrays(new 
MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); + httpTracer = mock(HttpTracer.class); } @After @@ -147,7 +158,7 @@ public void testHeadersSet() { handlingSettings, threadPool.getThreadContext(), CorsHandler.fromSettings(settings), - null + httpTracer ); TestRestResponse resp = new TestRestResponse(); final String customHeader = "custom-header"; @@ -183,7 +194,7 @@ public void testCookiesSet() { handlingSettings, threadPool.getThreadContext(), CorsHandler.fromSettings(settings), - null + httpTracer ); channel.sendResponse(new TestRestResponse()); @@ -211,7 +222,7 @@ public void testReleaseInListener() throws IOException { handlingSettings, threadPool.getThreadContext(), CorsHandler.fromSettings(settings), - null + httpTracer ); final BytesRestResponse response = new BytesRestResponse( RestStatus.INTERNAL_SERVER_ERROR, @@ -278,7 +289,7 @@ public void testConnectionClose() throws Exception { handlingSettings, threadPool.getThreadContext(), CorsHandler.fromSettings(settings), - null + httpTracer ); channel.sendResponse(new TestRestResponse()); Class> listenerClass = (Class>) (Class) ActionListener.class; @@ -317,7 +328,7 @@ public RestRequest.Method method() { HttpHandlingSettings.fromSettings(Settings.EMPTY), threadPool.getThreadContext(), CorsHandler.fromSettings(Settings.EMPTY), - null + httpTracer ); // ESTestCase#after will invoke ensureAllArraysAreReleased which will fail if the response content was not released @@ -363,7 +374,7 @@ public HttpResponse createResponse(RestStatus status, BytesReference content) { HttpHandlingSettings.fromSettings(Settings.EMPTY), threadPool.getThreadContext(), CorsHandler.fromSettings(Settings.EMPTY), - null + httpTracer ); // ESTestCase#after will invoke ensureAllArraysAreReleased which will fail if the response content was not released @@ -380,6 +391,22 @@ public HttpResponse createResponse(RestStatus status, BytesReference content) { } } + /** + * Check that when a REST channel sends a response, then it stops the active trace. 
+ */ + public void testTraceStopped() { + // Configure the httpChannel mock to call the action listener passed to it when sending a response + doAnswer(invocationOnMock -> { + ActionListener listener = invocationOnMock.getArgument(1); + listener.onResponse(null); + return null; + }).when(httpChannel).sendResponse(any(HttpResponse.class), anyActionListener()); + + executeRequest(Settings.EMPTY, "request-host"); + + verify(httpTracer).onTraceStopped(any(DefaultRestChannel.class)); + } + private TestHttpResponse executeRequest(final Settings settings, final String host) { return executeRequest(settings, null, host); } @@ -401,7 +428,7 @@ private TestHttpResponse executeRequest(final Settings settings, final String or httpHandlingSettings, threadPool.getThreadContext(), new CorsHandler(CorsHandler.buildConfig(settings)), - null + httpTracer ); channel.sendResponse(new TestRestResponse()); diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java index 70d0f3247ca46..136188e8071a1 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -46,7 +46,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; @@ -59,13 +58,12 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doCallRealMethod; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; public class RestControllerTests extends ESTestCase { @@ -123,22 +121,6 @@ public void testApplyRelevantHeaders() throws Exception { restHeaders.put("header.2", Collections.singletonList("true")); restHeaders.put("header.3", Collections.singletonList("false")); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(restHeaders).build(); - final RestController spyRestController = spy(restController); - when(spyRestController.getAllHandlers(null, fakeRequest.rawPath())).thenReturn(new Iterator<>() { - @Override - public boolean hasNext() { - return false; - } - - @Override - public MethodHandlers next() { - return new MethodHandlers("/").addMethod(GET, RestApiVersion.current(), (request, channel, client) -> { - assertEquals("true", threadContext.getHeader("header.1")); - assertEquals("true", threadContext.getHeader("header.2")); - assertNull(threadContext.getHeader("header.3")); - }); - } - }); AssertingChannel channel = new AssertingChannel(fakeRequest, false, RestStatus.BAD_REQUEST); restController.dispatchRequest(fakeRequest, channel, threadContext); // the rest controller relies on the caller to stash the context, so we should expect these values here as we didn't stash the @@ -169,37 +151,27 @@ public void testRequestWithDisallowedMultiValuedHeader() { assertTrue(channel.getSendResponseCalled()); } - public void testTraceParentAndTraceId() throws Exception { + /** + * Check that the REST controller picks up and propagates W3C trace context headers via the {@link ThreadContext}. 
+ * @see Trace Context - W3C Recommendation + */ + public void testTraceParentAndTraceId() { final ThreadContext threadContext = client.threadPool().getThreadContext(); Set headers = new HashSet<>(Arrays.asList(new RestHeaderDefinition(Task.TRACE_PARENT_HTTP_HEADER, false))); final RestController restController = new RestController(headers, null, null, circuitBreakerService, usageService); Map> restHeaders = new HashMap<>(); - restHeaders.put( - Task.TRACE_PARENT_HTTP_HEADER, - Collections.singletonList("00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01") - ); + final String traceParentValue = "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"; + restHeaders.put(Task.TRACE_PARENT_HTTP_HEADER, Collections.singletonList(traceParentValue)); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(restHeaders).build(); - final RestController spyRestController = spy(restController); - when(spyRestController.getAllHandlers(null, fakeRequest.rawPath())).thenReturn(new Iterator<>() { - @Override - public boolean hasNext() { - return false; - } - - @Override - public MethodHandlers next() { - return new MethodHandlers("/").addMethod(GET, RestApiVersion.current(), (request, channel, client) -> { - assertEquals("0af7651916cd43dd8448eb211c80319c", threadContext.getHeader(Task.TRACE_ID)); - assertNull(threadContext.getHeader(Task.TRACE_PARENT_HTTP_HEADER)); - }); - } - }); AssertingChannel channel = new AssertingChannel(fakeRequest, false, RestStatus.BAD_REQUEST); + restController.dispatchRequest(fakeRequest, channel, threadContext); + // the rest controller relies on the caller to stash the context, so we should expect these values here as we didn't stash the // context in this test - assertEquals("0af7651916cd43dd8448eb211c80319c", threadContext.getHeader(Task.TRACE_ID)); - assertNull(threadContext.getHeader(Task.TRACE_PARENT_HTTP_HEADER)); + assertThat(threadContext.getHeader(Task.TRACE_ID), equalTo("0af7651916cd43dd8448eb211c80319c")); + assertThat(threadContext.getHeader(Task.TRACE_PARENT_HTTP_HEADER), nullValue()); + assertThat(threadContext.getTransient("parent_" + Task.TRACE_PARENT_HTTP_HEADER), equalTo(traceParentValue)); } public void testRequestWithDisallowedMultiValuedHeaderButSameValues() { @@ -734,6 +706,25 @@ public void testDispatchCompatibleHandler() { assertTrue(channel.getSendResponseCalled()); } + /** + * Check that the REST controller initiates tracing on REST channels. 
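+ * Dispatching a request should cause {@code startTrace()} to be called on the channel, which the
+ * {@code AssertingChannel} used below records via its {@code traceStarted} flag.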
+ */ + public void testDispatchStartsTracingOnChannels() { + RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService); + + RestRequest fakeRequest = requestWithContent(randomCompatibleMediaType(RestApiVersion.minimumSupported())); + AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.OK); + + RestHandler handler = (request, channel1, client) -> channel1.sendResponse( + new BytesRestResponse(RestStatus.OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY) + ); + restController.registerHandler(GET, "/foo", RestApiVersion.minimumSupported(), handler); + + restController.dispatchRequest(fakeRequest, channel, new ThreadContext(Settings.EMPTY)); + + assertThat("Expected tracing to have started on REST channel", channel.traceStarted, is(true)); + } + public void testDispatchCompatibleRequestToNewlyAddedHandler() { RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService); @@ -876,6 +867,8 @@ public static final class AssertingChannel extends AbstractRestChannel { private final RestStatus expectedStatus; private final AtomicReference responseReference = new AtomicReference<>(); + private boolean traceStarted = false; + private boolean traceStopped = false; public AssertingChannel(RestRequest request, boolean detailedErrorsEnabled, RestStatus expectedStatus) { super(request, detailedErrorsEnabled); @@ -896,6 +889,20 @@ boolean getSendResponseCalled() { return getRestResponse() != null; } + @Override + public void startTrace() { + traceStarted = true; + } + + @Override + public void stopTrace() { + assertThat("tried to stop a trace but it wasn't started", traceStarted, is(true)); + traceStopped = true; + } + + public boolean isTraceComplete() { + return traceStarted && traceStopped; + } } private static final class ExceptionThrowingChannel extends AbstractRestChannel { From 562e6ed5fa5225c1b20a1c2414c8ab834831801f Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Fri, 29 Apr 2022 16:49:28 +0100 Subject: [PATCH 71/90] More testing --- .../org/elasticsearch/xpack/apm/ApmIT.java | 8 +- .../elasticsearch/rest/RestController.java | 2 +- .../http/DefaultRestChannelTests.java | 34 +++++-- .../rest/RestControllerTests.java | 89 ++++++++++--------- 4 files changed, 80 insertions(+), 53 deletions(-) diff --git a/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java b/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java index 38cba01fd4224..a83c099c6c9c2 100644 --- a/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java +++ b/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -39,11 +39,11 @@ public void configureTracing() throws IOException { final Request request = new Request("PUT", "/_cluster/settings"); request.setOptions(requestOptions); + // The default sample rate is lower, meaning the traces that want to record might be skipped. 
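+ // Setting xpack.apm.tracing.agent.transaction_sample_rate to 1.0 asks the agent to record every transaction,
+ // so the traces this test looks for are not lost to sampling.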
request.setJsonEntity(""" { "persistent": { "xpack.apm.tracing.agent.transaction_sample_rate": "1.0" } } """); - final Response response = client().performRequest(request); - assertOK(response); + assertOK(client().performRequest(request)); } /** @@ -63,7 +63,6 @@ private void checkTracesDataStream() throws IOException { private void assertTracesExist() throws Exception { assertBusy(() -> { - logger.error("Looping..."); final Request tracesSearchRequest = new Request("GET", "/traces-apm-default/_search"); tracesSearchRequest.setJsonEntity(""" { @@ -88,7 +87,8 @@ private void generateTraces() throws IOException { } /** - * We don't need to clean up the cluster, particularly as we have Kibana and APM server using ES. + * We don't need to clean up the cluster, particularly as we have Kibana and APM server using ES as well as our test, so declare + * that we need to preserve the cluster in order to prevent the usual cleanup logic from running (and inevitably failing). */ @Override protected boolean preserveClusterUponCompletion() { diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index b9bbc513bb0b5..99faa630eb85f 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -301,7 +301,7 @@ public void registerHandler(final RestHandler handler) { @Override public void dispatchRequest(RestRequest request, RestChannel channel, ThreadContext threadContext) { threadContext.addResponseHeader(ELASTIC_PRODUCT_HTTP_HEADER, ELASTIC_PRODUCT_HTTP_HEADER_VALUE); - try (var ignored = threadContext.newTraceContext()) { + try { tryAllHandlers(request, channel, threadContext); } catch (Exception e) { try { diff --git a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java index 354df43465469..2fd40c6d50b91 100644 --- a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java +++ b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java @@ -44,6 +44,7 @@ import java.util.Map; import java.util.Objects; +import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; @@ -52,6 +53,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.atLeastOnce; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -61,12 +63,14 @@ public class DefaultRestChannelTests extends ESTestCase { private ThreadPool threadPool; private MockBigArrays bigArrays; private HttpChannel httpChannel; + private HttpTracer httpTracer; @Before public void setup() { httpChannel = mock(HttpChannel.class); threadPool = new TestThreadPool("test"); bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); + httpTracer = mock(HttpTracer.class); } @After @@ -147,7 +151,7 @@ public void testHeadersSet() { handlingSettings, threadPool.getThreadContext(), CorsHandler.fromSettings(settings), - null + httpTracer ); TestRestResponse resp = new TestRestResponse(); final String customHeader = "custom-header"; @@ -183,7 +187,7 @@ public void testCookiesSet() { 
handlingSettings, threadPool.getThreadContext(), CorsHandler.fromSettings(settings), - null + httpTracer ); channel.sendResponse(new TestRestResponse()); @@ -211,7 +215,7 @@ public void testReleaseInListener() throws IOException { handlingSettings, threadPool.getThreadContext(), CorsHandler.fromSettings(settings), - null + httpTracer ); final BytesRestResponse response = new BytesRestResponse( RestStatus.INTERNAL_SERVER_ERROR, @@ -278,7 +282,7 @@ public void testConnectionClose() throws Exception { handlingSettings, threadPool.getThreadContext(), CorsHandler.fromSettings(settings), - null + httpTracer ); channel.sendResponse(new TestRestResponse()); Class> listenerClass = (Class>) (Class) ActionListener.class; @@ -317,7 +321,7 @@ public RestRequest.Method method() { HttpHandlingSettings.fromSettings(Settings.EMPTY), threadPool.getThreadContext(), CorsHandler.fromSettings(Settings.EMPTY), - null + httpTracer ); // ESTestCase#after will invoke ensureAllArraysAreReleased which will fail if the response content was not released @@ -363,7 +367,7 @@ public HttpResponse createResponse(RestStatus status, BytesReference content) { HttpHandlingSettings.fromSettings(Settings.EMPTY), threadPool.getThreadContext(), CorsHandler.fromSettings(Settings.EMPTY), - null + httpTracer ); // ESTestCase#after will invoke ensureAllArraysAreReleased which will fail if the response content was not released @@ -380,6 +384,22 @@ public HttpResponse createResponse(RestStatus status, BytesReference content) { } } + /** + * Check that when a REST channel sends a response, then it stops the active trace. + */ + public void testTraceStopped() { + // Configure the httpChannel mock to call the action listener passed to it when sending a response + doAnswer(invocationOnMock -> { + ActionListener listener = invocationOnMock.getArgument(1); + listener.onResponse(null); + return null; + }).when(httpChannel).sendResponse(any(HttpResponse.class), anyActionListener()); + + executeRequest(Settings.EMPTY, "request-host"); + + verify(httpTracer).onTraceStopped(any(DefaultRestChannel.class)); + } + private TestHttpResponse executeRequest(final Settings settings, final String host) { return executeRequest(settings, null, host); } @@ -401,7 +421,7 @@ private TestHttpResponse executeRequest(final Settings settings, final String or httpHandlingSettings, threadPool.getThreadContext(), new CorsHandler(CorsHandler.buildConfig(settings)), - null + httpTracer ); channel.sendResponse(new TestRestResponse()); diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java index 70d0f3247ca46..136188e8071a1 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -46,7 +46,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; @@ -59,13 +58,12 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doCallRealMethod; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; public 
class RestControllerTests extends ESTestCase { @@ -123,22 +121,6 @@ public void testApplyRelevantHeaders() throws Exception { restHeaders.put("header.2", Collections.singletonList("true")); restHeaders.put("header.3", Collections.singletonList("false")); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(restHeaders).build(); - final RestController spyRestController = spy(restController); - when(spyRestController.getAllHandlers(null, fakeRequest.rawPath())).thenReturn(new Iterator<>() { - @Override - public boolean hasNext() { - return false; - } - - @Override - public MethodHandlers next() { - return new MethodHandlers("/").addMethod(GET, RestApiVersion.current(), (request, channel, client) -> { - assertEquals("true", threadContext.getHeader("header.1")); - assertEquals("true", threadContext.getHeader("header.2")); - assertNull(threadContext.getHeader("header.3")); - }); - } - }); AssertingChannel channel = new AssertingChannel(fakeRequest, false, RestStatus.BAD_REQUEST); restController.dispatchRequest(fakeRequest, channel, threadContext); // the rest controller relies on the caller to stash the context, so we should expect these values here as we didn't stash the @@ -169,37 +151,27 @@ public void testRequestWithDisallowedMultiValuedHeader() { assertTrue(channel.getSendResponseCalled()); } - public void testTraceParentAndTraceId() throws Exception { + /** + * Check that the REST controller picks up and propagates W3C trace context headers via the {@link ThreadContext}. + * @see Trace Context - W3C Recommendation + */ + public void testTraceParentAndTraceId() { final ThreadContext threadContext = client.threadPool().getThreadContext(); Set headers = new HashSet<>(Arrays.asList(new RestHeaderDefinition(Task.TRACE_PARENT_HTTP_HEADER, false))); final RestController restController = new RestController(headers, null, null, circuitBreakerService, usageService); Map> restHeaders = new HashMap<>(); - restHeaders.put( - Task.TRACE_PARENT_HTTP_HEADER, - Collections.singletonList("00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01") - ); + final String traceParentValue = "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"; + restHeaders.put(Task.TRACE_PARENT_HTTP_HEADER, Collections.singletonList(traceParentValue)); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(restHeaders).build(); - final RestController spyRestController = spy(restController); - when(spyRestController.getAllHandlers(null, fakeRequest.rawPath())).thenReturn(new Iterator<>() { - @Override - public boolean hasNext() { - return false; - } - - @Override - public MethodHandlers next() { - return new MethodHandlers("/").addMethod(GET, RestApiVersion.current(), (request, channel, client) -> { - assertEquals("0af7651916cd43dd8448eb211c80319c", threadContext.getHeader(Task.TRACE_ID)); - assertNull(threadContext.getHeader(Task.TRACE_PARENT_HTTP_HEADER)); - }); - } - }); AssertingChannel channel = new AssertingChannel(fakeRequest, false, RestStatus.BAD_REQUEST); + restController.dispatchRequest(fakeRequest, channel, threadContext); + // the rest controller relies on the caller to stash the context, so we should expect these values here as we didn't stash the // context in this test - assertEquals("0af7651916cd43dd8448eb211c80319c", threadContext.getHeader(Task.TRACE_ID)); - assertNull(threadContext.getHeader(Task.TRACE_PARENT_HTTP_HEADER)); + assertThat(threadContext.getHeader(Task.TRACE_ID), equalTo("0af7651916cd43dd8448eb211c80319c")); + 
assertThat(threadContext.getHeader(Task.TRACE_PARENT_HTTP_HEADER), nullValue()); + assertThat(threadContext.getTransient("parent_" + Task.TRACE_PARENT_HTTP_HEADER), equalTo(traceParentValue)); } public void testRequestWithDisallowedMultiValuedHeaderButSameValues() { @@ -734,6 +706,25 @@ public void testDispatchCompatibleHandler() { assertTrue(channel.getSendResponseCalled()); } + /** + * Check that the REST controller initiates tracing on REST channels. + */ + public void testDispatchStartsTracingOnChannels() { + RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService); + + RestRequest fakeRequest = requestWithContent(randomCompatibleMediaType(RestApiVersion.minimumSupported())); + AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.OK); + + RestHandler handler = (request, channel1, client) -> channel1.sendResponse( + new BytesRestResponse(RestStatus.OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY) + ); + restController.registerHandler(GET, "/foo", RestApiVersion.minimumSupported(), handler); + + restController.dispatchRequest(fakeRequest, channel, new ThreadContext(Settings.EMPTY)); + + assertThat("Expected tracing to have started on REST channel", channel.traceStarted, is(true)); + } + public void testDispatchCompatibleRequestToNewlyAddedHandler() { RestController restController = new RestController(Collections.emptySet(), null, client, circuitBreakerService, usageService); @@ -876,6 +867,8 @@ public static final class AssertingChannel extends AbstractRestChannel { private final RestStatus expectedStatus; private final AtomicReference responseReference = new AtomicReference<>(); + private boolean traceStarted = false; + private boolean traceStopped = false; public AssertingChannel(RestRequest request, boolean detailedErrorsEnabled, RestStatus expectedStatus) { super(request, detailedErrorsEnabled); @@ -896,6 +889,20 @@ boolean getSendResponseCalled() { return getRestResponse() != null; } + @Override + public void startTrace() { + traceStarted = true; + } + + @Override + public void stopTrace() { + assertThat("tried to stop a trace but it wasn't started", traceStarted, is(true)); + traceStopped = true; + } + + public boolean isTraceComplete() { + return traceStarted && traceStopped; + } } private static final class ExceptionThrowingChannel extends AbstractRestChannel { From f8431e7bb104994e91cb96e2aca2a10d4785a1bb Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Mon, 2 May 2022 13:41:01 +0100 Subject: [PATCH 72/90] More TaskManager unit tests --- .../elasticsearch/tasks/TaskManagerTests.java | 78 ++++++++++++++++++- 1 file changed, 77 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java b/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java index 2ebd2db289d26..ffd07b424ac22 100644 --- a/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java +++ b/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java @@ -10,8 +10,14 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.cluster.node.tasks.TransportTasksActionTests; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.TransportAction; import 
org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -265,6 +271,9 @@ public void testTaskAccounting() { assertNull(taskManager.childTasksPerConnection(task1.getId(), connection1)); } + /** + * Check that registering a task also causes tracing to be started on that task. + */ public void testRegisterTaskStartsTracing() { final Tracer mockTracer = Mockito.mock(Tracer.class); final TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Set.of(), mockTracer); @@ -272,8 +281,28 @@ public void testRegisterTaskStartsTracing() { final Task task = taskManager.register("testType", "testAction", new TaskAwareRequest() { @Override - public void setParentTask(TaskId taskId) { + public void setParentTask(TaskId taskId) {} + + @Override + public TaskId getParentTask() { + return TaskId.EMPTY_TASK_ID; } + }); + + verify(mockTracer).onTraceStarted(any(), eq(task)); + } + + /** + * Check that unregistering a task also causes tracing to be stopped on that task. + */ + public void testUnregisterTaskStopsTracing() { + final Tracer mockTracer = Mockito.mock(Tracer.class); + final TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Set.of(), mockTracer); + + final Task task = taskManager.register("testType", "testAction", new TaskAwareRequest() { + + @Override + public void setParentTask(TaskId taskId) {} @Override public TaskId getParentTask() { @@ -281,7 +310,54 @@ public TaskId getParentTask() { } }); + taskManager.unregister(task); + + verify(mockTracer).onTraceStopped(eq(task)); + } + + /** + * Check that registering and executing a task also causes tracing to be started and stopped on that task. 
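+ * {@code registerAndExecute} should report {@code onTraceStarted} when the task is registered and
+ * {@code onTraceStopped} once the action has completed and the task is unregistered.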
+ */ + public void testRegisterAndExecuteStartsAndStopsTracing() { + final Tracer mockTracer = Mockito.mock(Tracer.class); + final TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Set.of(), mockTracer); + + final Task task = taskManager.registerAndExecute( + "testType", + new TransportAction("actionName", new ActionFilters(Set.of()), taskManager) { + @Override + protected void doExecute(Task task, ActionRequest request, ActionListener listener) { + listener.onResponse(new ActionResponse() { + @Override + public void writeTo(StreamOutput out) {} + }); + } + }, + new ActionRequest() { + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public TaskId getParentTask() { + return TaskId.EMPTY_TASK_ID; + } + }, + null, + new TaskListener<>() { + @Override + public void onResponse(Task task, ActionResponse actionResponse) {} + + @Override + public void onFailure(Task task, Exception e) { + throw new AssertionError(e); + } + } + ); + verify(mockTracer).onTraceStarted(any(), eq(task)); + verify(mockTracer).onTraceStopped(eq(task)); } static class CancellableRequest extends TransportRequest { From c55632a1d96b256782bbe9f589503e3a98462b87 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Tue, 3 May 2022 15:29:38 +0100 Subject: [PATCH 73/90] Add unit testing --- x-pack/plugin/apm-integration/build.gradle | 19 +- .../elastic-apm-agent-1.30.0.jar.sha1 | 1 - .../elastic-apm-agent-1.30.1.jar.sha1 | 1 + .../licenses/log4j-slf4j-impl-2.17.1.jar.sha1 | 1 + .../licenses/log4j-slf4j-impl-LICENSE.txt | 202 +++++++++ .../licenses/log4j-slf4j-impl-NOTICE.txt | 8 + .../opentelemetry-api-1.12.0.jar.sha1 | 1 + .../licenses/opentelemetry-api-1.9.0.jar.sha1 | 1 - .../opentelemetry-context-1.12.0.jar.sha1 | 1 + .../opentelemetry-context-1.9.0.jar.sha1 | 1 - ...pentelemetry-semconv-1.12.0-alpha.jar.sha1 | 1 + ...opentelemetry-semconv-1.9.0-alpha.jar.sha1 | 1 - .../licenses/slf4j-api-1.6.2.jar.sha1 | 1 + .../licenses/slf4j-api-LICENSE.txt | 21 + .../licenses/slf4j-api-NOTICE.txt | 0 .../org/elasticsearch/xpack/apm/ApmIT.java | 396 ------------------ .../xpack/apm/TestOpenTelemetry.java | 235 ----------- .../java/org/elasticsearch/xpack/apm/APM.java | 2 +- .../xpack/apm/APMAgentSettings.java | 84 ++-- .../elasticsearch/xpack/apm/APMTracer.java | 164 ++++---- .../xpack/apm/APMTracerTests.java | 211 ++++++++++ 21 files changed, 595 insertions(+), 757 deletions(-) delete mode 100644 x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.30.0.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.30.1.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/log4j-slf4j-impl-2.17.1.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/log4j-slf4j-impl-LICENSE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/log4j-slf4j-impl-NOTICE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.12.0.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.9.0.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.12.0.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.9.0.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.12.0-alpha.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.9.0-alpha.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/slf4j-api-1.6.2.jar.sha1 create 
mode 100644 x-pack/plugin/apm-integration/licenses/slf4j-api-LICENSE.txt create mode 100644 x-pack/plugin/apm-integration/licenses/slf4j-api-NOTICE.txt delete mode 100644 x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java delete mode 100644 x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/TestOpenTelemetry.java create mode 100644 x-pack/plugin/apm-integration/src/test/java/org/elasticsearch/xpack/apm/APMTracerTests.java diff --git a/x-pack/plugin/apm-integration/build.gradle b/x-pack/plugin/apm-integration/build.gradle index dfd5bbf4ece4c..b1225f69bd0dc 100644 --- a/x-pack/plugin/apm-integration/build.gradle +++ b/x-pack/plugin/apm-integration/build.gradle @@ -4,16 +4,16 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-es-plugin' -apply plugin: 'elasticsearch.internal-test-artifact' +// apply plugin: 'elasticsearch.internal-cluster-test' +// apply plugin: 'elasticsearch.internal-test-artifact' esplugin { - name 'apm-integration' + name 'x-pack-apm-integration' description 'Provides APM integration for Elasticsearch' classname 'org.elasticsearch.xpack.apm.APM' extendedPlugins = ['x-pack-core'] - javaOpts = "-javaagent:modules/apm-integration/elastic-apm-agent-${versions.apm_agent}.jar" + javaOpts = "-javaagent:modules/x-pack-apm-integration/elastic-apm-agent-${versions.apm_agent}.jar" } dependencies { @@ -27,14 +27,17 @@ dependencies { api "org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}" compileOnly project(path: xpackModule('core')) - internalClusterTestImplementation(testArtifact(project(xpackModule('core')))) - internalClusterTestImplementation(testArtifact(project(xpackModule('security')))) { + testImplementation(testArtifact(project(xpackModule('core')))) + testImplementation(testArtifact(project(xpackModule('security')))) { exclude group: 'com.google.guava', module: 'guava' } } -// FIXME: no unit-test for now -tasks.named("test").configure { enabled = false } +tasks.named("thirdPartyAudit").configure { + ignoreMissingClasses( + 'org.slf4j.ext.EventData' + ) +} tasks.named("dependencyLicenses").configure { mapping from: /opentelemetry-.*/, to: 'opentelemetry' diff --git a/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.30.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.30.0.jar.sha1 deleted file mode 100644 index 2ea3c2249dd42..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.30.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bccb70b60db2ab5900f6bb91ac5a71f950365913 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.30.1.jar.sha1 b/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.30.1.jar.sha1 new file mode 100644 index 0000000000000..0957a24284398 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.30.1.jar.sha1 @@ -0,0 +1 @@ +631752f53512fb9daf9ffe6cd95de80430968ad5 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/log4j-slf4j-impl-2.17.1.jar.sha1 b/x-pack/plugin/apm-integration/licenses/log4j-slf4j-impl-2.17.1.jar.sha1 new file mode 100644 index 0000000000000..894ed8d886c3f --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/log4j-slf4j-impl-2.17.1.jar.sha1 @@ -0,0 +1 @@ +84692d456bcce689355d33d68167875e486954dd \ No newline at end of file diff --git 
a/x-pack/plugin/apm-integration/licenses/log4j-slf4j-impl-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/log4j-slf4j-impl-LICENSE.txt new file mode 100644 index 0000000000000..6279e5206de13 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/log4j-slf4j-impl-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. 
+ + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 1999-2005 The Apache Software Foundation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/x-pack/plugin/apm-integration/licenses/log4j-slf4j-impl-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/log4j-slf4j-impl-NOTICE.txt new file mode 100644 index 0000000000000..ea99ef1d4726b --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/log4j-slf4j-impl-NOTICE.txt @@ -0,0 +1,8 @@ + +Apache Log4j SLF4J Binding +Copyright 1999-2017 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + + diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.12.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.12.0.jar.sha1 new file mode 100644 index 0000000000000..4531a5d1639d9 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.12.0.jar.sha1 @@ -0,0 +1 @@ +47ef9e467f9b734d4e4e4df15c3bd62dd991db5d \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.9.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.9.0.jar.sha1 deleted file mode 100644 index d056f5fd3e2cf..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.9.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -464e96a2c7467aa46cc5e9e1a721dd4f8a7e5311 diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.12.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.12.0.jar.sha1 new file mode 100644 index 0000000000000..17f2f0b1d8d9a --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.12.0.jar.sha1 @@ -0,0 +1 @@ +7d8ec9f863faad36cbaa2f9bd10195ec5943fd70 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.9.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.9.0.jar.sha1 deleted file mode 100644 index d4d459c0ac9f6..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.9.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5fa03396a9b9e8864c3d92dce196cdd7ffe86fdb diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.12.0-alpha.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.12.0-alpha.jar.sha1 new file mode 100644 index 0000000000000..401f5203ae13f --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.12.0-alpha.jar.sha1 @@ -0,0 +1 @@ +6542c9a536144567682cf95fd7cba97dc6eacc4a \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.9.0-alpha.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.9.0-alpha.jar.sha1 deleted file mode 100644 index 13cb64f5bc13e..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.9.0-alpha.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fe3b7c4eb863cf433594ba21dafa74206b6ab760 diff --git a/x-pack/plugin/apm-integration/licenses/slf4j-api-1.6.2.jar.sha1 b/x-pack/plugin/apm-integration/licenses/slf4j-api-1.6.2.jar.sha1 new file mode 100644 index 0000000000000..a2f93ea55802b --- /dev/null +++ 
b/x-pack/plugin/apm-integration/licenses/slf4j-api-1.6.2.jar.sha1 @@ -0,0 +1 @@ +8619e95939167fb37245b5670135e4feb0ec7d50 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/slf4j-api-LICENSE.txt b/x-pack/plugin/apm-integration/licenses/slf4j-api-LICENSE.txt new file mode 100644 index 0000000000000..52055e61de46f --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/slf4j-api-LICENSE.txt @@ -0,0 +1,21 @@ +Copyright (c) 2004-2014 QOS.ch +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/x-pack/plugin/apm-integration/licenses/slf4j-api-NOTICE.txt b/x-pack/plugin/apm-integration/licenses/slf4j-api-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java deleted file mode 100644 index 73c18d2953521..0000000000000 --- a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/ApmIT.java +++ /dev/null @@ -1,396 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.apm; - -//import io.opentelemetry.api.common.AttributeKey; -//import io.opentelemetry.sdk.trace.data.SpanData; -// -//import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; -//import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; -//import org.elasticsearch.action.bulk.BulkRequestBuilder; -//import org.elasticsearch.action.index.IndexRequestBuilder; -//import org.elasticsearch.action.search.SearchAction; -//import org.elasticsearch.action.search.SearchTransportService; -//import org.elasticsearch.action.support.WriteRequest; -//import org.elasticsearch.client.Request; -//import org.elasticsearch.client.Response; -//import org.elasticsearch.cluster.coordination.PublicationTransportHandler; -//import org.elasticsearch.cluster.metadata.IndexMetadata; -//import org.elasticsearch.common.settings.SecureString; -//import org.elasticsearch.common.settings.Settings; -//import org.elasticsearch.common.util.CollectionUtils; -//import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; -//import org.elasticsearch.plugins.Plugin; -//import org.elasticsearch.plugins.PluginsService; -//import org.elasticsearch.tasks.Task; -//import org.elasticsearch.tasks.TaskId; -//import org.elasticsearch.tasks.TaskTracer; -import org.elasticsearch.test.SecurityIntegTestCase; -//import org.elasticsearch.test.SecuritySettingsSource; -//import org.elasticsearch.test.SecuritySettingsSourceField; -//import org.elasticsearch.transport.TransportService; -//import org.elasticsearch.xcontent.XContentType; -//import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; -//import org.junit.After; -// -//import java.util.Collection; -//import java.util.Collections; -//import java.util.List; -//import java.util.concurrent.TimeUnit; -//import java.util.stream.Collectors; -// -//import static java.util.stream.Collectors.toList; -//import static org.elasticsearch.cluster.service.MasterService.STATE_UPDATE_ACTION_NAME; -//import static org.elasticsearch.indices.recovery.PeerRecoverySourceService.Actions.START_RECOVERY; -//import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -//import static org.hamcrest.Matchers.contains; -//import static org.hamcrest.Matchers.empty; -//import static org.hamcrest.Matchers.equalTo; -//import static org.hamcrest.Matchers.hasItems; -//import static org.hamcrest.Matchers.hasSize; -//import static org.hamcrest.Matchers.notNullValue; - -public class ApmIT extends SecurityIntegTestCase { - - // @Override - // protected Collection> nodePlugins() { - // return CollectionUtils.appendToCopy(super.nodePlugins(), APM.class); - // } - // - // @Override - // protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { - // Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)); - // // ((MockSecureSettings) builder.getSecureSettings()).setString( - // // APMTracer.APM_ENDPOINT_SETTING.getKey(), - // // System.getProperty("tests.apm.endpoint", "") - // // ); - // // ((MockSecureSettings) builder.getSecureSettings()).setString( - // // APMTracer.APM_TOKEN_SETTING.getKey(), - // // System.getProperty("tests.apm.token", "") - // // ); - // builder.put(APMTracer.APM_ENABLED_SETTING.getKey(), true).put("xpack.security.authz.tracing", true); - // return builder.build(); - // } - // - // @Override - // protected boolean addMockHttpTransport() { - // return false; - // } - // - // @After - // public void 
clearRecordedSpans() { - // APMTracer.CAPTURING_SPAN_EXPORTER.clear(); - // } - // - // public void testModule() { - // List plugins = internalCluster().getAnyMasterNodeInstance(PluginsService.class).filterPlugins(APM.class); - // assertThat(plugins, hasSize(1)); - // - // TransportService transportService = internalCluster().getInstance(TransportService.class); - // final TaskTracer taskTracer = transportService.getTaskManager().getTaskTracer(); - // assertThat(taskTracer, notNullValue()); - // - // final Task testTask = new Task(randomNonNegativeLong(), "test", "action", "", TaskId.EMPTY_TASK_ID, Collections.emptyMap()); - // - // APMTracer.CAPTURING_SPAN_EXPORTER.clear(); - // - // taskTracer.onTaskRegistered(transportService.getThreadPool().getThreadContext(), testTask); - // taskTracer.onTaskUnregistered(testTask); - // - // final List capturedSpans = APMTracer.CAPTURING_SPAN_EXPORTER.getCapturedSpans(); - // boolean found = false; - // final Long targetId = testTask.getId(); - // for (SpanData capturedSpan : capturedSpans) { - // if (targetId.equals(capturedSpan.getAttributes().get(AttributeKey.longKey("es.task.id")))) { - // found = true; - // assertTrue(capturedSpan.hasEnded()); - // } - // } - // assertTrue(found); - // } - // - // public void testRecordsNestedSpans() { - // - // APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events - // - // client().admin().cluster().prepareListTasks().get(); - // - // var parentTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists").collect(toList()); - // assertThat(parentTasks, hasSize(1)); - // var parentTask = parentTasks.get(0); - // assertThat(parentTask.getParentSpanId(), equalTo("0000000000000000")); - // - // var childrenTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists[n]").collect(toList()); - // assertThat(childrenTasks, hasSize(internalCluster().size())); - // for (SpanData childrenTask : childrenTasks) { - // assertThat(childrenTask.getParentSpanId(), equalTo(parentTask.getSpanId())); - // assertThat(childrenTask.getTraceId(), equalTo(parentTask.getTraceId())); - // } - // } - // - // public void testRecovery() throws Exception { - // internalCluster().ensureAtLeastNumDataNodes(2); - // - // assertAcked( - // client().admin() - // .indices() - // .prepareCreate("test-index") - // .setSettings( - // Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - // ) - // ); - // - // ensureGreen("test-index"); - // - // indexRandom(true, true, client().prepareIndex("test-index").setSource("{}", XContentType.JSON)); - // flushAndRefresh("test-index"); - // - // final APMTracer.CapturingSpanExporter spanExporter = APMTracer.CAPTURING_SPAN_EXPORTER; - // spanExporter.clear(); - // - // assertAcked( - // client().admin() - // .indices() - // .prepareUpdateSettings("test-index") - // .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)) - // ); - // - // ensureGreen("test-index"); - // - // final SpanData clusterUpdateSpan = spanExporter.findSpanByName(STATE_UPDATE_ACTION_NAME) - // .findAny() - // .orElseThrow(() -> new AssertionError("not found")); - // - // final List clusterUpdateChildActions = spanExporter.findSpan( - // spanData -> spanData.getParentSpanId().equals(clusterUpdateSpan.getSpanId()) - // ).map(SpanData::getName).collect(toList()); - // - // assertThat( - // clusterUpdateChildActions, - // 
hasItems(PublicationTransportHandler.PUBLISH_STATE_ACTION_NAME, PublicationTransportHandler.COMMIT_STATE_ACTION_NAME) - // ); - // - // final SpanData recoverySpan = spanExporter.findSpanByName(START_RECOVERY) - // .findAny() - // .orElseThrow(() -> new AssertionError("not found")); - // final List recoveryChildActions = spanExporter.findSpan( - // spanData -> spanData.getParentSpanId().equals(recoverySpan.getSpanId()) - // ).map(SpanData::getName).collect(toList()); - // - // assertThat( - // recoveryChildActions, - // hasItems( - // PeerRecoveryTargetService.Actions.FILES_INFO, - // PeerRecoveryTargetService.Actions.FILE_CHUNK, - // PeerRecoveryTargetService.Actions.CLEAN_FILES, - // PeerRecoveryTargetService.Actions.PREPARE_TRANSLOG, - // PeerRecoveryTargetService.Actions.FINALIZE - // ) - // ); - // - // } - // - // public void testSearch() throws Exception { - // - // internalCluster().ensureAtLeastNumDataNodes(2); - // final int nodeCount = internalCluster().numDataNodes(); - // - // assertAcked( - // client().admin() - // .indices() - // .prepareCreate("test-matching") - // .setMapping("{\"properties\":{\"message\":{\"type\":\"text\"},\"@timestamp\":{\"type\":\"date\"}}}") - // .setSettings( - // Settings.builder() - // .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) - // .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, nodeCount * 6) - // ) - // ); - // - // assertAcked( - // client().admin() - // .indices() - // .prepareCreate("test-notmatching") - // .setMapping("{\"properties\":{\"message\":{\"type\":\"text\"},\"@timestamp\":{\"type\":\"date\"}}}") - // .setSettings( - // Settings.builder() - // .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) - // .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, nodeCount * 6) - // ) - // ); - // - // ensureGreen("test-matching", "test-notmatching"); - // - // final String matchingDate = "2021-11-17"; - // final String nonMatchingDate = "2021-01-01"; - // - // final BulkRequestBuilder bulkRequestBuilder = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - // - // for (int i = 0; i < 1000; i++) { - // final boolean isMatching = randomBoolean(); - // final IndexRequestBuilder indexRequestBuilder = client().prepareIndex(isMatching ? "test-matching" : "test-notmatching"); - // indexRequestBuilder.setSource( - // "{\"@timestamp\":\"" + (isMatching ? 
matchingDate : nonMatchingDate) + "\",\"message\":\"\"}", - // XContentType.JSON - // ); - // bulkRequestBuilder.add(indexRequestBuilder); - // } - // - // assertFalse(bulkRequestBuilder.execute().actionGet(10, TimeUnit.SECONDS).hasFailures()); - // - // final APMTracer.CapturingSpanExporter spanExporter = APMTracer.CAPTURING_SPAN_EXPORTER; - // spanExporter.clear(); - // - // final Request searchRequest = new Request("GET", "_search"); - // searchRequest.addParameter("search_type", "query_then_fetch"); - // searchRequest.addParameter("pre_filter_shard_size", "1"); - // searchRequest.setJsonEntity("{\"query\":{\"range\":{\"@timestamp\":{\"gt\":\"2021-11-01\"}}}}"); - // searchRequest.setOptions( - // searchRequest.getOptions() - // .toBuilder() - // .addHeader( - // "Authorization", - // UsernamePasswordToken.basicAuthHeaderValue( - // SecuritySettingsSource.TEST_USER_NAME, - // new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()) - // ) - // ) - // ); - // - // final Response searchResponse = getRestClient().performRequest(searchRequest); - // - // assertTrue(spanExporter.findSpanByName(SearchAction.NAME).findAny().isPresent()); - // assertTrue(spanExporter.findSpanByName(SearchTransportService.QUERY_CAN_MATCH_NODE_NAME).findAny().isPresent()); - // } - // - // public void testDoesNotRecordSpansWhenDisabled() { - // - // client().admin() - // .cluster() - // .updateSettings( - // new ClusterUpdateSettingsRequest().persistentSettings( - // Settings.builder().put(APMTracer.APM_ENABLED_SETTING.getKey(), false).build() - // ) - // ) - // .actionGet(); - // - // try { - // APMTracer.CAPTURING_SPAN_EXPORTER.clear(); - // - // client().admin().cluster().prepareListTasks().get(); - // - // assertThat(APMTracer.CAPTURING_SPAN_EXPORTER.getCapturedSpans(), empty()); - // } finally { - // client().admin() - // .cluster() - // .updateSettings( - // new ClusterUpdateSettingsRequest().persistentSettings( - // Settings.builder().put(APMTracer.APM_ENABLED_SETTING.getKey(), (String) null).build() - // ) - // ) - // .actionGet(); - // } - // } - // - // public void testFilterByNameGivenSingleCompleteMatch() { - // - // client().admin() - // .cluster() - // .updateSettings( - // new ClusterUpdateSettingsRequest().persistentSettings( - // Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), "cluster:monitor/tasks/lists").build() - // ) - // ) - // .actionGet(); - // - // APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events - // - // try { - // client().admin().cluster().prepareListTasks().get(); - // - // var parentTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists").collect(toList()); - // assertThat(parentTasks, hasSize(1)); - // - // var childrenTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists[n]").collect(toList()); - // assertThat(childrenTasks, empty()); - // } finally { - // client().admin() - // .cluster() - // .updateSettings( - // new ClusterUpdateSettingsRequest().persistentSettings( - // Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), (String) null).build() - // ) - // ) - // .actionGet(); - // } - // } - // - // public void testFilterByNameGivenSinglePattern() { - // - // client().admin() - // .cluster() - // .updateSettings( - // new ClusterUpdateSettingsRequest().persistentSettings( - // Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), "*/tasks/lists*").build() - // ) - // ) - // .actionGet(); - // - 
// APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events - // - // try { - // client().admin().cluster().prepareListTasks().get(); - // - // var parentTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists").collect(toList()); - // assertThat(parentTasks, hasSize(1)); - // - // var childrenTasks = APMTracer.CAPTURING_SPAN_EXPORTER.findSpanByName("cluster:monitor/tasks/lists[n]").collect(toList()); - // assertThat(childrenTasks, hasSize(internalCluster().size())); - // } finally { - // client().admin() - // .cluster() - // .updateSettings( - // new ClusterUpdateSettingsRequest().persistentSettings( - // Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), (String) null).build() - // ) - // ) - // .actionGet(); - // } - // } - // - // public void testFilterByNameGivenTwoPatterns() { - // - // client().admin() - // .cluster() - // .updateSettings( - // new ClusterUpdateSettingsRequest().persistentSettings( - // Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), "*/tasks/lists,*/nodes/stats").build() - // ) - // ) - // .actionGet(); - // - // APMTracer.CAPTURING_SPAN_EXPORTER.clear();// removing start related events - // - // try { - // client().admin().cluster().prepareListTasks().get(); - // client().admin().cluster().nodesStats(new NodesStatsRequest()).actionGet(); - // - // var spans = APMTracer.CAPTURING_SPAN_EXPORTER.getCapturedSpans().stream().map(SpanData::getName).collect(Collectors.toSet()); - // assertThat(spans, contains("cluster:monitor/nodes/stats", "cluster:monitor/tasks/lists")); - // } finally { - // client().admin() - // .cluster() - // .updateSettings( - // new ClusterUpdateSettingsRequest().persistentSettings( - // Settings.builder().put(APMTracer.APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), (String) null).build() - // ) - // ) - // .actionGet(); - // } - // } -} diff --git a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/TestOpenTelemetry.java b/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/TestOpenTelemetry.java deleted file mode 100644 index afda3ae4b9892..0000000000000 --- a/x-pack/plugin/apm-integration/src/internalClusterTest/java/org/elasticsearch/xpack/apm/TestOpenTelemetry.java +++ /dev/null @@ -1,235 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.apm; - -import io.opentelemetry.api.OpenTelemetry; -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.trace.Span; -import io.opentelemetry.api.trace.SpanBuilder; -import io.opentelemetry.api.trace.SpanContext; -import io.opentelemetry.api.trace.SpanKind; -import io.opentelemetry.api.trace.StatusCode; -import io.opentelemetry.api.trace.Tracer; -import io.opentelemetry.api.trace.TracerProvider; -import io.opentelemetry.context.Context; -import io.opentelemetry.context.propagation.ContextPropagators; - -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -// Shut up, IntelliJ -@SuppressWarnings("NullableProblems") -public class TestOpenTelemetry implements OpenTelemetry { - - public static final OpenTelemetry INSTANCE = new TestOpenTelemetry(); - - private final Tracer tracer; - - public TestOpenTelemetry() { - this.tracer = new TestTracer(); - } - - public Tracer getTracer() { - return tracer; - } - - @Override - public TracerProvider getTracerProvider() { - return new TracerProvider() { - @Override - public Tracer get(String instrumentationScopeName) { - return tracer; - } - - @Override - public Tracer get(String instrumentationScopeName, String instrumentationScopeVersion) { - return tracer; - } - }; - } - - @Override - public Tracer getTracer(String instrumentationScopeName) { - return this.tracer; - } - - @Override - public Tracer getTracer(String instrumentationScopeName, String instrumentationScopeVersion) { - return this.tracer; - } - - @Override - public ContextPropagators getPropagators() { - return ContextPropagators.noop(); - } - - class TestTracer implements Tracer { - - @Override - public SpanBuilder spanBuilder(String spanName) { - return new TestSpanBuilder(spanName); - } - } - - class TestSpanBuilder implements SpanBuilder { - private final String spanName; - private Context parentContext; - private Map attributes = new HashMap<>(); - private SpanKind spanKind; - private Long startTimestamp; - - TestSpanBuilder(String spanName) { - this.spanName = spanName; - } - - @Override - public SpanBuilder setParent(Context context) { - this.parentContext = context; - return this; - } - - @Override - public SpanBuilder setNoParent() { - this.parentContext = null; - return this; - } - - @Override - public SpanBuilder addLink(SpanContext spanContext) { - return this; - } - - @Override - public SpanBuilder addLink(SpanContext spanContext, Attributes attributes) { - return this; - } - - @Override - public SpanBuilder setAttribute(String key, String value) { - this.attributes.put(key, value); - return this; - } - - @Override - public SpanBuilder setAttribute(String key, long value) { - this.attributes.put(key, value); - return this; - } - - @Override - public SpanBuilder setAttribute(String key, double value) { - this.attributes.put(key, value); - return this; - } - - @Override - public SpanBuilder setAttribute(String key, boolean value) { - this.attributes.put(key, value); - return this; - } - - @Override - public SpanBuilder setAttribute(AttributeKey key, T value) { - this.attributes.put(key.getKey(), value); - return this; - } - - @Override - public SpanBuilder setSpanKind(SpanKind spanKind) { - this.spanKind = spanKind; - return this; - } - - @Override - public SpanBuilder setStartTimestamp(long startTimestamp, TimeUnit unit) { - this.startTimestamp = unit.toMillis(startTimestamp); - return this; - } - - @Override - public Span 
startSpan() { - if (this.startTimestamp == null) { - this.startTimestamp = System.currentTimeMillis(); - } - return new TestSpan(spanName, parentContext, attributes, spanKind, startTimestamp); - } - } - - class TestSpan implements Span { - private String name; - private final Context parentContext; - private final Map attributes; - private final SpanKind spanKind; - private Throwable exception; - private Long startTimestamp; - private Long endTimestamp; - - TestSpan(String spanName, Context parentContext, Map attributes, SpanKind spanKind, Long startTimestamp) { - this.name = spanName; - this.parentContext = parentContext; - this.attributes = attributes; - this.spanKind = spanKind; - this.startTimestamp = startTimestamp; - } - - @Override - public Span setAttribute(AttributeKey key, T value) { - this.attributes.put(key.getKey(), value); - return this; - } - - @Override - public Span addEvent(String name, Attributes attributes) { - return this; - } - - @Override - public Span addEvent(String name, Attributes attributes, long timestamp, TimeUnit unit) { - return this; - } - - @Override - public Span setStatus(StatusCode statusCode, String description) { - return this; - } - - @Override - public Span recordException(Throwable exception, Attributes additionalAttributes) { - this.exception = exception; - return this; - } - - @Override - public Span updateName(String name) { - this.name = name; - return this; - } - - @Override - public void end() { - this.endTimestamp = System.currentTimeMillis(); - } - - @Override - public void end(long timestamp, TimeUnit unit) { - this.endTimestamp = unit.toMillis(timestamp); - } - - @Override - public SpanContext getSpanContext() { - return null; - } - - @Override - public boolean isRecording() { - return this.endTimestamp != null; - } - } -} diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java index eda3d48470c00..eb3b34a6f3af7 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java @@ -50,7 +50,7 @@ public Collection createComponents( IndexNameExpressionResolver indexNameExpressionResolver, Supplier repositoriesServiceSupplier ) { - tracer.set(new APMTracer(settings, clusterService)); + tracer.set(new APMTracer(settings, clusterService, new APMAgentSettings())); return List.of(tracer.get()); } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java index 7025d8a8dddef..c39a57dca47bf 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java @@ -7,8 +7,11 @@ package org.elasticsearch.xpack.apm; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.Assertions; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.core.SuppressForbidden; import java.security.AccessController; import java.security.PrivilegedAction; @@ -21,7 +24,9 @@ import static org.elasticsearch.common.settings.Setting.Property.NodeScope; import static org.elasticsearch.common.settings.Setting.Property.OperatorDynamic; -abstract class APMAgentSettings { +class APMAgentSettings { + 
+ private static final Logger LOGGER = LogManager.getLogger(APMAgentSettings.class); /** * Sensible defaults that Elasticsearch configures. This cannot be done via the APM agent @@ -33,12 +38,15 @@ abstract class APMAgentSettings { ); // end::noformat - static void setAgentSetting(String key, String value) { + @SuppressForbidden(reason = "Need to be able to manipulate APM agent-related properties to set them dynamically") + void setAgentSetting(String key, String value) { final String completeKey = "elastic.apm." + Objects.requireNonNull(key); AccessController.doPrivileged((PrivilegedAction) () -> { if (value == null || value.isEmpty()) { + LOGGER.trace("Clearing system property [{}]", completeKey); System.clearProperty(completeKey); } else { + LOGGER.trace("Setting setting property [{}] to [{}]", completeKey, value); System.setProperty(completeKey, value); } return null; @@ -227,45 +235,53 @@ static void setAgentSetting(String key, String value) { "aws_lambda_handler" ); - static final String SETTING_PREFIX = "xpack.apm.tracing."; - - static final Setting.AffixSetting APM_AGENT_SETTINGS = Setting.prefixKeySetting(SETTING_PREFIX + "agent.", (qualifiedKey) -> { - final String[] parts = qualifiedKey.split("\\."); - final String key = parts[parts.length - 1]; - final String defaultValue = APM_AGENT_DEFAULT_SETTINGS.getOrDefault(key, ""); - return new Setting<>(qualifiedKey, defaultValue, (value) -> { - // The `Setting` constructor asserts that a setting's parser doesn't return null when called with the default - // value. This makes less sense for prefix settings, but is particularly problematic here since we validate - // the setting name and reject unknown keys. Thus, if assertions are enabled, we have to tolerate the "_na_" key, - // which comes from `Setting#prefixKeySetting()`. - if (Assertions.ENABLED && qualifiedKey.equals("_na_")) { - return value; - } - if (AGENT_KEYS.contains(key) == false) { - throw new IllegalArgumentException("Unknown APM configuration key: [" + qualifiedKey + "]"); - } - if (STATIC_AGENT_KEYS.contains(key)) { - throw new IllegalArgumentException( - "Cannot set [" - + qualifiedKey - + "] as it is not a dynamic setting - configure it via [config/elasticapm.properties] instead" - ); - } - if (PROHIBITED_AGENT_KEYS.contains(key)) { - throw new IllegalArgumentException("Configuring [" + qualifiedKey + "] is prohibited with Elasticsearch"); - } + static final String APM_SETTING_PREFIX = "xpack.apm.tracing."; + + static final Setting.AffixSetting APM_AGENT_SETTINGS = Setting.prefixKeySetting( + APM_SETTING_PREFIX + "agent.", + (qualifiedKey) -> { + final String[] parts = qualifiedKey.split("\\."); + final String key = parts[parts.length - 1]; + final String defaultValue = APM_AGENT_DEFAULT_SETTINGS.getOrDefault(key, ""); + return new Setting<>(qualifiedKey, defaultValue, (value) -> { + // The `Setting` constructor asserts that a setting's parser doesn't return null when called with the default + // value. This makes less sense for prefix settings, but is particularly problematic here since we validate + // the setting name and reject unknown keys. Thus, if assertions are enabled, we have to tolerate the "_na_" key, + // which comes from `Setting#prefixKeySetting()`. 
+ if (Assertions.ENABLED && qualifiedKey.equals("_na_")) { + return value; + } + if (AGENT_KEYS.contains(key) == false) { + throw new IllegalArgumentException("Unknown APM configuration key: [" + qualifiedKey + "]"); + } + if (STATIC_AGENT_KEYS.contains(key)) { + throw new IllegalArgumentException( + "Cannot set [" + + qualifiedKey + + "] as it is not a dynamic setting - configure it via [config/elasticapm.properties] instead" + ); + } + if (PROHIBITED_AGENT_KEYS.contains(key)) { + throw new IllegalArgumentException("Configuring [" + qualifiedKey + "] is prohibited with Elasticsearch"); + } - return value; - }, Setting.Property.NodeScope, Setting.Property.OperatorDynamic); - }); + return value; + }, Setting.Property.NodeScope, Setting.Property.OperatorDynamic); + } + ); static final Setting> APM_TRACING_NAMES_INCLUDE_SETTING = Setting.listSetting( - SETTING_PREFIX + "names.include", + APM_SETTING_PREFIX + "names.include", Collections.emptyList(), Function.identity(), OperatorDynamic, NodeScope ); - static final Setting APM_ENABLED_SETTING = Setting.boolSetting(SETTING_PREFIX + "enabled", false, OperatorDynamic, NodeScope); + static final Setting APM_ENABLED_SETTING = Setting.boolSetting( + APM_SETTING_PREFIX + "enabled", + false, + OperatorDynamic, + NodeScope + ); } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index f1a378f7c4f7d..f68911a00c0fc 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -16,6 +16,8 @@ import io.opentelemetry.context.Context; import io.opentelemetry.context.propagation.TextMapGetter; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractLifecycleComponent; @@ -40,65 +42,60 @@ import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_AGENT_SETTINGS; import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_ENABLED_SETTING; import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING; -import static org.elasticsearch.xpack.apm.APMAgentSettings.SETTING_PREFIX; public class APMTracer extends AbstractLifecycleComponent implements org.elasticsearch.tracing.Tracer { - private record ContextWrapper(Context context) { - Span span() { - return Span.fromContextOrNull(this.context); - } - - void close() { - this.span().end(); - } - } + private static final Logger LOGGER = LogManager.getLogger(APMTracer.class); - private final Map spans = ConcurrentCollections.newConcurrentMap(); + private final Map spans = ConcurrentCollections.newConcurrentMap(); private final ClusterService clusterService; private volatile boolean enabled; private volatile APMServices services; private List includeNames; + private final APMAgentSettings apmAgentSettings; /** * This class is required to make all open telemetry services visible at once */ - private record APMServices(Tracer tracer, OpenTelemetry openTelemetry) {} + record APMServices(Tracer tracer, OpenTelemetry openTelemetry) {} - public APMTracer(Settings settings, ClusterService clusterService) { + public APMTracer(Settings settings, ClusterService clusterService, APMAgentSettings apmAgentSettings) { this.clusterService = 
Objects.requireNonNull(clusterService); - this.enabled = APM_ENABLED_SETTING.get(settings); this.includeNames = APM_TRACING_NAMES_INCLUDE_SETTING.get(settings); + this.apmAgentSettings = apmAgentSettings; + + this.enabled = APM_ENABLED_SETTING.get(settings); + this.apmAgentSettings.setAgentSetting("recording", Boolean.toString(this.enabled)); // Apply default values for some system properties. Although we configure // the settings in APM_AGENT_DEFAULT_SETTINGS to defer to the default values, they won't // do anything if those settings are never configured. APM_AGENT_DEFAULT_SETTINGS.keySet() .forEach( - key -> APMAgentSettings.setAgentSetting( + key -> apmAgentSettings.setAgentSetting( key, - APM_AGENT_SETTINGS.getConcreteSetting(SETTING_PREFIX + "agent." + key).get(settings) + APM_AGENT_SETTINGS.getConcreteSetting(APM_AGENT_SETTINGS.getKey() + key).get(settings) ) ); // Then apply values from the settings in the cluster state - APM_AGENT_SETTINGS.getAsMap(settings).forEach(APMAgentSettings::setAgentSetting); + APM_AGENT_SETTINGS.getAsMap(settings).forEach(apmAgentSettings::setAgentSetting); final ClusterSettings clusterSettings = clusterService.getClusterSettings(); clusterSettings.addSettingsUpdateConsumer(APM_ENABLED_SETTING, this::setEnabled); clusterSettings.addSettingsUpdateConsumer(APM_TRACING_NAMES_INCLUDE_SETTING, this::setIncludeNames); - clusterSettings.addAffixMapUpdateConsumer(APM_AGENT_SETTINGS, map -> map.forEach(APMAgentSettings::setAgentSetting), (x, y) -> {}); + clusterSettings.addAffixMapUpdateConsumer(APM_AGENT_SETTINGS, map -> map.forEach(apmAgentSettings::setAgentSetting), (x, y) -> {}); } private void setEnabled(boolean enabled) { this.enabled = enabled; // The agent records data other than spans, e.g. JVM metrics, so we toggle this setting in order to // minimise its impact to a running Elasticsearch. 
- APMAgentSettings.setAgentSetting("recording", Boolean.toString(enabled)); + this.apmAgentSettings.setAgentSetting("recording", Boolean.toString(enabled)); if (enabled) { - createApmServices(); + this.services = createApmServices(); } else { destroyApmServices(); } @@ -111,7 +108,7 @@ private void setIncludeNames(List includeNames) { @Override protected void doStart() { if (enabled) { - createApmServices(); + this.services = createApmServices(); } } @@ -123,11 +120,11 @@ protected void doStop() { @Override protected void doClose() {} - private void createApmServices() { + private APMServices createApmServices() { assert this.enabled; assert this.services == null; - this.services = AccessController.doPrivileged((PrivilegedAction) () -> { + return AccessController.doPrivileged((PrivilegedAction) () -> { var openTelemetry = GlobalOpenTelemetry.get(); var tracer = openTelemetry.getTracer("elasticsearch", Version.CURRENT.toString()); @@ -142,6 +139,10 @@ private void destroyApmServices() { @Override public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { + assert threadContext != null; + assert traceable != null; + + // If tracing has been disabled, return immediately var services = this.services; if (services == null) { return; @@ -151,18 +152,10 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { return; } - spans.computeIfAbsent(traceable.getSpanId(), spanId -> AccessController.doPrivileged((PrivilegedAction) () -> { + spans.computeIfAbsent(traceable.getSpanId(), spanId -> AccessController.doPrivileged((PrivilegedAction) () -> { final SpanBuilder spanBuilder = services.tracer.spanBuilder(traceable.getSpanName()); - // https://github.com/open-telemetry/opentelemetry-java/discussions/2884#discussioncomment-381870 - // If you just want to propagate across threads within the same process, you don't need context propagators (extract/inject). - // You can just pass the Context object directly to another thread (it is immutable and thus thread-safe). - - // local parent first, remote parent as fallback - Context parentContext = getLocalParentContext(threadContext); - if (parentContext == null) { - parentContext = getRemoteParentContext(threadContext); - } + final Context parentContext = getParentContext(threadContext); if (parentContext != null) { spanBuilder.setParent(parentContext); } @@ -181,14 +174,44 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { // propagated threadContext.putHeader(spanHeaders); - return new ContextWrapper(contextForNewSpan); + return contextForNewSpan; })); } + private Context getParentContext(ThreadContext threadContext) { + // https://github.com/open-telemetry/opentelemetry-java/discussions/2884#discussioncomment-381870 + // If you just want to propagate across threads within the same process, you don't need context propagators (extract/inject). + // You can just pass the Context object directly to another thread (it is immutable and thus thread-safe). 
+ + // Attempt to fetch a local parent context first, otherwise look for a remote parent + Context parentContext = threadContext.getTransient("parent_" + Task.APM_TRACE_CONTEXT); + if (parentContext == null) { + final String traceParentHeader = threadContext.getTransient("parent_" + Task.TRACE_PARENT_HTTP_HEADER); + final String traceStateHeader = threadContext.getTransient("parent_" + Task.TRACE_STATE); + + if (traceParentHeader != null) { + final Map traceContextMap = new HashMap<>(2); + // traceparent and tracestate should match the keys used by W3CTraceContextPropagator + traceContextMap.put(Task.TRACE_PARENT_HTTP_HEADER, traceParentHeader); + if (traceStateHeader != null) { + traceContextMap.put(Task.TRACE_STATE, traceStateHeader); + } + parentContext = services.openTelemetry.getPropagators() + .getTextMapPropagator() + .extract(Context.current(), traceContextMap, new MapKeyGetter()); + } + } + return parentContext; + } + @Override public Releasable withScope(Traceable traceable) { - var scope = spans.get(traceable.getSpanId()).context.makeCurrent(); - return scope::close; + final Context context = spans.get(traceable.getSpanId()); + if (context != null) { + var scope = context.makeCurrent(); + return scope::close; + } + return () -> {}; } private void setSpanAttributes(ThreadContext threadContext, Traceable traceable, SpanBuilder spanBuilder) { @@ -225,41 +248,41 @@ private void setSpanAttributes(ThreadContext threadContext, Traceable traceable, @Override public void onTraceException(Traceable traceable, Throwable throwable) { - final var context = spans.get(traceable.getSpanId()); - if (context != null) { - context.span().recordException(throwable); + final var span = Span.fromContextOrNull(spans.get(traceable.getSpanId())); + if (span != null) { + span.recordException(throwable); } } @Override public void setAttribute(Traceable traceable, String key, boolean value) { - final var context = spans.get(traceable.getSpanId()); - if (context != null) { - context.span().setAttribute(key, value); + final var span = Span.fromContextOrNull(spans.get(traceable.getSpanId())); + if (span != null) { + span.setAttribute(key, value); } } @Override public void setAttribute(Traceable traceable, String key, double value) { - final var context = spans.get(traceable.getSpanId()); - if (context != null) { - context.span().setAttribute(key, value); + final var span = Span.fromContextOrNull(spans.get(traceable.getSpanId())); + if (span != null) { + span.setAttribute(key, value); } } @Override public void setAttribute(Traceable traceable, String key, long value) { - final var context = spans.get(traceable.getSpanId()); - if (context != null) { - context.span().setAttribute(key, value); + final var span = Span.fromContextOrNull(spans.get(traceable.getSpanId())); + if (span != null) { + span.setAttribute(key, value); } } @Override public void setAttribute(Traceable traceable, String key, String value) { - final var context = spans.get(traceable.getSpanId()); - if (context != null) { - context.span().setAttribute(key, value); + final var span = Span.fromContextOrNull(spans.get(traceable.getSpanId())); + if (span != null) { + span.setAttribute(key, value); } } @@ -269,41 +292,19 @@ private boolean isSpanNameIncluded(String name) { return includeNames.isEmpty() || Regex.simpleMatch(includeNames, name); } - private Context getLocalParentContext(ThreadContext threadContext) { - return threadContext.getTransient("parent_" + Task.APM_TRACE_CONTEXT); - } - - private Context getRemoteParentContext(ThreadContext 
threadContext) { - final String traceParentHeader = threadContext.getTransient("parent_" + Task.TRACE_PARENT_HTTP_HEADER); - final String traceStateHeader = threadContext.getTransient("parent_" + Task.TRACE_STATE); - - if (traceParentHeader != null) { - final Map traceContextMap = new HashMap<>(2); - // traceparent and tracestate should match the keys used by W3CTraceContextPropagator - traceContextMap.put(Task.TRACE_PARENT_HTTP_HEADER, traceParentHeader); - if (traceStateHeader != null) { - traceContextMap.put(Task.TRACE_STATE, traceStateHeader); - } - return services.openTelemetry.getPropagators() - .getTextMapPropagator() - .extract(Context.current(), traceContextMap, new MapKeyGetter()); - } - return null; - } - @Override public void onTraceStopped(Traceable traceable) { - final var context = spans.remove(traceable.getSpanId()); - if (context != null) { - context.close(); + final var span = Span.fromContextOrNull(spans.remove(traceable.getSpanId())); + if (span != null) { + span.end(); } } @Override public void onTraceEvent(Traceable traceable, String eventName) { - final var context = spans.get(traceable.getSpanId()); - if (context != null) { - context.span().addEvent(eventName); + final var span = Span.fromContextOrNull(spans.get(traceable.getSpanId())); + if (span != null) { + span.addEvent(eventName); } } @@ -323,4 +324,9 @@ public String get(Map carrier, String key) { private static boolean isSupportedContextKey(String key) { return Task.TRACE_PARENT_HTTP_HEADER.equals(key) || Task.TRACE_STATE.equals(key); } + + // VisibleForTesting + Map getSpans() { + return spans; + } } diff --git a/x-pack/plugin/apm-integration/src/test/java/org/elasticsearch/xpack/apm/APMTracerTests.java b/x-pack/plugin/apm-integration/src/test/java/org/elasticsearch/xpack/apm/APMTracerTests.java new file mode 100644 index 0000000000000..18a3c8c891029 --- /dev/null +++ b/x-pack/plugin/apm-integration/src/test/java/org/elasticsearch/xpack/apm/APMTracerTests.java @@ -0,0 +1,211 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.apm; + +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.tracing.Traceable; + +import java.util.HashSet; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_AGENT_SETTINGS; +import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_ENABLED_SETTING; +import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING; +import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.anEmptyMap; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.notNullValue; +import static org.mockito.Mockito.atLeastOnce; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class APMTracerTests extends ESTestCase { + + /** + * Check that the tracer doesn't create spans when tracing is disabled. + */ + public void test_onTraceStarted_withTracingDisabled_doesNotStartTrace() { + Settings settings = Settings.builder().put(APM_ENABLED_SETTING.getKey(), false).build(); + APMTracer apmTracer = buildTracer(settings); + + Traceable traceable = new TestTraceable("1"); + apmTracer.onTraceStarted(new ThreadContext(settings), traceable); + + assertThat(apmTracer.getSpans(), anEmptyMap()); + } + + /** + * Check that the tracer doesn't create spans if a Traceable's span name is filtered out. + */ + public void test_onTraceStarted_withSpanNameOmitted_doesNotStartTrace() { + Settings settings = Settings.builder() + .put(APM_ENABLED_SETTING.getKey(), true) + .putList(APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), List.of("filtered*")) + .build(); + APMTracer apmTracer = buildTracer(settings); + + Traceable traceable = new TestTraceable("1"); + apmTracer.onTraceStarted(new ThreadContext(settings), traceable); + + assertThat(apmTracer.getSpans(), anEmptyMap()); + } + + /** + * Check that when a trace is started, the tracer starts a span and records it. + */ + public void test_onTraceStarted_startsTrace() { + Settings settings = Settings.builder().put(APM_ENABLED_SETTING.getKey(), true).build(); + APMTracer apmTracer = buildTracer(settings); + + Traceable traceable = new TestTraceable("1"); + apmTracer.onTraceStarted(new ThreadContext(settings), traceable); + + assertThat(apmTracer.getSpans(), aMapWithSize(1)); + assertThat(apmTracer.getSpans(), hasKey(traceable.getSpanId())); + } + + /** + * Check that when a trace is stopped, the tracer ends the span and removes the record of it. + */ + public void test_onTraceStopped_stopsTrace() { + Settings settings = Settings.builder().put(APM_ENABLED_SETTING.getKey(), true).build(); + APMTracer apmTracer = buildTracer(settings); + + Traceable traceable = new TestTraceable("1"); + apmTracer.onTraceStarted(new ThreadContext(settings), traceable); + apmTracer.onTraceStopped(traceable); + + assertThat(apmTracer.getSpans(), anEmptyMap()); + } + + /** + * Check that when the tracer starts, it applies the default values for some agent settings to the system properties.
+ */ + public void test_whenTracerCreated_defaultSettingsApplied() { + APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); + Settings settings = Settings.builder().put(APM_ENABLED_SETTING.getKey(), true).build(); + buildTracer(settings, apmAgentSettings); + + verify(apmAgentSettings).setAgentSetting("transaction_sample_rate", "0.5"); + } + + /** + * Check that when the tracer starts and applies the default agent setting values to the system properties, their values + * are overridden from the cluster settings, if the cluster settings contain values for those agent settings. + */ + public void test_whenTracerCreated_clusterSettingsOverrideDefaults() { + APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); + Settings settings = Settings.builder() + .put(APM_ENABLED_SETTING.getKey(), true) + .put(APM_AGENT_SETTINGS.getKey() + "transaction_sample_rate", "0.75") + .build(); + buildTracer(settings, apmAgentSettings); + + // This happens twice because we first apply the default settings, whose values are overridden + // from the cluster settings, then we apply all the APM-agent related settings, not just the + // ones with default values. Although there is some redundancy here, it only happens at startup + // for a very small number of settings. + verify(apmAgentSettings, times(2)).setAgentSetting("transaction_sample_rate", "0.75"); + } + + /** + * Check that when the tracer starts, it applies all other agent settings to the system properties. + */ + public void test_whenTracerCreated_clusterSettingsAlsoApplied() { + APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); + Settings settings = Settings.builder() + .put(APM_ENABLED_SETTING.getKey(), true) + .put(APM_AGENT_SETTINGS.getKey() + "span_compression_enabled", "true") + .build(); + buildTracer(settings, apmAgentSettings); + + verify(apmAgentSettings).setAgentSetting("span_compression_enabled", "true"); + } + + /** + * Check that when the tracer is enabled, it also sets the APM agent's recording system property to true. + */ + public void test_whenTracerEnabled_setsRecordingProperty() { + APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); + Settings settings = Settings.builder().put(APM_ENABLED_SETTING.getKey(), true).build(); + buildTracer(settings, apmAgentSettings); + + verify(apmAgentSettings).setAgentSetting("recording", "true"); + } + + /** + * Check that when a trace is started, then the thread context is updated with tracing information. + *

+ * We expect the APM agent to inject the {@link Task#TRACE_PARENT_HTTP_HEADER} and {@link Task#TRACE_STATE} + * headers into the context, and it does, but this doesn't happen in the unit tests. We can + * check that the local context object is added, however. + */ + public void test_whenTraceStarted_threadContextIsPopulated() { + Settings settings = Settings.builder().put(APM_ENABLED_SETTING.getKey(), true).build(); + APMTracer apmTracer = buildTracer(settings); + + Traceable traceable = new TestTraceable("1"); + ThreadContext threadContext = new ThreadContext(settings); + apmTracer.onTraceStarted(threadContext, traceable); + assertThat(threadContext.getTransient(Task.APM_TRACE_CONTEXT), notNullValue()); + } + + /** + * Check that when the tracer is disabled, it also sets the APM agent's recording system property to false. + */ + public void test_whenTracerDisabled_setsRecordingProperty() { + APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); + Settings settings = Settings.builder().put(APM_ENABLED_SETTING.getKey(), false).build(); + buildTracer(settings, apmAgentSettings); + + verify(apmAgentSettings, atLeastOnce()).setAgentSetting("recording", "false"); + } + + private APMTracer buildTracer(Settings settings) { + return buildTracer(settings, new APMAgentSettings()); + } + + private APMTracer buildTracer(Settings settings, APMAgentSettings apmAgentSettings) { + APM apm = new APM(settings); + + ClusterService clusterService = mock(ClusterService.class); + when(clusterService.getClusterSettings()).thenReturn(new ClusterSettings(settings, new HashSet<>(apm.getSettings()))); + when(clusterService.getClusterName()).thenReturn(new ClusterName("testCluster")); + + APMTracer tracer = new APMTracer(settings, clusterService, apmAgentSettings); + tracer.doStart(); + return tracer; + } + + private record TestTraceable(String id) implements Traceable { + @Override + public String getSpanId() { + return "test-span-id-" + id; + } + + @Override + public String getSpanName() { + return "test-span-name-" + id; + } + + @Override + public Map getAttributes() { + return Map.of(); + } + } +} From d329b7a69dce99334fcfe557d9d58e8fef976d40 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Tue, 3 May 2022 16:38:12 +0100 Subject: [PATCH 74/90] Make qa test work again --- qa/apm/build.gradle | 1 - qa/apm/docker-compose.yml | 24 ++++++++----------- .../org/elasticsearch/xpack/apm/ApmIT.java | 22 +++++++---------- 3 files changed, 18 insertions(+), 29 deletions(-) diff --git a/qa/apm/build.gradle b/qa/apm/build.gradle index fed016fe6b127..245f13422c1ef 100644 --- a/qa/apm/build.gradle +++ b/qa/apm/build.gradle @@ -22,7 +22,6 @@ dependencies { dockerCompose { environment.put 'STACK_VERSION', VersionProperties.elasticsearch - // removeContainers = false } elasticsearch_distributions { diff --git a/qa/apm/docker-compose.yml b/qa/apm/docker-compose.yml index 620c8aa69ace7..85bdecff53c86 100644 --- a/qa/apm/docker-compose.yml +++ b/qa/apm/docker-compose.yml @@ -6,8 +6,6 @@ networks: services: apmserver: - # Referenced in the APM agent config in the ES container - # container_name: apmserver depends_on: kibana: condition: service_healthy @@ -24,9 +22,7 @@ services: KIBANA_FLEET_SERVICE_TOKEN: AAEAAWVsYXN0aWMvZmxlZXQtc2VydmVyL2VsYXN0aWMtcGFja2FnZS1mbGVldC1zZXJ2ZXItdG9rZW46bmgtcFhoQzRRQ2FXbms2U0JySGlWQQ KIBANA_FLEET_SETUP: "1" healthcheck: - test: - - CMD - - /bin/true + test: /bin/true image: docker.elastic.co/beats/elastic-agent:${STACK_VERSION} labels: - co.elastic.apm.stack-version=${STACK_VERSION} @@ -66,9 
+62,7 @@ services: healthcheck: interval: 20s retries: 10 - test: - CMD-SHELL - - curl -s -k http://localhost:9200/_cluster/health | grep -vq '"status":"red"' + test: curl -s -k http://localhost:9200/_cluster/health | grep -vq '"status":"red"' image: elasticsearch:test labels: - co.elastic.apm.stack-version=${STACK_VERSION} @@ -124,9 +118,7 @@ services: interval: 10s retries: 30 start_period: 10s - test: - CMD-SHELL - - curl -s -k http://kibana:5601/api/status | grep -q 'All services are available' + test: curl -s -k http://kibana:5601/api/status | grep -q 'All services are available' image: docker.elastic.co/kibana/kibana:${STACK_VERSION} labels: - co.elastic.apm.stack-version=${STACK_VERSION} @@ -140,8 +132,10 @@ services: volumes: - ./config/kibana/kibana-8.yml:/usr/share/kibana/config/kibana.yml - wait-service: - container_name: wait + # Rather than mess around with threads in the test, just run `curl` in a + # loop to generate traces with a known path + tracegenerator: + container_name: tracegenerator depends_on: apmserver: condition: service_healthy @@ -149,7 +143,9 @@ services: condition: service_healthy kibana: condition: service_healthy - image: busybox + # Using this image is a simple way to get `curl`. + image: elasticsearch:test + command: /bin/sh -c "while true; do curl -s -k -u admin:changeme http://elasticsearch:9200/_nodes/stats ; sleep 5; done" volumes: esdata: diff --git a/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java b/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java index a83c099c6c9c2..114e1a2e31558 100644 --- a/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java +++ b/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -47,18 +47,20 @@ public void configureTracing() throws IOException { } /** - * Check that if we send HTTP traffic to Elasticsearch, then traces are captured in APM server. + * Check that if we send HTTP traffic to Elasticsearch, then traces are captured in APM server. The traces are generated in + * a separate Docker container, which continually fetches `/_nodes/stats`.
*/ public void testCapturesTracesForHttpTraffic() throws Exception { - generateTraces(); - checkTracesDataStream(); assertTracesExist(); } - private void checkTracesDataStream() throws IOException { - assertOK(client().performRequest(new Request("GET", "/_data_stream/traces-apm-default"))); + private void checkTracesDataStream() throws Exception { + assertBusy(() -> { + final Response response = performRequestTolerantly(new Request("GET", "/_data_stream/traces-apm-default")); + assertOK(response); + }, 1, TimeUnit.MINUTES); } private void assertTracesExist() throws Exception { @@ -75,15 +77,7 @@ private void assertTracesExist() throws Exception { final List> documents = getDocuments(tracesSearchResponse); assertThat(documents, not(empty())); - }, 1, TimeUnit.MINUTES); - } - - private void generateTraces() throws IOException { - for (int i = 0; i < 20; i++) { - final Request nodesRequest = new Request("GET", "/_nodes/stats"); - final Response nodesResponse = client().performRequest(nodesRequest); - assertOK(nodesResponse); - } + }, 2, TimeUnit.MINUTES); } /** From 9dcf3698db4216bdbe0f003abd91e8c1fc951cfe Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Tue, 3 May 2022 16:43:46 +0100 Subject: [PATCH 75/90] More notes on tracing --- TRACING.md | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/TRACING.md b/TRACING.md index dc6da19480ecd..7359a2c96d08d 100644 --- a/TRACING.md +++ b/TRACING.md @@ -38,9 +38,9 @@ sampling rate: The APM agent pulls configuration from [multiple sources][agent-config], with a hierarchy that means, for example, that options set in the config file cannot be -overridden via system properties. This is a little unfortunate, since it means -that Elasticsearch cannot ship with sensible defaults for dynamic settings in -the config file, and override them via system properties. +overridden via system properties. This means that Elasticsearch cannot ship with +sensible defaults for dynamic settings in the config file and override them via +system properties. Instead, static or sensitive config values are put in the config file, and dynamic settings are left entirely to the system properties. The Elasticsearch @@ -91,7 +91,7 @@ First work out if you can turn it into a task. No, really. If you can't do that, you'll need to ensure that your class can get access to a `Tracer` instance (this is available to inject, or you'll need to pass it when your class is created). Then you need to call the appropriate methods on the -tracers when a span should start and end. +tracer when a span should start and end. ## What additional attributes should I set? @@ -114,6 +114,9 @@ tracing context. Using scope allows the APM agent to do the following: the active span (if any), so the agent can automatically get extra spans without manual instrumentation. +However, a scope must be closed in the same thread in which it was opened, which +cannot be guaranteed when using tasks. + In the OpenTelemetry documentation, spans, scope and context are fairly straightforward to use, since `Scope` is an `AutoCloseable` and so can be easily created and cleaned up using try-with-resources blocks.
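For reference, the conventional single-threaded pattern described above looks roughly like the following sketch. This is not part of the change itself; the `Tracer` instance and the span name are illustrative assumptions only.

// A minimal sketch of the try-with-resources pattern discussed above, assuming an
// OpenTelemetry Tracer is already available; "my-operation" is an arbitrary name.
Span span = tracer.spanBuilder("my-operation").startSpan();
try (Scope ignored = span.makeCurrent()) {
    // Work performed here is attributed to the span, and spans created further
    // down the call stack automatically pick it up as their parent.
} finally {
    span.end();
}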
Unfortunately, From 3c4e3235a4aba4ebc8afa7299bfb55b0c4a3c960 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 11 May 2022 11:56:04 +0100 Subject: [PATCH 76/90] Add an exclude filter and filtering unit tests --- .../java/org/elasticsearch/xpack/apm/APM.java | 1 + .../xpack/apm/APMAgentSettings.java | 8 ++ .../elasticsearch/xpack/apm/APMTracer.java | 12 ++- .../xpack/apm/APMTracerTests.java | 80 +++++++++++++++++++ 4 files changed, 100 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java index eb3b34a6f3af7..6817ee9db3bbb 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java @@ -59,6 +59,7 @@ public List> getSettings() { return List.of( APMAgentSettings.APM_ENABLED_SETTING, APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING, + APMAgentSettings.APM_TRACING_NAMES_EXCLUDE_SETTING, APMAgentSettings.APM_AGENT_SETTINGS ); } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java index c39a57dca47bf..dc2210942254f 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java @@ -278,6 +278,14 @@ void setAgentSetting(String key, String value) { NodeScope ); + static final Setting> APM_TRACING_NAMES_EXCLUDE_SETTING = Setting.listSetting( + APM_SETTING_PREFIX + "names.exclude", + Collections.emptyList(), + Function.identity(), + OperatorDynamic, + NodeScope + ); + static final Setting APM_ENABLED_SETTING = Setting.boolSetting( APM_SETTING_PREFIX + "enabled", false, diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index f68911a00c0fc..34d6b99f01ce5 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -41,6 +41,7 @@ import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_AGENT_DEFAULT_SETTINGS; import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_AGENT_SETTINGS; import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_ENABLED_SETTING; +import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_TRACING_NAMES_EXCLUDE_SETTING; import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING; public class APMTracer extends AbstractLifecycleComponent implements org.elasticsearch.tracing.Tracer { @@ -54,6 +55,7 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic private volatile APMServices services; private List includeNames; + private List excludeNames; private final APMAgentSettings apmAgentSettings; /** @@ -64,6 +66,7 @@ record APMServices(Tracer tracer, OpenTelemetry openTelemetry) {} public APMTracer(Settings settings, ClusterService clusterService, APMAgentSettings apmAgentSettings) { this.clusterService = Objects.requireNonNull(clusterService); this.includeNames = APM_TRACING_NAMES_INCLUDE_SETTING.get(settings); + this.excludeNames = 
APM_TRACING_NAMES_EXCLUDE_SETTING.get(settings); this.apmAgentSettings = apmAgentSettings; this.enabled = APM_ENABLED_SETTING.get(settings); @@ -86,6 +89,7 @@ public APMTracer(Settings settings, ClusterService clusterService, APMAgentSetti final ClusterSettings clusterSettings = clusterService.getClusterSettings(); clusterSettings.addSettingsUpdateConsumer(APM_ENABLED_SETTING, this::setEnabled); clusterSettings.addSettingsUpdateConsumer(APM_TRACING_NAMES_INCLUDE_SETTING, this::setIncludeNames); + clusterSettings.addSettingsUpdateConsumer(APM_TRACING_NAMES_EXCLUDE_SETTING, this::setExcludeNames); clusterSettings.addAffixMapUpdateConsumer(APM_AGENT_SETTINGS, map -> map.forEach(apmAgentSettings::setAgentSetting), (x, y) -> {}); } @@ -105,6 +109,10 @@ private void setIncludeNames(List includeNames) { this.includeNames = includeNames; } + private void setExcludeNames(List excludeNames) { + this.excludeNames = excludeNames; + } + @Override protected void doStart() { if (enabled) { @@ -289,7 +297,9 @@ public void setAttribute(Traceable traceable, String key, String value) { private boolean isSpanNameIncluded(String name) { // Alternatively we could use automata here but it is much more complex // and it needs wrapping like done for use in the security plugin. - return includeNames.isEmpty() || Regex.simpleMatch(includeNames, name); + final boolean include = includeNames.isEmpty() || Regex.simpleMatch(includeNames, name); + final boolean exclude = excludeNames.isEmpty() == false && Regex.simpleMatch(excludeNames, name); + return include && exclude == false; } @Override diff --git a/x-pack/plugin/apm-integration/src/test/java/org/elasticsearch/xpack/apm/APMTracerTests.java b/x-pack/plugin/apm-integration/src/test/java/org/elasticsearch/xpack/apm/APMTracerTests.java index 18a3c8c891029..08190c2bbd371 100644 --- a/x-pack/plugin/apm-integration/src/test/java/org/elasticsearch/xpack/apm/APMTracerTests.java +++ b/x-pack/plugin/apm-integration/src/test/java/org/elasticsearch/xpack/apm/APMTracerTests.java @@ -22,10 +22,12 @@ import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_AGENT_SETTINGS; import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_ENABLED_SETTING; +import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_TRACING_NAMES_EXCLUDE_SETTING; import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING; import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.mockito.Mockito.atLeastOnce; import static org.mockito.Mockito.mock; @@ -176,6 +178,84 @@ public void test_whenTracerDisabled_setsRecordingProperty() { verify(apmAgentSettings, atLeastOnce()).setAgentSetting("recording", "false"); } + /** + * Check that when a tracer has a list of include names configured, then those + * names are used to filter spans. 
+ */ + public void test_whenTraceStarted_andSpanNameIncluded_thenSpanIsStarted() { + final List includePatterns = List.of( + // exact name + "test-span-name-aaa", + // regex + "test-span-name-b*" + ); + Settings settings = Settings.builder() + .put(APM_ENABLED_SETTING.getKey(), true) + .putList(APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), includePatterns) + .build(); + APMTracer apmTracer = buildTracer(settings); + + Traceable traceableA = new TestTraceable("aaa"); + Traceable traceableB = new TestTraceable("bbb"); + Traceable traceableC = new TestTraceable("ccc"); + apmTracer.onTraceStarted(new ThreadContext(settings), traceableA); + apmTracer.onTraceStarted(new ThreadContext(settings), traceableB); + apmTracer.onTraceStarted(new ThreadContext(settings), traceableC); + + assertThat(apmTracer.getSpans(), hasKey(traceableA.getSpanId())); + assertThat(apmTracer.getSpans(), hasKey(traceableB.getSpanId())); + assertThat(apmTracer.getSpans(), not(hasKey(traceableC.getSpanId()))); + } + + /** + * Check that when a tracer has a list of include and exclude names configured, and + * a span matches both, then the exclude filters take precedence. + */ + public void test_whenTraceStarted_andSpanNameIncludedAndExcluded_thenSpanIsNotStarted() { + final List includePatterns = List.of("test-span-name-a*"); + final List excludePatterns = List.of("test-span-name-a*"); + Settings settings = Settings.builder() + .put(APM_ENABLED_SETTING.getKey(), true) + .putList(APM_TRACING_NAMES_INCLUDE_SETTING.getKey(), includePatterns) + .putList(APM_TRACING_NAMES_EXCLUDE_SETTING.getKey(), excludePatterns) + .build(); + APMTracer apmTracer = buildTracer(settings); + + Traceable traceableA = new TestTraceable("aaa"); + apmTracer.onTraceStarted(new ThreadContext(settings), traceableA); + + assertThat(apmTracer.getSpans(), not(hasKey(traceableA.getSpanId()))); + } + + /** + * Check that when a tracer has a list of exclude names configured, then those + * names are used to filter spans. 
+ */ + public void test_whenTraceStarted_andSpanNameExcluded_thenSpanIsNotStarted() { + final List excludePatterns = List.of( + // exact name + "test-span-name-aaa", + // regex + "test-span-name-b*" + ); + Settings settings = Settings.builder() + .put(APM_ENABLED_SETTING.getKey(), true) + .putList(APM_TRACING_NAMES_EXCLUDE_SETTING.getKey(), excludePatterns) + .build(); + APMTracer apmTracer = buildTracer(settings); + + Traceable traceableA = new TestTraceable("aaa"); + Traceable traceableB = new TestTraceable("bbb"); + Traceable traceableC = new TestTraceable("ccc"); + apmTracer.onTraceStarted(new ThreadContext(settings), traceableA); + apmTracer.onTraceStarted(new ThreadContext(settings), traceableB); + apmTracer.onTraceStarted(new ThreadContext(settings), traceableC); + + assertThat(apmTracer.getSpans(), not(hasKey(traceableA.getSpanId()))); + assertThat(apmTracer.getSpans(), not(hasKey(traceableB.getSpanId()))); + assertThat(apmTracer.getSpans(), hasKey(traceableC.getSpanId())); + } + private APMTracer buildTracer(Settings settings) { return buildTracer(settings, new APMAgentSettings()); } From 76ebf992d1193d66eb179abc349a1bf4a4efce15 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Thu, 12 May 2022 11:07:07 +0100 Subject: [PATCH 77/90] Switch to automaton instead of regexes --- .../elasticsearch/xpack/apm/APMTracer.java | 48 +++++++++++++++---- 1 file changed, 39 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 34d6b99f01ce5..791a53954a314 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -18,6 +18,11 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.lucene.util.automaton.Automata; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.CharacterRunAutomaton; +import org.apache.lucene.util.automaton.Operations; +import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.Version; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractLifecycleComponent; @@ -56,6 +61,7 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic private List includeNames; private List excludeNames; + private volatile CharacterRunAutomaton filterAutomaton; private final APMAgentSettings apmAgentSettings; /** @@ -67,6 +73,7 @@ public APMTracer(Settings settings, ClusterService clusterService, APMAgentSetti this.clusterService = Objects.requireNonNull(clusterService); this.includeNames = APM_TRACING_NAMES_INCLUDE_SETTING.get(settings); this.excludeNames = APM_TRACING_NAMES_EXCLUDE_SETTING.get(settings); + this.filterAutomaton = buildAutomaton(includeNames, excludeNames); this.apmAgentSettings = apmAgentSettings; this.enabled = APM_ENABLED_SETTING.get(settings); @@ -107,10 +114,12 @@ private void setEnabled(boolean enabled) { private void setIncludeNames(List includeNames) { this.includeNames = includeNames; + this.filterAutomaton = buildAutomaton(includeNames, excludeNames); } private void setExcludeNames(List excludeNames) { this.excludeNames = excludeNames; + this.filterAutomaton = buildAutomaton(includeNames, excludeNames); } @Override @@ -156,7 +165,7 @@ public void onTraceStarted(ThreadContext threadContext, 
Traceable traceable) { return; } - if (isSpanNameIncluded(traceable.getSpanName()) == false) { + if (filterAutomaton.run(traceable.getSpanName()) == false) { return; } @@ -294,14 +303,6 @@ public void setAttribute(Traceable traceable, String key, String value) { } } - private boolean isSpanNameIncluded(String name) { - // Alternatively we could use automata here but it is much more complex - // and it needs wrapping like done for use in the security plugin. - final boolean include = includeNames.isEmpty() || Regex.simpleMatch(includeNames, name); - final boolean exclude = excludeNames.isEmpty() == false && Regex.simpleMatch(excludeNames, name); - return include && exclude == false; - } - @Override public void onTraceStopped(Traceable traceable) { final var span = Span.fromContextOrNull(spans.remove(traceable.getSpanId())); @@ -339,4 +340,33 @@ private static boolean isSupportedContextKey(String key) { Map getSpans() { return spans; } + + static CharacterRunAutomaton buildAutomaton(List includeNames, List excludeNames) { + Automaton includeAutomaton = patternsToAutomaton(includeNames); + Automaton excludeAutomaton = patternsToAutomaton(excludeNames); + + if (includeAutomaton == null) { + includeAutomaton = Automata.makeAnyString(); + } + + final Automaton finalAutomaton = excludeAutomaton == null + ? includeAutomaton + : Operations.minus(includeAutomaton, excludeAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + + return new CharacterRunAutomaton(finalAutomaton); + } + + private static Automaton patternsToAutomaton(List patterns) { + final List automata = patterns.stream().map(s -> { + final String regex = s.replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*"); + return new RegExp(regex).toAutomaton(); + }).toList(); + if (automata.isEmpty()) { + return null; + } + if (automata.size() == 1) { + return automata.get(0); + } + return Operations.union(automata); + } } From ccc47c6dbd2d54c650cd300d08030a902a3b27c5 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Thu, 2 Jun 2022 17:22:03 +0100 Subject: [PATCH 78/90] Redact sensitive http headers --- run.sh | 4 ++-- .../org/elasticsearch/rest/RestChannel.java | 10 +++++++++- .../org/elasticsearch/xpack/apm/APMTracer.java | 18 ++++++++++-------- 3 files changed, 21 insertions(+), 11 deletions(-) diff --git a/run.sh b/run.sh index 1caaedf3c09fd..9cc95fd52e80c 100755 --- a/run.sh +++ b/run.sh @@ -23,11 +23,11 @@ fi # Optional - override the agent jar -# OVERRIDE_AGENT_JAR="$HOME/.m2/repository/co/elastic/apm/elastic-apm-agent/1.30.2-SNAPSHOT/elastic-apm-agent-1.30.2-SNAPSHOT.jar" +# OVERRIDE_AGENT_JAR="$HOME/.m2/repository/co/elastic/apm/elastic-apm-agent/1.31.1-SNAPSHOT/elastic-apm-agent-1.31.1-SNAPSHOT.jar" if [[ -n "$OVERRIDE_AGENT_JAR" ]]; then # Copy in WIP agent - cp "$OVERRIDE_AGENT_JAR" "modules/apm-integration/elastic-apm-agent-${AGENT_VERSION}.jar" + cp "$OVERRIDE_AGENT_JAR" "modules/x-pack-apm-integration/elastic-apm-agent-${AGENT_VERSION}.jar" fi # Configure the agent diff --git a/server/src/main/java/org/elasticsearch/rest/RestChannel.java b/server/src/main/java/org/elasticsearch/rest/RestChannel.java index 35feb508fb424..85f2dc5a1cbf7 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestChannel.java +++ b/server/src/main/java/org/elasticsearch/rest/RestChannel.java @@ -16,6 +16,7 @@ import java.io.IOException; import java.util.HashMap; +import java.util.Locale; import java.util.Map; /** @@ -58,7 +59,14 @@ default String getSpanName() { default Map getAttributes() { final RestRequest req = this.request(); Map attributes = new 
HashMap<>(); - req.getHeaders().forEach((key, values) -> attributes.put("http.request.headers." + key, String.join("; ", values))); + req.getHeaders().forEach((key, values) -> { + final String lowerKey = key.toLowerCase(Locale.ROOT).replace('-', '_'); + final String value = switch (lowerKey) { + case "authorization", "cookie", "secret", "session", "set_cookie", "token" -> "[REDACTED]"; + default -> String.join("; ", values); + }; + attributes.put("http.request.headers." + lowerKey, value); + }); attributes.put("http.method", req.method().name()); attributes.put("http.url", req.uri()); switch (req.getHttpRequest().protocolVersion()) { diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 791a53954a314..ae2d9ef6f38d5 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -26,7 +26,6 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -232,18 +231,21 @@ public Releasable withScope(Traceable traceable) { } private void setSpanAttributes(ThreadContext threadContext, Traceable traceable, SpanBuilder spanBuilder) { - for (Map.Entry entry : traceable.getAttributes().entrySet()) { + final Map spanAttributes = traceable.getAttributes(); + + for (Map.Entry entry : spanAttributes.entrySet()) { + final String key = entry.getKey(); final Object value = entry.getValue(); if (value instanceof String) { - spanBuilder.setAttribute(entry.getKey(), (String) value); + spanBuilder.setAttribute(key, (String) value); } else if (value instanceof Long) { - spanBuilder.setAttribute(entry.getKey(), (Long) value); + spanBuilder.setAttribute(key, (Long) value); } else if (value instanceof Integer) { - spanBuilder.setAttribute(entry.getKey(), (Integer) value); + spanBuilder.setAttribute(key, (Integer) value); } else if (value instanceof Double) { - spanBuilder.setAttribute(entry.getKey(), (Double) value); + spanBuilder.setAttribute(key, (Double) value); } else if (value instanceof Boolean) { - spanBuilder.setAttribute(entry.getKey(), (Boolean) value); + spanBuilder.setAttribute(key, (Boolean) value); } else { throw new IllegalArgumentException( "span attributes do not support value type of [" + value.getClass().getCanonicalName() + "]" @@ -251,7 +253,7 @@ private void setSpanAttributes(ThreadContext threadContext, Traceable traceable, } } - final boolean isHttpSpan = traceable.getAttributes().keySet().stream().anyMatch(key -> key.startsWith("http.")); + final boolean isHttpSpan = spanAttributes.keySet().stream().anyMatch(key -> key.startsWith("http.")); spanBuilder.setSpanKind(isHttpSpan ? 
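The redaction added to RestChannel#getAttributes above lower-cases each header name, replaces '-' with '_', and masks a fixed set of sensitive headers before they are recorded as span attributes. A minimal sketch of that behaviour in isolation; the class name and sample header values are illustrative, not part of the patch.

    import java.util.HashMap;
    import java.util.List;
    import java.util.Locale;
    import java.util.Map;

    public class HeaderRedactionSketch {
        // Mirrors the switch in RestChannel#getAttributes: normalise the name, redact known-sensitive headers.
        static Map<String, String> headerAttributes(Map<String, List<String>> headers) {
            Map<String, String> attributes = new HashMap<>();
            headers.forEach((key, values) -> {
                String lowerKey = key.toLowerCase(Locale.ROOT).replace('-', '_');
                String value = switch (lowerKey) {
                    case "authorization", "cookie", "secret", "session", "set_cookie", "token" -> "[REDACTED]";
                    default -> String.join("; ", values);
                };
                attributes.put("http.request.headers." + lowerKey, value);
            });
            return attributes;
        }

        public static void main(String[] args) {
            Map<String, String> attrs = headerAttributes(Map.of(
                "Authorization", List.of("Basic ZWxhc3RpYzpjaGFuZ2VtZQ=="),
                "X-Opaque-Id", List.of("my-request")
            ));
            System.out.println(attrs.get("http.request.headers.authorization")); // [REDACTED]
            System.out.println(attrs.get("http.request.headers.x_opaque_id"));   // my-request
        }
    }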
SpanKind.SERVER : SpanKind.INTERNAL); spanBuilder.setAttribute(Traceable.AttributeKeys.NODE_NAME, clusterService.getNodeName()); From f0dbe4af5890fe4727027b59ffa7b8ef89444d05 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 8 Jun 2022 09:45:15 +0100 Subject: [PATCH 79/90] Post-merge fixes --- .../server/cli/JvmOptionsParser.java | 25 ++++++++----------- .../elasticsearch/server/cli/ServerCli.java | 7 +++--- .../server/cli/ServerProcess.java | 17 +++++++------ .../server/cli/ServerCliTests.java | 8 +++++- .../server/cli/ServerProcessTests.java | 24 ++++++++++++------ .../windows/service/WindowsServiceDaemon.java | 2 +- .../plugin-metadata/plugin-security.policy | 1 + 7 files changed, 49 insertions(+), 35 deletions(-) diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java index 66960fa73bad9..4d6400a8bf1bb 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java @@ -12,7 +12,6 @@ import org.elasticsearch.cli.UserException; import java.io.BufferedReader; -import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; @@ -78,8 +77,8 @@ SortedMap invalidLines() { * @throws IOException if there is a problem reading any of the files * @throws UserException if there is a problem parsing the jvm.options file or jvm.options.d files */ - static List determineJvmOptions(Path configDir, Path pluginsDir, Path tmpDir, String envOptions) throws InterruptedException, - IOException, UserException { + static List determineJvmOptions(Path configDir, Path modulesDir, Path pluginsDir, Path tmpDir, String envOptions) + throws InterruptedException, IOException, UserException { final JvmOptionsParser parser = new JvmOptionsParser(); @@ -88,7 +87,7 @@ static List determineJvmOptions(Path configDir, Path pluginsDir, Path tm substitutions.put("ES_PATH_CONF", configDir.toString()); try { - return parser.jvmOptions(configDir, pluginsDir, envOptions, substitutions); + return parser.jvmOptions(configDir, modulesDir, pluginsDir, envOptions, substitutions); } catch (final JvmOptionsFileParserException e) { final String errorMessage = String.format( Locale.ROOT, @@ -117,17 +116,13 @@ static List determineJvmOptions(Path configDir, Path pluginsDir, Path tm } } - private List jvmOptions(final Path config, Path plugins, final String esJavaOpts, final Map substitutions) - throws InterruptedException, IOException, JvmOptionsFileParserException { - - final Path esHome = Path.of(System.getenv("ES_HOME")); - if (Files.notExists(esHome)) { - throw new RuntimeException("ES_HOME not set or doesn't exist"); - } - Path modules = esHome.resolve("modules"); - if (Files.notExists(modules) || Files.isDirectory(modules) == false) { - throw new RuntimeException("ES_HOME does not point to a valid installation - [modules] not found or not a directory"); - } + private List jvmOptions( + final Path config, + Path modules, + Path plugins, + final String esJavaOpts, + final Map substitutions + ) throws InterruptedException, IOException, JvmOptionsFileParserException { final List jvmOptions = readJvmOptionsFiles(config); diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java 
b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java index 9209440663c79..0a9f6b7eb1021 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java @@ -90,7 +90,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce syncPlugins(terminal, env, processInfo); ServerArgs args = createArgs(options, env, keystorePassword, processInfo); - this.server = startServer(terminal, processInfo, args, env.pluginsFile()); + this.server = startServer(terminal, processInfo, args, env.modulesFile(), env.pluginsFile()); if (options.has(daemonizeOption)) { server.detach(); @@ -221,7 +221,8 @@ protected Command loadTool(String toolname, String libs) { } // protected to allow tests to override - protected ServerProcess startServer(Terminal terminal, ProcessInfo processInfo, ServerArgs args, Path pluginsDir) throws UserException { - return ServerProcess.start(terminal, processInfo, args, pluginsDir); + protected ServerProcess startServer(Terminal terminal, ProcessInfo processInfo, ServerArgs args, Path modulesDir, Path pluginsDir) + throws UserException { + return ServerProcess.start(terminal, processInfo, args, modulesDir, pluginsDir); } } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java index 3c03630b6dd40..040b0741b70a1 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java @@ -36,7 +36,7 @@ /** * A helper to control a {@link Process} running the main Elasticsearch server. * - *

<p> The process can be started by calling {@link #start(Terminal, ProcessInfo, ServerArgs, Path)}.
+ * <p>
The process can be started by calling {@link #start(Terminal, ProcessInfo, ServerArgs, Path, Path)}. * The process is controlled by internally sending arguments and control signals on stdin, * and receiving control signals on stderr. The start method does not return until the * server is ready to process requests and has exited the bootstrap thread. @@ -66,8 +66,8 @@ public class ServerProcess { // this allows mocking the process building by tests interface OptionsBuilder { - List getJvmOptions(Path configDir, Path pluginsDir, Path tmpDir, String envOptions) throws InterruptedException, - IOException, UserException; + List getJvmOptions(Path configDir, Path modulesDir, Path pluginsDir, Path tmpDir, String envOptions) + throws InterruptedException, IOException, UserException; } // this allows mocking the process building by tests @@ -85,8 +85,9 @@ interface ProcessStarter { * @return A running server process that is ready for requests * @throws UserException If the process failed during bootstrap */ - public static ServerProcess start(Terminal terminal, ProcessInfo processInfo, ServerArgs args, Path pluginsDir) throws UserException { - return start(terminal, processInfo, args, pluginsDir, JvmOptionsParser::determineJvmOptions, ProcessBuilder::start); + public static ServerProcess start(Terminal terminal, ProcessInfo processInfo, ServerArgs args, Path modulesDir, Path pluginsDir) + throws UserException { + return start(terminal, processInfo, args, modulesDir, pluginsDir, JvmOptionsParser::determineJvmOptions, ProcessBuilder::start); } // package private so tests can mock options building and process starting @@ -94,6 +95,7 @@ static ServerProcess start( Terminal terminal, ProcessInfo processInfo, ServerArgs args, + Path modulesDir, Path pluginsDir, OptionsBuilder optionsBuilder, ProcessStarter processStarter @@ -103,7 +105,7 @@ static ServerProcess start( boolean success = false; try { - jvmProcess = createProcess(processInfo, args.configDir(), pluginsDir, optionsBuilder, processStarter); + jvmProcess = createProcess(processInfo, args.configDir(), modulesDir, pluginsDir, optionsBuilder, processStarter); errorPump = new ErrorPumpThread(terminal.getErrorWriter(), jvmProcess.getErrorStream()); errorPump.start(); sendArgs(args, jvmProcess.getOutputStream()); @@ -198,6 +200,7 @@ private void sendShutdownMarker() { private static Process createProcess( ProcessInfo processInfo, Path configDir, + Path modulesDir, Path pluginsDir, OptionsBuilder optionsBuilder, ProcessStarter processStarter @@ -208,7 +211,7 @@ private static Process createProcess( envVars.put("LIBFFI_TMPDIR", tempDir.toString()); } - List jvmOptions = optionsBuilder.getJvmOptions(configDir, pluginsDir, tempDir, envVars.remove("ES_JAVA_OPTS")); + List jvmOptions = optionsBuilder.getJvmOptions(configDir, modulesDir, pluginsDir, tempDir, envVars.remove("ES_JAVA_OPTS")); // also pass through distribution type jvmOptions.add("-Des.distribution.type=" + processInfo.sysprops().get("es.distribution.type")); diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java index e7ba031fe9c00..88c3b7d1e903f 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java @@ -423,7 +423,13 @@ protected Command loadTool(String toolname, String libs) { } @Override - protected 
ServerProcess startServer(Terminal terminal, ProcessInfo processInfo, ServerArgs args, Path pluginsDir) { + protected ServerProcess startServer( + Terminal terminal, + ProcessInfo processInfo, + ServerArgs args, + Path modulesDir, + Path pluginsDir + ) { if (argsValidator != null) { argsValidator.accept(args); } diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java index c0ef02d732885..c967cf31aee7e 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java @@ -92,7 +92,7 @@ public void resetEnv() { envVars.clear(); esHomeDir = createTempDir(); nodeSettings = Settings.builder(); - optionsBuilder = (configDir, pluginsDir, tmpDir, envOptions) -> new ArrayList<>(); + optionsBuilder = (configDir, modulesDir, pluginsDir, tmpDir, envOptions) -> new ArrayList<>(); processValidator = null; mainCallback = null; } @@ -201,7 +201,15 @@ ServerProcess startProcess(boolean daemonize, boolean quiet, String keystorePass process = new MockElasticsearchProcess(); return process; }; - return ServerProcess.start(terminal, pinfo, args, esHomeDir.resolve("plugins"), optionsBuilder, starter); + return ServerProcess.start( + terminal, + pinfo, + args, + esHomeDir.resolve("modules"), + esHomeDir.resolve("plugins"), + optionsBuilder, + starter + ); } public void testProcessBuilder() throws Exception { @@ -253,7 +261,7 @@ public void testStartError() throws Exception { } public void testOptionsBuildingInterrupted() throws Exception { - optionsBuilder = (configDir, pluginsDir, tmpDir, envOptions) -> { + optionsBuilder = (configDir, modulesDir, pluginsDir, tmpDir, envOptions) -> { throw new InterruptedException("interrupted while get jvm options"); }; var e = expectThrows(RuntimeException.class, () -> runForeground()); @@ -279,7 +287,7 @@ public void testLibffiEnv() throws Exception { } public void testTempDir() throws Exception { - optionsBuilder = (configDir, pluginsDir, tmpDir, envOptions) -> { + optionsBuilder = (configDir, modulesDir, pluginsDir, tmpDir, envOptions) -> { assertThat(tmpDir.toString(), Files.exists(tmpDir), is(true)); assertThat(tmpDir.getFileName().toString(), startsWith("elasticsearch-")); return new ArrayList<>(); @@ -291,7 +299,7 @@ public void testTempDirWindows() throws Exception { Path baseTmpDir = createTempDir(); sysprops.put("os.name", "Windows 10"); sysprops.put("java.io.tmpdir", baseTmpDir.toString()); - optionsBuilder = (configDir, pluginsDir, tmpDir, envOptions) -> { + optionsBuilder = (configDir, modulesDir, pluginsDir, tmpDir, envOptions) -> { assertThat(tmpDir.toString(), Files.exists(tmpDir), is(true)); assertThat(tmpDir.getFileName().toString(), equalTo("elasticsearch")); assertThat(tmpDir.getParent().toString(), equalTo(baseTmpDir.toString())); @@ -303,7 +311,7 @@ public void testTempDirWindows() throws Exception { public void testTempDirOverride() throws Exception { Path customTmpDir = createTempDir(); envVars.put("ES_TMPDIR", customTmpDir.toString()); - optionsBuilder = (configDir, pluginsDir, tmpDir, envOptions) -> { + optionsBuilder = (configDir, modulesDir, pluginsDir, tmpDir, envOptions) -> { assertThat(tmpDir.toString(), equalTo(customTmpDir.toString())); return new ArrayList<>(); }; @@ -329,7 +337,7 @@ public void testTempDirOverrideNotADirectory() throws Exception { 
public void testCustomJvmOptions() throws Exception { envVars.put("ES_JAVA_OPTS", "-Dmyoption=foo"); - optionsBuilder = (configDir, pluginsDir, tmpDir, envOptions) -> { + optionsBuilder = (configDir, modulesDir, pluginsDir, tmpDir, envOptions) -> { assertThat(envOptions, equalTo("-Dmyoption=foo")); return new ArrayList<>(); }; @@ -338,7 +346,7 @@ public void testCustomJvmOptions() throws Exception { } public void testCommandLineSysprops() throws Exception { - optionsBuilder = (configDir, pluginsDir, tmpDir, envOptions) -> List.of("-Dfoo1=bar", "-Dfoo2=baz"); + optionsBuilder = (configDir, modulesDir, pluginsDir, tmpDir, envOptions) -> List.of("-Dfoo1=bar", "-Dfoo2=baz"); processValidator = pb -> { assertThat(pb.command(), contains("-Dfoo1=bar")); assertThat(pb.command(), contains("-Dfoo2=bar")); diff --git a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java index ebd2e74fddf43..c358d513a74dd 100644 --- a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java +++ b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java @@ -35,7 +35,7 @@ class WindowsServiceDaemon extends EnvironmentAwareCommand { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { var args = new ServerArgs(false, true, null, new SecureString(""), env.settings(), env.configFile()); - this.server = ServerProcess.start(terminal, processInfo, args, env.pluginsFile()); + this.server = ServerProcess.start(terminal, processInfo, args, env.modulesFile(), env.pluginsFile()); // start does not return until the server is ready, and we do not wait for the process } diff --git a/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy b/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy index c2c58659d6f5e..f3e79d4e4c7be 100644 --- a/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy +++ b/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy @@ -14,6 +14,7 @@ grant { }; grant codeBase "${codebase.elastic-apm-agent}" { + permission java.lang.RuntimePermission "getClassLoader"; permission java.lang.RuntimePermission "accessDeclaredMembers"; permission java.lang.RuntimePermission "setContextClassLoader"; permission java.lang.RuntimePermission "setFactory"; From 55772a28a602f9eb6ba1ac2c413daf9a4d23302d Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 8 Jun 2022 11:38:00 +0100 Subject: [PATCH 80/90] Tweak log4j security policy --- .../resources/org/elasticsearch/bootstrap/security.policy | 4 ++++ .../src/main/plugin-metadata/plugin-security.policy | 1 - 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/server/src/main/resources/org/elasticsearch/bootstrap/security.policy b/server/src/main/resources/org/elasticsearch/bootstrap/security.policy index f9b37f65538f2..568b07ea9fa16 100644 --- a/server/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/server/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -71,6 +71,10 @@ grant codeBase "${codebase.jna}" { permission java.lang.RuntimePermission "accessDeclaredMembers"; }; +grant codeBase "${codebase.log4j-api}" { + permission java.lang.RuntimePermission "getClassLoader"; +}; + //// 
Everything else: grant { diff --git a/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy b/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy index f3e79d4e4c7be..c2c58659d6f5e 100644 --- a/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy +++ b/x-pack/plugin/apm-integration/src/main/plugin-metadata/plugin-security.policy @@ -14,7 +14,6 @@ grant { }; grant codeBase "${codebase.elastic-apm-agent}" { - permission java.lang.RuntimePermission "getClassLoader"; permission java.lang.RuntimePermission "accessDeclaredMembers"; permission java.lang.RuntimePermission "setContextClassLoader"; permission java.lang.RuntimePermission "setFactory"; From 8be329f95ff6e4fc8c48700a850c26bcbe04d111 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Tue, 14 Jun 2022 14:41:30 +0100 Subject: [PATCH 81/90] Switch to auto-generating an APM config The only way to pass a secret token to the APM Java agent so that it doesn't show up in the system properties or environment variables is to use a config file. Since we don't want ES operators to have to deploy yet another file, instead generate a config file when preparing the JVM options for Elasticsearch, and delete it again after ES starts up. This allows us to fetch the secret from the keystore, in line with how we handle other secrets. --- .../server/cli/APMJvmOptions.java | 225 ++++++++++++++++ .../server/cli/BootstrapJvmOptions.java | 57 ++-- .../server/cli/JvmOptionsParser.java | 38 ++- .../elasticsearch/server/cli/ServerCli.java | 2 +- .../server/cli/ServerProcess.java | 15 +- .../server/cli/ServerProcessTests.java | 15 +- .../windows/service/WindowsServiceDaemon.java | 2 +- qa/apm/build.gradle | 2 + qa/apm/docker-compose.yml | 13 +- qa/apm/entrypoint.sh | 18 -- .../org/elasticsearch/xpack/apm/ApmIT.java | 16 -- run.sh | 24 +- .../java/org/elasticsearch/node/Node.java | 32 +++ x-pack/plugin/apm-integration/build.gradle | 3 - .../src/main/config/elasticapm.properties | 55 ---- .../src/main/config/jvm.options.d/apm.options | 2 - .../java/org/elasticsearch/xpack/apm/APM.java | 13 +- .../xpack/apm/APMAgentSettings.java | 247 ++++-------------- .../elasticsearch/xpack/apm/APMTracer.java | 41 +-- .../xpack/apm/APMAgentSettingsTests.java | 88 +++++++ .../xpack/apm/APMTracerTests.java | 77 +----- 21 files changed, 508 insertions(+), 477 deletions(-) create mode 100644 distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java delete mode 100755 qa/apm/entrypoint.sh delete mode 100644 x-pack/plugin/apm-integration/src/main/config/elasticapm.properties delete mode 100644 x-pack/plugin/apm-integration/src/main/config/jvm.options.d/apm.options create mode 100644 x-pack/plugin/apm-integration/src/test/java/org/elasticsearch/xpack/apm/APMAgentSettingsTests.java diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java new file mode 100644 index 0000000000000..4183dca9efa45 --- /dev/null +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java @@ -0,0 +1,225 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.server.cli; + +import org.elasticsearch.Version; +import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.cli.UserException; +import org.elasticsearch.common.settings.KeyStoreWrapper; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Properties; + +public class APMJvmOptions { + private static final Map STATIC_CONFIG; + private static final Map CONFIG_DEFAULTS; + + /** + * Lists all APM configuration keys that are not dynamic and must be configured via the config file. + */ + private static final List STATIC_AGENT_KEYS = List.of( + "api_key", + "aws_lambda_handler", + "breakdown_metrics", + "classes_excluded_from_instrumentation", + "cloud_provider", + "data_flush_timeout", + "disable_metrics", + "disable_send", + "enabled", + "enable_public_api_annotation_inheritance", + "environment", + "global_labels", + "hostname", + "include_process_args", + "log_ecs_formatter_allow_list", + "log_ecs_reformatting_additional_fields", + "log_ecs_reformatting_dir", + "log_file", + "log_file_size", + "log_format_file", + "log_format_sout", + "max_queue_size", + "metrics_interval", + "plugins_dir", + "profiling_inferred_spans_lib_directory", + "secret_token", + "service_name", + "service_node_name", + "service_version", + "stress_monitoring_interval", + "trace_methods_duration_threshold", + "use_jaxrs_path_as_transaction_name", + "verify_server_cert" + ); + + static { + STATIC_CONFIG = new HashMap<>(); + + // Required for OpenTelemetry support + STATIC_CONFIG.put("enable_experimental_instrumentations", "true"); + + // Identifies the version of Elasticsearch in the captured trace data. + STATIC_CONFIG.put("service_version", Version.CURRENT.toString()); + + // Configures a log file to write to. `_AGENT_HOME_` is a placeholder used + // by the agent. Don't disable writing to a log file, as the agent will then + // require extra Security Manager permissions when it tries to do something + // else, and it's just painful. + STATIC_CONFIG.put("log_file", "_AGENT_HOME_/../../logs/apm.log"); + + // ES does not use auto-instrumentation. + STATIC_CONFIG.put("instrument", "false"); + + CONFIG_DEFAULTS = new HashMap<>(); + + // This is used to keep all the errors and transactions of a service + // together and is the primary filter in the Elastic APM user interface. + // + // You can optionally also set `service_node_name`, which is used to + // distinguish between different nodes of a service, therefore it should + // be unique for each JVM within a service. If not set, data + // aggregations will be done based on a container ID (where valid) or on + // the reported hostname (automatically discovered or manually + // configured through hostname). However, if this node's `node.name` is + // set, then that value is used for the `service_node_name`. 
+ CONFIG_DEFAULTS.put("service_name", "elasticsearch"); + + // An arbitrary string that identifies this deployment environment. For + // example, "dev", "staging" or "prod". Can be anything you like, but must + // have the same value across different systems in the same deployment + // environment. + CONFIG_DEFAULTS.put("environment", "dev"); + + // Logging configuration. Unless you need detailed logs about what the APM + // is doing, leave this value alone. + CONFIG_DEFAULTS.put("log_level", "error"); + CONFIG_DEFAULTS.put("application_packages", "org.elasticsearch,org.apache.lucene"); + CONFIG_DEFAULTS.put("metrics_interval", "120s"); + CONFIG_DEFAULTS.put("breakdown_metrics", "false"); + CONFIG_DEFAULTS.put("central_config", "false"); + } + + public static List apmJvmOptions(Settings settings, KeyStoreWrapper keystore, Path tmpdir) throws UserException, IOException { + final boolean enabled = settings.getAsBoolean("xpack.apm.tracing.enabled", false); + + if (enabled == false) { + return List.of(); + } + + final Optional agentJar = findAgentJar(); + + if (agentJar.isEmpty()) { + return List.of(); + } + + final Map propertiesMap = extractApmSettings(settings); + + if (propertiesMap.containsKey("service_node_name") == false) { + final String nodeName = settings.get("node.name"); + if (nodeName != null) { + propertiesMap.put("service_node_name", nodeName); + } + } + + if (keystore != null && keystore.getSettingNames().contains("xpack.apm.tracing.secret_token")) { + try (SecureString token = keystore.getString("xpack.apm.tracing.secret_token")) { + propertiesMap.put("secret_token", token.toString()); + } + } + + final Map dynamicSettings = extractDynamicSettings(propertiesMap); + + final File tempFile = writeApmProperties(tmpdir, propertiesMap); + + final List options = new ArrayList<>(); + + // Use an agent argument to specify the config file instead of e.g. `-Delastic.apm.config_file=...` + // because then the agent won't try to reload the file, and we can remove it after startup. + options.add("-javaagent:" + agentJar.get() + "=c=" + tempFile); + + dynamicSettings.forEach((key, value) -> options.add("-Delastic.apm." + key + "=" + value)); + + return options; + } + + private static Map extractDynamicSettings(Map propertiesMap) { + final Map cliOptionsMap = new HashMap<>(); + + final Iterator> propertiesIterator = propertiesMap.entrySet().iterator(); + while (propertiesIterator.hasNext()) { + final Map.Entry entry = propertiesIterator.next(); + if (STATIC_AGENT_KEYS.contains(entry.getKey()) == false) { + propertiesIterator.remove(); + cliOptionsMap.put(entry.getKey(), entry.getValue()); + } + } + + return cliOptionsMap; + } + + private static Map extractApmSettings(Settings settings) throws UserException { + final Map propertiesMap = new HashMap<>(); + + final Settings agentSettings = settings.getByPrefix("xpack.apm.tracing.agent."); + agentSettings.keySet().forEach(key -> propertiesMap.put(key, String.valueOf(agentSettings.get(key)))); + + // These settings must not be changed + for (String key : STATIC_CONFIG.keySet()) { + if (propertiesMap.containsKey(key)) { + throw new UserException( + ExitCodes.CONFIG, + "Do not set a value for [xpack.apm.tracing.agent." 
+ key + "], as this is configured automatically by Elasticsearch" + ); + } + } + + CONFIG_DEFAULTS.forEach(propertiesMap::putIfAbsent); + + propertiesMap.putAll(STATIC_CONFIG); + return propertiesMap; + } + + private static File writeApmProperties(Path tmpdir, Map propertiesMap) throws IOException { + final Properties p = new Properties(); + p.putAll(propertiesMap); + + File tempFile = File.createTempFile(".elstcapm.", ".tmp", tmpdir.toFile()); + try (OutputStream os = new FileOutputStream(tempFile)) { + p.store(os, " Automatically generated by Elasticsearch, do not edit!"); + } + return tempFile; + } + + private static Optional findAgentJar() throws IOException { + final Path apmModule = Path.of("modules/x-pack-apm-integration"); + + if (Files.isDirectory(apmModule) == false) { + return Optional.empty(); + } + + try (var apmStream = Files.list(apmModule)) { + return apmStream.filter(path -> path.getFileName().toString().matches("elastic-apm-agent-\\d+\\.\\d+\\.\\d+\\.jar")) + .findFirst(); + } + } +} diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/BootstrapJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/BootstrapJvmOptions.java index c358a785f9187..699a8b4d850cd 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/BootstrapJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/BootstrapJvmOptions.java @@ -17,7 +17,7 @@ import java.util.List; import java.util.Locale; import java.util.Properties; -import java.util.function.Consumer; +import java.util.stream.Stream; /** * This class looks for plugins whose "type" is "bootstrap". Such plugins @@ -28,26 +28,24 @@ public class BootstrapJvmOptions { private BootstrapJvmOptions() {} - public static List bootstrapJvmOptions(Path modules, Path plugins) throws IOException { - if (Files.isDirectory(modules) == false) { - throw new IllegalArgumentException("Modules path " + modules + " must be a directory"); - } - + public static List bootstrapJvmOptions(Path plugins) throws IOException { if (Files.isDirectory(plugins) == false) { throw new IllegalArgumentException("Plugins path " + plugins + " must be a directory"); } - final List modulesInfo = getPluginInfo(modules); final List pluginInfo = getPluginInfo(plugins); - return generateOptions(modulesInfo, pluginInfo); + return generateOptions(pluginInfo); } // Find all plugins and return their jars and descriptors. 
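Putting the pieces of APMJvmOptions above together: the non-dynamic agent settings (plus the keystore-held secret token) are written to a temporary properties file referenced from the -javaagent argument, while everything else becomes a -Delastic.apm.* flag. Below is a rough sketch of how the method might be exercised; only the method signature comes from the patch, and the settings, paths and expected output are illustrative. It assumes the sketch runs from an Elasticsearch distribution directory so that the agent jar under modules/x-pack-apm-integration can be found (otherwise an empty list is returned).

    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.List;

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.server.cli.APMJvmOptions;

    public class ApmOptionsSketch {
        public static void main(String[] args) throws Exception {
            Settings settings = Settings.builder()
                .put("node.name", "node-1")
                .put("xpack.apm.tracing.enabled", true)
                .put("xpack.apm.tracing.agent.server_url", "https://apm.example.org:8200")
                .put("xpack.apm.tracing.agent.transaction_sample_rate", "0.5")
                .build();

            Path tmpDir = Files.createTempDirectory("es-apm-sketch");

            // A null keystore is tolerated by the implementation above; the secret token is then simply omitted.
            List<String> options = APMJvmOptions.apmJvmOptions(settings, null, tmpDir);
            options.forEach(System.out::println);

            // Expected to contain, among other -Delastic.apm.* defaults, something like:
            //   -javaagent:modules/x-pack-apm-integration/elastic-apm-agent-<version>.jar=c=<tmpDir>/.elstcapm.<random>.tmp
            //   -Delastic.apm.server_url=https://apm.example.org:8200
            //   -Delastic.apm.transaction_sample_rate=0.5
        }
    }

Further down in this patch, Node#deleteTemporaryApmConfig removes the temporary file once the agent has read it, so the secret token does not linger on disk and never appears in the process arguments.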
private static List getPluginInfo(Path plugins) throws IOException { final List pluginInfo = new ArrayList<>(); - final List pluginDirs = Files.list(plugins).toList(); + final List pluginDirs; + try (Stream pluginDirStream = Files.list(plugins)) { + pluginDirs = pluginDirStream.toList(); + } for (Path pluginDir : pluginDirs) { if (Files.isDirectory(pluginDir) == false) { @@ -56,7 +54,10 @@ private static List getPluginInfo(Path plugins) throws IOException { final List jarFiles = new ArrayList<>(); final Properties props = new Properties(); - final List pluginFiles = Files.list(pluginDir).toList(); + final List pluginFiles; + try (Stream pluginFileStream = Files.list(pluginDir)) { + pluginFiles = pluginFileStream.toList(); + } for (Path pluginFile : pluginFiles) { final String lowerCaseName = pluginFile.getFileName().toString().toLowerCase(Locale.ROOT); @@ -80,33 +81,33 @@ private static List getPluginInfo(Path plugins) throws IOException { } // package-private for testing - static List generateOptions(List modulesInfo, List pluginInfo) { + static List generateOptions(List pluginInfo) { final List bootstrapJars = new ArrayList<>(); - final List extraJavaOptions = new ArrayList<>(); - - // Add any additional Java CLI options. This could contain any number of options, - // but we don't attempt to split them up as all JVM options are concatenated together - // anyway + final List bootstrapOptions = new ArrayList<>(); - final Consumer infoConsumer = info -> { + for (PluginInfo info : pluginInfo) { final String type = info.properties.getProperty("type", "isolated").toLowerCase(Locale.ROOT); + if (type.equals("bootstrap")) { bootstrapJars.addAll(info.jarFiles); - } - final String javaOpts = info.properties.getProperty("java.opts", ""); - if (javaOpts.isBlank() == false) { - extraJavaOptions.add(javaOpts); - } - }; - modulesInfo.forEach(infoConsumer); - pluginInfo.forEach(infoConsumer); + // Add any additional Java CLI options. 
This could contain any number of options, + // but we don't attempt to split them up as all JVM options are concatenated together + // anyway + final String javaOpts = info.properties.getProperty("java.opts", ""); + if (javaOpts.isBlank() == false) { + bootstrapOptions.add(javaOpts); + } + } + } - if (bootstrapJars.isEmpty() == false) { - extraJavaOptions.add("-Xbootclasspath/a:" + String.join(":", bootstrapJars)); + if (bootstrapJars.isEmpty()) { + return List.of(); } - return extraJavaOptions; + bootstrapOptions.add("-Xbootclasspath/a:" + String.join(":", bootstrapJars)); + + return bootstrapOptions; } // package-private for testing diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java index 4d6400a8bf1bb..2bd38c8a07d09 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java @@ -8,8 +8,10 @@ package org.elasticsearch.server.cli; +import org.elasticsearch.bootstrap.ServerArgs; import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.UserException; +import org.elasticsearch.common.settings.KeyStoreWrapper; import java.io.BufferedReader; import java.io.IOException; @@ -20,6 +22,7 @@ import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; +import java.security.GeneralSecurityException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -77,7 +80,7 @@ SortedMap invalidLines() { * @throws IOException if there is a problem reading any of the files * @throws UserException if there is a problem parsing the jvm.options file or jvm.options.d files */ - static List determineJvmOptions(Path configDir, Path modulesDir, Path pluginsDir, Path tmpDir, String envOptions) + static List determineJvmOptions(ServerArgs args, Path configDir, Path pluginsDir, Path tmpDir, String envOptions) throws InterruptedException, IOException, UserException { final JvmOptionsParser parser = new JvmOptionsParser(); @@ -87,7 +90,7 @@ static List determineJvmOptions(Path configDir, Path modulesDir, Path pl substitutions.put("ES_PATH_CONF", configDir.toString()); try { - return parser.jvmOptions(configDir, modulesDir, pluginsDir, envOptions, substitutions); + return parser.jvmOptions(args, configDir, pluginsDir, envOptions, substitutions); } catch (final JvmOptionsFileParserException e) { final String errorMessage = String.format( Locale.ROOT, @@ -117,14 +120,14 @@ static List determineJvmOptions(Path configDir, Path modulesDir, Path pl } private List jvmOptions( - final Path config, - Path modules, + ServerArgs args, + final Path configDir, Path plugins, final String esJavaOpts, final Map substitutions - ) throws InterruptedException, IOException, JvmOptionsFileParserException { + ) throws InterruptedException, IOException, JvmOptionsFileParserException, UserException { - final List jvmOptions = readJvmOptionsFiles(config); + final List jvmOptions = readJvmOptionsFiles(configDir); if (esJavaOpts != null) { jvmOptions.addAll(Arrays.stream(esJavaOpts.split("\\s+")).filter(Predicate.not(String::isBlank)).toList()); @@ -134,18 +137,31 @@ private List jvmOptions( final MachineDependentHeap machineDependentHeap = new MachineDependentHeap( new OverridableSystemMemoryInfo(substitutedJvmOptions, new DefaultSystemMemoryInfo()) ); - 
substitutedJvmOptions.addAll(machineDependentHeap.determineHeapSettings(config, substitutedJvmOptions)); + substitutedJvmOptions.addAll(machineDependentHeap.determineHeapSettings(configDir, substitutedJvmOptions)); final List ergonomicJvmOptions = JvmErgonomics.choose(substitutedJvmOptions); final List systemJvmOptions = SystemJvmOptions.systemJvmOptions(); - final List bootstrapOptions = BootstrapJvmOptions.bootstrapJvmOptions(modules, plugins); + final List bootstrapOptions = BootstrapJvmOptions.bootstrapJvmOptions(plugins); + + final List apmOptions; + try (KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(configDir)) { + if (keyStoreWrapper != null) { + try { + keyStoreWrapper.decrypt(args.keystorePassword().clone().getChars()); + } catch (GeneralSecurityException e) { + throw new RuntimeException("Failed to decrypt keystore: " + e.getMessage(), e); + } + } + apmOptions = APMJvmOptions.apmJvmOptions(args.nodeSettings(), keyStoreWrapper, Path.of(substitutions.get("ES_TMPDIR"))); + } - final List finalJvmOptions = new ArrayList<>( - systemJvmOptions.size() + substitutedJvmOptions.size() + ergonomicJvmOptions.size() + bootstrapOptions.size() - ); + final int numOptions = systemJvmOptions.size() + substitutedJvmOptions.size() + ergonomicJvmOptions.size() + bootstrapOptions.size() + + apmOptions.size(); + final List finalJvmOptions = new ArrayList<>(numOptions); finalJvmOptions.addAll(systemJvmOptions); // add the system JVM options first so that they can be overridden finalJvmOptions.addAll(substitutedJvmOptions); finalJvmOptions.addAll(ergonomicJvmOptions); finalJvmOptions.addAll(bootstrapOptions); + finalJvmOptions.addAll(apmOptions); return finalJvmOptions; } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java index 0a9f6b7eb1021..accd646e2c2ea 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java @@ -223,6 +223,6 @@ protected Command loadTool(String toolname, String libs) { // protected to allow tests to override protected ServerProcess startServer(Terminal terminal, ProcessInfo processInfo, ServerArgs args, Path modulesDir, Path pluginsDir) throws UserException { - return ServerProcess.start(terminal, processInfo, args, modulesDir, pluginsDir); + return ServerProcess.start(terminal, processInfo, args, pluginsDir); } } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java index 040b0741b70a1..c87ebe07acf1c 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java @@ -36,7 +36,7 @@ /** * A helper to control a {@link Process} running the main Elasticsearch server. * - *
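For context on the keystore handling above: JvmOptionsParser now opens and decrypts the Elasticsearch keystore with the password carried in ServerArgs, so that APMJvmOptions can read xpack.apm.tracing.secret_token without the token ever being placed in a system property. A standalone sketch of that lookup using the stock KeyStoreWrapper API; the class name, the empty password and the printed message are illustrative.

    import java.nio.file.Path;

    import org.elasticsearch.common.settings.KeyStoreWrapper;
    import org.elasticsearch.common.settings.SecureString;

    public class ApmTokenSketch {
        public static void main(String[] args) throws Exception {
            Path configDir = Path.of(args[0]); // e.g. the node's config directory
            try (KeyStoreWrapper keystore = KeyStoreWrapper.load(configDir)) {
                if (keystore == null) {
                    return; // no keystore present
                }
                keystore.decrypt(new char[0]); // a real caller passes the user-supplied keystore password
                if (keystore.getSettingNames().contains("xpack.apm.tracing.secret_token")) {
                    try (SecureString token = keystore.getString("xpack.apm.tracing.secret_token")) {
                        // The token only ever ends up in the generated agent properties file.
                        System.out.println("secret token is " + token.length() + " characters long");
                    }
                }
            }
        }
    }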

<p> The process can be started by calling {@link #start(Terminal, ProcessInfo, ServerArgs, Path, Path)}.
+ * <p>
The process can be started by calling {@link #start(Terminal, ProcessInfo, ServerArgs, Path)}. * The process is controlled by internally sending arguments and control signals on stdin, * and receiving control signals on stderr. The start method does not return until the * server is ready to process requests and has exited the bootstrap thread. @@ -66,7 +66,7 @@ public class ServerProcess { // this allows mocking the process building by tests interface OptionsBuilder { - List getJvmOptions(Path configDir, Path modulesDir, Path pluginsDir, Path tmpDir, String envOptions) + List getJvmOptions(ServerArgs args, Path configDir, Path pluginsDir, Path tmpDir, String envOptions) throws InterruptedException, IOException, UserException; } @@ -85,9 +85,9 @@ interface ProcessStarter { * @return A running server process that is ready for requests * @throws UserException If the process failed during bootstrap */ - public static ServerProcess start(Terminal terminal, ProcessInfo processInfo, ServerArgs args, Path modulesDir, Path pluginsDir) + public static ServerProcess start(Terminal terminal, ProcessInfo processInfo, ServerArgs args, Path pluginsDir) throws UserException { - return start(terminal, processInfo, args, modulesDir, pluginsDir, JvmOptionsParser::determineJvmOptions, ProcessBuilder::start); + return start(terminal, processInfo, args, pluginsDir, JvmOptionsParser::determineJvmOptions, ProcessBuilder::start); } // package private so tests can mock options building and process starting @@ -95,7 +95,6 @@ static ServerProcess start( Terminal terminal, ProcessInfo processInfo, ServerArgs args, - Path modulesDir, Path pluginsDir, OptionsBuilder optionsBuilder, ProcessStarter processStarter @@ -105,7 +104,7 @@ static ServerProcess start( boolean success = false; try { - jvmProcess = createProcess(processInfo, args.configDir(), modulesDir, pluginsDir, optionsBuilder, processStarter); + jvmProcess = createProcess(args, processInfo, args.configDir(), pluginsDir, optionsBuilder, processStarter); errorPump = new ErrorPumpThread(terminal.getErrorWriter(), jvmProcess.getErrorStream()); errorPump.start(); sendArgs(args, jvmProcess.getOutputStream()); @@ -198,9 +197,9 @@ private void sendShutdownMarker() { } private static Process createProcess( + ServerArgs args, ProcessInfo processInfo, Path configDir, - Path modulesDir, Path pluginsDir, OptionsBuilder optionsBuilder, ProcessStarter processStarter @@ -211,7 +210,7 @@ private static Process createProcess( envVars.put("LIBFFI_TMPDIR", tempDir.toString()); } - List jvmOptions = optionsBuilder.getJvmOptions(configDir, modulesDir, pluginsDir, tempDir, envVars.remove("ES_JAVA_OPTS")); + List jvmOptions = optionsBuilder.getJvmOptions(args, configDir, pluginsDir, tempDir, envVars.remove("ES_JAVA_OPTS")); // also pass through distribution type jvmOptions.add("-Des.distribution.type=" + processInfo.sysprops().get("es.distribution.type")); diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java index c967cf31aee7e..1f1d86f6f1c71 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java @@ -92,7 +92,7 @@ public void resetEnv() { envVars.clear(); esHomeDir = createTempDir(); nodeSettings = Settings.builder(); - optionsBuilder = (configDir, modulesDir, pluginsDir, 
tmpDir, envOptions) -> new ArrayList<>(); + optionsBuilder = (args, configDir, pluginsDir, tmpDir, envOptions) -> new ArrayList<>(); processValidator = null; mainCallback = null; } @@ -205,7 +205,6 @@ ServerProcess startProcess(boolean daemonize, boolean quiet, String keystorePass terminal, pinfo, args, - esHomeDir.resolve("modules"), esHomeDir.resolve("plugins"), optionsBuilder, starter @@ -261,7 +260,7 @@ public void testStartError() throws Exception { } public void testOptionsBuildingInterrupted() throws Exception { - optionsBuilder = (configDir, modulesDir, pluginsDir, tmpDir, envOptions) -> { + optionsBuilder = (args, configDir, pluginsDir, tmpDir, envOptions) -> { throw new InterruptedException("interrupted while get jvm options"); }; var e = expectThrows(RuntimeException.class, () -> runForeground()); @@ -287,7 +286,7 @@ public void testLibffiEnv() throws Exception { } public void testTempDir() throws Exception { - optionsBuilder = (configDir, modulesDir, pluginsDir, tmpDir, envOptions) -> { + optionsBuilder = (args, configDir, pluginsDir, tmpDir, envOptions) -> { assertThat(tmpDir.toString(), Files.exists(tmpDir), is(true)); assertThat(tmpDir.getFileName().toString(), startsWith("elasticsearch-")); return new ArrayList<>(); @@ -299,7 +298,7 @@ public void testTempDirWindows() throws Exception { Path baseTmpDir = createTempDir(); sysprops.put("os.name", "Windows 10"); sysprops.put("java.io.tmpdir", baseTmpDir.toString()); - optionsBuilder = (configDir, modulesDir, pluginsDir, tmpDir, envOptions) -> { + optionsBuilder = (args, configDir, pluginsDir, tmpDir, envOptions) -> { assertThat(tmpDir.toString(), Files.exists(tmpDir), is(true)); assertThat(tmpDir.getFileName().toString(), equalTo("elasticsearch")); assertThat(tmpDir.getParent().toString(), equalTo(baseTmpDir.toString())); @@ -311,7 +310,7 @@ public void testTempDirWindows() throws Exception { public void testTempDirOverride() throws Exception { Path customTmpDir = createTempDir(); envVars.put("ES_TMPDIR", customTmpDir.toString()); - optionsBuilder = (configDir, modulesDir, pluginsDir, tmpDir, envOptions) -> { + optionsBuilder = (args, configDir, pluginsDir, tmpDir, envOptions) -> { assertThat(tmpDir.toString(), equalTo(customTmpDir.toString())); return new ArrayList<>(); }; @@ -337,7 +336,7 @@ public void testTempDirOverrideNotADirectory() throws Exception { public void testCustomJvmOptions() throws Exception { envVars.put("ES_JAVA_OPTS", "-Dmyoption=foo"); - optionsBuilder = (configDir, modulesDir, pluginsDir, tmpDir, envOptions) -> { + optionsBuilder = (args, configDir, pluginsDir, tmpDir, envOptions) -> { assertThat(envOptions, equalTo("-Dmyoption=foo")); return new ArrayList<>(); }; @@ -346,7 +345,7 @@ public void testCustomJvmOptions() throws Exception { } public void testCommandLineSysprops() throws Exception { - optionsBuilder = (configDir, modulesDir, pluginsDir, tmpDir, envOptions) -> List.of("-Dfoo1=bar", "-Dfoo2=baz"); + optionsBuilder = (args, configDir, pluginsDir, tmpDir, envOptions) -> List.of("-Dfoo1=bar", "-Dfoo2=baz"); processValidator = pb -> { assertThat(pb.command(), contains("-Dfoo1=bar")); assertThat(pb.command(), contains("-Dfoo2=bar")); diff --git a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java index c358d513a74dd..ebd2e74fddf43 100644 --- 
a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java +++ b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java @@ -35,7 +35,7 @@ class WindowsServiceDaemon extends EnvironmentAwareCommand { @Override public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { var args = new ServerArgs(false, true, null, new SecureString(""), env.settings(), env.configFile()); - this.server = ServerProcess.start(terminal, processInfo, args, env.modulesFile(), env.pluginsFile()); + this.server = ServerProcess.start(terminal, processInfo, args, env.pluginsFile()); // start does not return until the server is ready, and we do not wait for the process } diff --git a/qa/apm/build.gradle b/qa/apm/build.gradle index 245f13422c1ef..98c0f22e46aa4 100644 --- a/qa/apm/build.gradle +++ b/qa/apm/build.gradle @@ -22,6 +22,8 @@ dependencies { dockerCompose { environment.put 'STACK_VERSION', VersionProperties.elasticsearch + removeContainers = false + retainContainersOnStartupFailure = false } elasticsearch_distributions { diff --git a/qa/apm/docker-compose.yml b/qa/apm/docker-compose.yml index 85bdecff53c86..2dad6a67a79f5 100644 --- a/qa/apm/docker-compose.yml +++ b/qa/apm/docker-compose.yml @@ -31,16 +31,12 @@ services: options: max-file: "5" max-size: 2m - # ports: - # - 127.0.0.1:8220:8220 - # - 127.0.0.1:8200:8200 volumes: - /var/run/docker.sock:/var/run/docker.sock - ./scripts/tls/apmserver/cert.crt:/usr/share/apmserver/config/certs/tls.crt - ./scripts/tls/apmserver/key.pem:/usr/share/apmserver/config/certs/tls.key elasticsearch: - entrypoint: /entrypoint.sh environment: - action.destructive_requires_name=false - bootstrap.memory_lock=true @@ -49,8 +45,8 @@ services: - discovery.type=single-node - ES_JAVA_OPTS=-Xms1g -Xmx1g - indices.id_field_data.enabled=true + - ingest.geoip.downloader.enabled=false - path.repo=/usr/share/elasticsearch/data/backups - - xpack.apm.tracing.enabled=true - xpack.license.self_generated.type=trial - xpack.monitoring.collection.enabled=true - xpack.security.authc.anonymous.roles=remote_monitoring_collector @@ -59,6 +55,13 @@ services: - xpack.security.authc.realms.native.native1.order=1 - xpack.security.authc.token.enabled=true - xpack.security.enabled=true + # APM specific settings. We don't configure `secret_key` because Kibana is configured with a blank key + - xpack.apm.tracing.enabled=true + - xpack.apm.tracing.agent.server_url=http://apmserver:8200 + # Send traces to APM server aggressively + - xpack.apm.tracing.agent.metrics_interval=1s + # Record everything + - xpack.apm.tracing.agent.transaction_sample_rate=1 healthcheck: interval: 20s retries: 10 diff --git a/qa/apm/entrypoint.sh b/qa/apm/entrypoint.sh deleted file mode 100755 index 24fb4316fb91c..0000000000000 --- a/qa/apm/entrypoint.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -# Custom entrypoint to generate the `elasticapm.properties` file. This is a -# script instead of a static file so that the `service_version` can be set -# correctly, although for the purposes of this test, that may be pointless. 
- -set -eo pipefail - -cd /usr/share/elasticsearch/ - -sed -i -e ' - s|enabled: .*|enabled: true| - s|# server_url: .*|server_url: http://apmserver:8200| - s|# secret_token:.*|secret_token: | - s|metrics_interval:.*|metrics_interval: 1s| -' config/elasticapm.properties - -exec /usr/local/bin/docker-entrypoint.sh diff --git a/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java b/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java index 114e1a2e31558..28154ae869a57 100644 --- a/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java +++ b/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -9,16 +9,13 @@ package org.elasticsearch.xpack.apm; import org.elasticsearch.client.Request; -import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; -import org.elasticsearch.client.WarningsHandler; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.CheckedRunnable; import org.elasticsearch.test.rest.ESRestTestCase; -import org.junit.Before; import java.io.IOException; import java.util.List; @@ -33,19 +30,6 @@ */ public class ApmIT extends ESRestTestCase { - @Before - public void configureTracing() throws IOException { - final RequestOptions requestOptions = RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE).build(); - - final Request request = new Request("PUT", "/_cluster/settings"); - request.setOptions(requestOptions); - // The default sample rate is lower, meaning the traces that want to record might be skipped. - request.setJsonEntity(""" - { "persistent": { "xpack.apm.tracing.agent.transaction_sample_rate": "1.0" } } - """); - assertOK(client().performRequest(request)); - } - /** * Check that if we send HTTP traffic to Elasticsearch, then traces are captured in APM server. The traces are generated in * a separate Docker container, which continually fetches `/_notes/stats`. diff --git a/run.sh b/run.sh index 9cc95fd52e80c..5517de43073c9 100755 --- a/run.sh +++ b/run.sh @@ -6,24 +6,33 @@ set -eo pipefail export JAVA_HOME='' AGENT_VERSION=$(awk '/apm_agent/ { print $3 }' build-tools-internal/version.properties) +ES_VERSION=$(awk '/^elasticsearch/ { print $3 }' build-tools-internal/version.properties) # This is the path that `./gradlew localDistro` prints out at the end -cd build/distribution/local/elasticsearch-8.3.0-SNAPSHOT +cd "build/distribution/local/elasticsearch-${ES_VERSION}-SNAPSHOT" # URL and token for sending traces SERVER_URL="" SECRET_TOKEN="" +if ! grep -q xpack.apm.tracing.enabled config/elasticsearch.yml ; then + echo "xpack.apm.tracing.enabled: true" >> config/elasticsearch.yml +fi +if ! grep -q xpack.apm.tracing.agent.server_url config/elasticsearch.yml ; then + echo "xpack.apm.tracing.agent.server_url: $SERVER_URL" >> config/elasticsearch.yml +fi + # Configure the ES keystore, so that we can use `elastic:password` for REST # requests if [[ ! 
-f config/elasticsearch.keystore ]]; then ./bin/elasticsearch-keystore create - echo "password" | ./bin/elasticsearch-keystore add -x 'bootstrap.password' + echo "password" | ./bin/elasticsearch-keystore add -x 'bootstrap.password' + echo "$SECRET_TOKEN" | ./bin/elasticsearch-keystore add -x "xpack.apm.tracing.secret_token" fi # Optional - override the agent jar -# OVERRIDE_AGENT_JAR="$HOME/.m2/repository/co/elastic/apm/elastic-apm-agent/1.31.1-SNAPSHOT/elastic-apm-agent-1.31.1-SNAPSHOT.jar" +OVERRIDE_AGENT_JAR="$HOME/.m2/repository/co/elastic/apm/elastic-apm-agent/1.31.1-SNAPSHOT/elastic-apm-agent-1.31.1-SNAPSHOT.jar" if [[ -n "$OVERRIDE_AGENT_JAR" ]]; then # Copy in WIP agent @@ -34,17 +43,20 @@ fi #  1. Enable the agent # 2. Set the server URL # 3. Set the secret token -perl -p -i -e " s|enabled: false|enabled: true| ; s|# server_url.*|server_url: $SERVER_URL| ; s|# secret_token.*|secret_token: $SECRET_TOKEN|" config/elasticapm.properties +# perl -p -i -e " s|enabled: false|enabled: true| ; s|# server_url.*|server_url: $SERVER_URL| ; s|# secret_token.*|secret_token: $SECRET_TOKEN|" config/elasticapm.properties # perl -p -i -e " s|log_level: error|log_level: debug| " config/elasticapm.properties # Require a debugger on 5007 in order to run: # export ES_JAVA_OPTS="-agentlib:jdwp=transport=dt_socket,server=n,suspend=y,address=*:5007 " # Just run but expose a debugging server on 5007 -# export ES_JAVA_OPTS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5007 " +# export ES_JAVA_OPTS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5007 " # Hardcore security manager debugging # export ES_JAVA_OPTS="-Djava.security.debug=failure" # export ES_JAVA_OPTS="-Djava.security.debug=access,failure" -exec ./bin/elasticsearch -Expack.apm.tracing.enabled=true -Eingest.geoip.downloader.enabled=false +# export ES_JAVA_OPTS=" -ea " + +# exec ./bin/elasticsearch -Expack.apm.tracing.enabled=true -Eingest.geoip.downloader.enabled=false +exec ./bin/elasticsearch -Eingest.geoip.downloader.enabled=false diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index b12b568ea7b01..ce1d96d971fa1 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -387,6 +387,8 @@ protected Node( ); } + deleteTemporaryApmConfig(jvmInfo); + this.pluginsService = pluginServiceCtor.apply(tmpSettings); final Settings settings = mergePluginSettings(pluginsService.pluginMap(), tmpSettings); @@ -1036,6 +1038,36 @@ protected Node( } } + /** + * If the JVM was started with the Elastic APM agent and a config file argument was specified, then + * delete the config file. The agent only reads it once, when supplied in this fashion, and it + * may contain a secret token. 
+ */ + private void deleteTemporaryApmConfig(JvmInfo jvmInfo) { + for (String inputArgument : jvmInfo.getInputArguments()) { + if (inputArgument.startsWith("-javaagent:")) { + final String agentArg = inputArgument.substring(11); + final String[] parts = agentArg.split("=", 2); + if (parts[0].matches("modules/x-pack-apm-integration/elastic-apm-agent-\\d+\\.\\d+\\.\\d+\\.jar")) { + if (parts.length == 2 && parts[1].startsWith("c=")) { + final Path apmConfig = Path.of(parts[1].substring(2)); + if (apmConfig.getFileName().toString().matches("^\\.elstcapm\\..*\\.tmp")) { + try { + Files.deleteIfExists(apmConfig); + } catch (IOException e) { + logger.error( + "Failed to delete temporary APM config file [" + apmConfig + "], reason: [" + e.getMessage() + "]", + e + ); + } + } + } + return; + } + } + } + } + private Tracer getTracer(Collection pluginComponents) { final List tracers = pluginComponents.stream().map(c -> c instanceof Tracer t ? t : null).filter(Objects::nonNull).toList(); diff --git a/x-pack/plugin/apm-integration/build.gradle b/x-pack/plugin/apm-integration/build.gradle index b1225f69bd0dc..ef5aa4ade0928 100644 --- a/x-pack/plugin/apm-integration/build.gradle +++ b/x-pack/plugin/apm-integration/build.gradle @@ -5,15 +5,12 @@ * 2.0. */ apply plugin: 'elasticsearch.internal-es-plugin' -// apply plugin: 'elasticsearch.internal-cluster-test' -// apply plugin: 'elasticsearch.internal-test-artifact' esplugin { name 'x-pack-apm-integration' description 'Provides APM integration for Elasticsearch' classname 'org.elasticsearch.xpack.apm.APM' extendedPlugins = ['x-pack-core'] - javaOpts = "-javaagent:modules/x-pack-apm-integration/elastic-apm-agent-${versions.apm_agent}.jar" } dependencies { diff --git a/x-pack/plugin/apm-integration/src/main/config/elasticapm.properties b/x-pack/plugin/apm-integration/src/main/config/elasticapm.properties deleted file mode 100644 index b88785bbba3a1..0000000000000 --- a/x-pack/plugin/apm-integration/src/main/config/elasticapm.properties +++ /dev/null @@ -1,55 +0,0 @@ -# Set to `true` to enable the APM agent. Setting it to `false` completely -# disables the agent. You can toggle it on and off at runtime using the -# `recording` setting. -enabled: false - -# Identifies the version of Elasticsearch in the captured trace data. -service_version: @es.version@ - -# ES does not use auto-instrumentation. -instrument: false - -# Required for OpenTelemetry support -enable_experimental_instrumentations: true - -# Configures the APM server to which traces will be sent. -# server_url: - -# Configures the token to present to the server for authn / authz. -# secret_token: - -# This is used to keep all the errors and transactions of your service -# together and is the primary filter in the Elastic APM user interface. -service_name: elasticsearch - -# If set, this name is used to distinguish between different nodes of a -# service, therefore it should be unique for each JVM within a service. If -# not set, data aggregations will be done based on a container ID (where -# valid) or on the reported hostname (automatically discovered or manually -# configured through hostname). -# -# service_node_name: node1 - -# An arbitrary string that identifies this deployment environment. For -# example, "dev", "staging" or "prod". Can be anything you like, but must -# have the same value across different systems in the same deployment -# environment. -environment: dev - -# Logging configuration. Unless you need detailed logs about what the APM -# is doing, leave this value alone. 
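To make the matching in deleteTemporaryApmConfig above concrete, the JVM argument it looks for is the one assembled by APMJvmOptions: the agent jar under modules/x-pack-apm-integration followed by "=c=" and the temporary config file. A small sketch against an illustrative argument (the version number and paths are made up):

    import java.nio.file.Path;

    public class ApmAgentArgSketch {
        public static void main(String[] args) {
            String inputArgument = "-javaagent:modules/x-pack-apm-integration/elastic-apm-agent-1.31.0.jar"
                + "=c=/tmp/elasticsearch-1234567890/.elstcapm.987654321.tmp";

            String agentArg = inputArgument.substring("-javaagent:".length());
            String[] parts = agentArg.split("=", 2);
            boolean isApmAgent = parts[0].matches("modules/x-pack-apm-integration/elastic-apm-agent-\\d+\\.\\d+\\.\\d+\\.jar");
            if (isApmAgent && parts.length == 2 && parts[1].startsWith("c=")) {
                Path apmConfig = Path.of(parts[1].substring(2));
                // Prints true: the file name matches the pattern used by APMJvmOptions, so the node deletes it.
                System.out.println(apmConfig.getFileName().toString().matches("^\\.elstcapm\\..*\\.tmp"));
            }
        }
    }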
-log_level: error - -# Configures a log file to write to. `_AGENT_HOME_` is a placeholder used -# by the agent. Don't disable writing to a log file, as the agent will then -# require extra Security Manager permissions and it's just painful. -log_file: _AGENT_HOME_/../../logs/apm.log - -application_packages: org.elasticsearch,org.apache.lucene - -# Enable if you want APM to poll the APM server for updated configuration -central_config: false - -metrics_interval: 120s - -breakdown_metrics: false diff --git a/x-pack/plugin/apm-integration/src/main/config/jvm.options.d/apm.options b/x-pack/plugin/apm-integration/src/main/config/jvm.options.d/apm.options deleted file mode 100644 index 0b5afdccfdf2f..0000000000000 --- a/x-pack/plugin/apm-integration/src/main/config/jvm.options.d/apm.options +++ /dev/null @@ -1,2 +0,0 @@ -# Elastic APM agent config file --Delastic.apm.config_file=${ES_PATH_CONF}/elasticapm.properties diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java index 6817ee9db3bbb..5bb5fea7a119f 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java @@ -50,8 +50,14 @@ public Collection createComponents( IndexNameExpressionResolver indexNameExpressionResolver, Supplier repositoriesServiceSupplier ) { - tracer.set(new APMTracer(settings, clusterService, new APMAgentSettings())); - return List.of(tracer.get()); + final APMAgentSettings apmAgentSettings = new APMAgentSettings(); + final APMTracer apmTracer = new APMTracer(settings, clusterService); + + apmAgentSettings.syncAgentSystemProperties(settings); + apmAgentSettings.addClusterSettingsListeners(clusterService, apmTracer); + + tracer.set(apmTracer); + return List.of(apmTracer); } @Override @@ -60,7 +66,8 @@ public List> getSettings() { APMAgentSettings.APM_ENABLED_SETTING, APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING, APMAgentSettings.APM_TRACING_NAMES_EXCLUDE_SETTING, - APMAgentSettings.APM_AGENT_SETTINGS + APMAgentSettings.APM_AGENT_SETTINGS, + APMAgentSettings.APM_TOKEN_SETTING ); } } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java index dc2210942254f..3be1acee75474 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java @@ -9,8 +9,12 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.Assertions; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.SuppressForbidden; import java.security.AccessController; @@ -34,10 +38,41 @@ class APMAgentSettings { */ // tag::noformat static Map APM_AGENT_DEFAULT_SETTINGS = Map.of( - "transaction_sample_rate", "0.5" + "transaction_sample_rate", "0.2" ); // end::noformat + void addClusterSettingsListeners(ClusterService clusterService, APMTracer apmTracer) { + final ClusterSettings 
clusterSettings = clusterService.getClusterSettings(); + clusterSettings.addSettingsUpdateConsumer(APM_ENABLED_SETTING, enabled -> { + apmTracer.setEnabled(enabled); + // The agent records data other than spans, e.g. JVM metrics, so we toggle this setting in order to + // minimise its impact to a running Elasticsearch. + this.setAgentSetting("recording", Boolean.toString(enabled)); + }); + clusterSettings.addSettingsUpdateConsumer(APM_TRACING_NAMES_INCLUDE_SETTING, apmTracer::setIncludeNames); + clusterSettings.addSettingsUpdateConsumer(APM_TRACING_NAMES_EXCLUDE_SETTING, apmTracer::setExcludeNames); + clusterSettings.addAffixMapUpdateConsumer(APM_AGENT_SETTINGS, map -> map.forEach(this::setAgentSetting), (x, y) -> {}); + } + + void syncAgentSystemProperties(Settings settings) { + this.setAgentSetting("recording", Boolean.toString(APM_ENABLED_SETTING.get(settings))); + + // Apply default values for some system properties. Although we configure + // the settings in APM_AGENT_DEFAULT_SETTINGS to defer to the default values, they won't + // do anything if those settings are never configured. + APM_AGENT_DEFAULT_SETTINGS.keySet() + .forEach( + key -> this.setAgentSetting( + key, + APM_AGENT_SETTINGS.getConcreteSetting(APM_AGENT_SETTINGS.getKey() + key).get(settings) + ) + ); + + // Then apply values from the settings in the cluster state + APM_AGENT_SETTINGS.getAsMap(settings).forEach(this::setAgentSetting); + } + @SuppressForbidden(reason = "Need to be able to manipulate APM agent-related properties to set them dynamically") void setAgentSetting(String key, String value) { final String completeKey = "elastic.apm." + Objects.requireNonNull(key); @@ -53,190 +88,15 @@ void setAgentSetting(String key, String value) { }); } - /** - * Lists all known APM agent configuration keys. 
- */ - private static final List AGENT_KEYS = List.of( - // Circuit-Breaker configuration options - "circuit_breaker_enabled", - "stress_monitoring_interval", - "stress_monitor_gc_stress_threshold", - "stress_monitor_gc_relief_threshold", - "stress_monitor_cpu_duration_threshold", - "stress_monitor_system_cpu_stress_threshold", - "stress_monitor_system_cpu_relief_threshold", - - // Core configuration options - "recording", - "enabled", - "instrument", - "service_name", - "service_node_name", - "service_version", - "hostname", - "environment", - "transaction_sample_rate", - "transaction_max_spans", - "sanitize_field_names", - "enable_instrumentations", - "disable_instrumentations", - "enable_experimental_instrumentations", - "unnest_exceptions", - "ignore_exceptions", - "capture_body", - "capture_headers", - "global_labels", - "classes_excluded_from_instrumentation", - "trace_methods", - "trace_methods_duration_threshold", - "central_config", - "breakdown_metrics", - "config_file", - "plugins_dir", - "use_elastic_traceparent_header", - "span_min_duration", - "cloud_provider", - "enable_public_api_annotation_inheritance", - - // HTTP configuration options - "capture_body_content_types", - "transaction_ignore_urls", - "transaction_ignore_user_agents", - "use_path_as_transaction_name", - "url_groups", - - // Huge Traces configuration options - "span_compression_enabled", - "span_compression_exact_match_max_duration", - "span_compression_same_kind_max_duration", - "exit_span_min_duration", - - // JAX-RS configuration options - "enable_jaxrs_annotation_inheritance", - "use_jaxrs_path_as_transaction_name", - - // JMX configuration options - "capture_jmx_metrics", - - // Logging configuration options - "log_level", - "log_file", - "log_ecs_reformatting", - "log_ecs_reformatting_additional_fields", - "log_ecs_formatter_allow_list", - "log_ecs_reformatting_dir", - "log_file_size", - "log_format_sout", - "log_format_file", - - // Messaging configuration options - "ignore_message_queues", - - // Metrics configuration options - "dedot_custom_metrics", - - // Profiling configuration options - "profiling_inferred_spans_enabled", - "profiling_inferred_spans_sampling_interval", - "profiling_inferred_spans_min_duration", - "profiling_inferred_spans_included_classes", - "profiling_inferred_spans_excluded_classes", - "profiling_inferred_spans_lib_directory", - - // Reporter configuration options - "secret_token", - "api_key", - "server_url", - "server_urls", - "disable_send", - "server_timeout", - "verify_server_cert", - "max_queue_size", - "include_process_args", - "api_request_time", - "api_request_size", - "metrics_interval", - "disable_metrics", - - // Serverless configuration options - "aws_lambda_handler", - "data_flush_timeout", - - // Stacktrace configuration options - "application_packages", - "stack_trace_limit", - "span_stack_trace_min_duration" - ); - - /** - * Lists all APM configuration keys that are not dynamic and must be configured via the config file. 
- */ - private static final List STATIC_AGENT_KEYS = List.of( - "enabled", - "service_name", - "service_node_name", - "service_version", - "hostname", - "environment", - "global_labels", - "trace_methods_duration_threshold", - "breakdown_metrics", - "plugins_dir", - "cloud_provider", - "stress_monitoring_interval", - "log_ecs_reformatting_additional_fields", - "log_ecs_formatter_allow_list", - "log_ecs_reformatting_dir", - "log_file_size", - "log_format_sout", - "log_format_file", - "profiling_inferred_spans_lib_directory", - "secret_token", - "api_key", - "verify_server_cert", - "max_queue_size", - "include_process_args", - "metrics_interval", - "disable_metrics", - "data_flush_timeout" - ); + private static final String APM_SETTING_PREFIX = "xpack.apm.tracing."; - /** - * Lists APM agent configuration keys that cannot be configured via the cluster settings REST API. - * This may be because the setting's value must not be changed at runtime, or because it relates - * to a feature that is not required for tracing with Elasticsearch, but which configuring could - * impact performance. - */ private static final List PROHIBITED_AGENT_KEYS = List.of( - // ES doesn't use dynamic instrumentation - "instrument", - "enable_instrumentations", - "disable_instrumentations", - "classes_excluded_from_instrumentation", - "enable_public_api_annotation_inheritance", - - // We don't use JAX-RS - "enable_jaxrs_annotation_inheritance", - "use_jaxrs_path_as_transaction_name", - - // Must be enabled to use OpenTelemetry - "enable_experimental_instrumentations", - - // For now, we don't use central config - "central_config", - - // Config file path can't be changed + // ES generates a config file and sets this value "config_file", - - // The use case for capturing traces but not sending them doesn't apply to ES - "disable_send", - - // We don't run ES in an AWS Lambda - "aws_lambda_handler" + // ES controls this via `xpack.apm.tracing.enabled` + "recording" ); - static final String APM_SETTING_PREFIX = "xpack.apm.tracing."; - static final Setting.AffixSetting APM_AGENT_SETTINGS = Setting.prefixKeySetting( APM_SETTING_PREFIX + "agent.", (qualifiedKey) -> { @@ -244,27 +104,9 @@ void setAgentSetting(String key, String value) { final String key = parts[parts.length - 1]; final String defaultValue = APM_AGENT_DEFAULT_SETTINGS.getOrDefault(key, ""); return new Setting<>(qualifiedKey, defaultValue, (value) -> { - // The `Setting` constructor asserts that a setting's parser doesn't return null when called with the default - // value. This makes less sense for prefix settings, but is particularly problematic here since we validate - // the setting name and reject unknown keys. Thus, if assertions are enabled, we have to tolerate the "_na_" key, - // which comes from `Setting#prefixKeySetting()`. 
- if (Assertions.ENABLED && qualifiedKey.equals("_na_")) { - return value; - } - if (AGENT_KEYS.contains(key) == false) { - throw new IllegalArgumentException("Unknown APM configuration key: [" + qualifiedKey + "]"); - } - if (STATIC_AGENT_KEYS.contains(key)) { - throw new IllegalArgumentException( - "Cannot set [" - + qualifiedKey - + "] as it is not a dynamic setting - configure it via [config/elasticapm.properties] instead" - ); - } if (PROHIBITED_AGENT_KEYS.contains(key)) { - throw new IllegalArgumentException("Configuring [" + qualifiedKey + "] is prohibited with Elasticsearch"); + throw new IllegalArgumentException("Explicitly configuring [" + qualifiedKey + "] is prohibited"); } - return value; }, Setting.Property.NodeScope, Setting.Property.OperatorDynamic); } @@ -292,4 +134,9 @@ void setAgentSetting(String key, String value) { OperatorDynamic, NodeScope ); + + static final Setting APM_TOKEN_SETTING = SecureSetting.secureString( + APM_SETTING_PREFIX + "secret_token", + null + ); } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index ae2d9ef6f38d5..4f07e82607fe8 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -26,7 +26,6 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -39,11 +38,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_AGENT_DEFAULT_SETTINGS; -import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_AGENT_SETTINGS; import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_ENABLED_SETTING; import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_TRACING_NAMES_EXCLUDE_SETTING; import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING; @@ -61,49 +57,22 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic private List includeNames; private List excludeNames; private volatile CharacterRunAutomaton filterAutomaton; - private final APMAgentSettings apmAgentSettings; /** * This class is required to make all open telemetry services visible at once */ record APMServices(Tracer tracer, OpenTelemetry openTelemetry) {} - public APMTracer(Settings settings, ClusterService clusterService, APMAgentSettings apmAgentSettings) { - this.clusterService = Objects.requireNonNull(clusterService); + public APMTracer(Settings settings, ClusterService clusterService) { this.includeNames = APM_TRACING_NAMES_INCLUDE_SETTING.get(settings); this.excludeNames = APM_TRACING_NAMES_EXCLUDE_SETTING.get(settings); this.filterAutomaton = buildAutomaton(includeNames, excludeNames); - this.apmAgentSettings = apmAgentSettings; - this.enabled = APM_ENABLED_SETTING.get(settings); - this.apmAgentSettings.setAgentSetting("recording", Boolean.toString(this.enabled)); - - // Apply default values for some system properties. 
Although we configure - // the settings in APM_AGENT_DEFAULT_SETTINGS to defer to the default values, they won't - // do anything if those settings are never configured. - APM_AGENT_DEFAULT_SETTINGS.keySet() - .forEach( - key -> apmAgentSettings.setAgentSetting( - key, - APM_AGENT_SETTINGS.getConcreteSetting(APM_AGENT_SETTINGS.getKey() + key).get(settings) - ) - ); - - // Then apply values from the settings in the cluster state - APM_AGENT_SETTINGS.getAsMap(settings).forEach(apmAgentSettings::setAgentSetting); - - final ClusterSettings clusterSettings = clusterService.getClusterSettings(); - clusterSettings.addSettingsUpdateConsumer(APM_ENABLED_SETTING, this::setEnabled); - clusterSettings.addSettingsUpdateConsumer(APM_TRACING_NAMES_INCLUDE_SETTING, this::setIncludeNames); - clusterSettings.addSettingsUpdateConsumer(APM_TRACING_NAMES_EXCLUDE_SETTING, this::setExcludeNames); - clusterSettings.addAffixMapUpdateConsumer(APM_AGENT_SETTINGS, map -> map.forEach(apmAgentSettings::setAgentSetting), (x, y) -> {}); + this.clusterService = clusterService; } - private void setEnabled(boolean enabled) { + void setEnabled(boolean enabled) { this.enabled = enabled; - // The agent records data other than spans, e.g. JVM metrics, so we toggle this setting in order to - // minimise its impact to a running Elasticsearch. - this.apmAgentSettings.setAgentSetting("recording", Boolean.toString(enabled)); if (enabled) { this.services = createApmServices(); } else { @@ -111,12 +80,12 @@ private void setEnabled(boolean enabled) { } } - private void setIncludeNames(List includeNames) { + void setIncludeNames(List includeNames) { this.includeNames = includeNames; this.filterAutomaton = buildAutomaton(includeNames, excludeNames); } - private void setExcludeNames(List excludeNames) { + void setExcludeNames(List excludeNames) { this.excludeNames = excludeNames; this.filterAutomaton = buildAutomaton(includeNames, excludeNames); } diff --git a/x-pack/plugin/apm-integration/src/test/java/org/elasticsearch/xpack/apm/APMAgentSettingsTests.java b/x-pack/plugin/apm-integration/src/test/java/org/elasticsearch/xpack/apm/APMAgentSettingsTests.java new file mode 100644 index 0000000000000..c49c71718f382 --- /dev/null +++ b/x-pack/plugin/apm-integration/src/test/java/org/elasticsearch/xpack/apm/APMAgentSettingsTests.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.apm; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_AGENT_SETTINGS; +import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_ENABLED_SETTING; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +public class APMAgentSettingsTests extends ESTestCase { + + /** + * Check that when the tracer is enabled, it also sets the APM agent's recording system property to true. 
+ */ + public void test_whenTracerEnabled_setsRecordingProperty() { + APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); + Settings settings = Settings.builder().put(APM_ENABLED_SETTING.getKey(), true).build(); + apmAgentSettings.syncAgentSystemProperties(settings); + + verify(apmAgentSettings).setAgentSetting("recording", "true"); + } + + /** + * Check that when the tracer is disabled, it also sets the APM agent's recording system property to false. + */ + public void test_whenTracerDisabled_setsRecordingProperty() { + APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); + Settings settings = Settings.builder().put(APM_ENABLED_SETTING.getKey(), false).build(); + apmAgentSettings.syncAgentSystemProperties(settings); + + verify(apmAgentSettings).setAgentSetting("recording", "false"); + } + + /** + * Check that when cluster settings are synchronised with the system properties, default values are + * applied. + */ + public void test_whenTracerCreated_defaultSettingsApplied() { + APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); + Settings settings = Settings.builder().put(APM_ENABLED_SETTING.getKey(), true).build(); + apmAgentSettings.syncAgentSystemProperties(settings); + + verify(apmAgentSettings).setAgentSetting("transaction_sample_rate", "0.2"); + } + + /** + * Check that when cluster settings are synchronised with the system properties, values in the settings + * are reflected in the system properties, overwriting default values. + */ + public void test_whenTracerCreated_clusterSettingsOverrideDefaults() { + APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); + Settings settings = Settings.builder() + .put(APM_ENABLED_SETTING.getKey(), true) + .put(APM_AGENT_SETTINGS.getKey() + "transaction_sample_rate", "0.75") + .build(); + apmAgentSettings.syncAgentSystemProperties(settings); + + // This happens twice because we first apply the default settings, whose values are overridden + // from the cluster settings, then we apply all the APM-agent related settings, not just the + // ones with default values. Although there is some redundancy here, it only happens at startup + // for a very small number of settings. + verify(apmAgentSettings, times(2)).setAgentSetting("transaction_sample_rate", "0.75"); + } + + /** + * Check that when cluster settings are synchronised with the system properties, agent settings other + * than those with default values are set. 
+ */ + public void test_whenTracerCreated_clusterSettingsAlsoApplied() { + APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); + Settings settings = Settings.builder() + .put(APM_ENABLED_SETTING.getKey(), true) + .put(APM_AGENT_SETTINGS.getKey() + "span_compression_enabled", "true") + .build(); + apmAgentSettings.syncAgentSystemProperties(settings); + + verify(apmAgentSettings).setAgentSetting("span_compression_enabled", "true"); + } +} diff --git a/x-pack/plugin/apm-integration/src/test/java/org/elasticsearch/xpack/apm/APMTracerTests.java b/x-pack/plugin/apm-integration/src/test/java/org/elasticsearch/xpack/apm/APMTracerTests.java index 08190c2bbd371..661a5868ee8be 100644 --- a/x-pack/plugin/apm-integration/src/test/java/org/elasticsearch/xpack/apm/APMTracerTests.java +++ b/x-pack/plugin/apm-integration/src/test/java/org/elasticsearch/xpack/apm/APMTracerTests.java @@ -20,7 +20,6 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_AGENT_SETTINGS; import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_ENABLED_SETTING; import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_TRACING_NAMES_EXCLUDE_SETTING; import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING; @@ -29,11 +28,7 @@ import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; -import static org.mockito.Mockito.atLeastOnce; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class APMTracerTests extends ESTestCase { @@ -95,61 +90,6 @@ public void test_onTraceStopped_stopsTrace() { assertThat(apmTracer.getSpans(), anEmptyMap()); } - /** - * Check that when the tracer starts, it applies the default values for some agent settings to the system properties. - */ - public void test_whenTracerCreated_defaultSettingsApplied() { - APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); - Settings settings = Settings.builder().put(APM_ENABLED_SETTING.getKey(), true).build(); - buildTracer(settings, apmAgentSettings); - - verify(apmAgentSettings).setAgentSetting("transaction_sample_rate", "0.5"); - } - - /** - * Check that when the tracer starts and applies the default agent setting values the system properties, their values - * are overridden from the cluster settings, if the cluster settings contain values for those agent settings. - */ - public void test_whenTracerCreated_clusterSettingsOverrideDefaults() { - APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); - Settings settings = Settings.builder() - .put(APM_ENABLED_SETTING.getKey(), true) - .put(APM_AGENT_SETTINGS.getKey() + "transaction_sample_rate", "0.75") - .build(); - buildTracer(settings, apmAgentSettings); - - // This happens twice because we first apply the default settings, whose values are overridden - // from the cluster settings, then we apply all the APM-agent related settings, not just the - // ones with default values. Although there is some redundancy here, it only happens at startup - // for a very small number of settings. - verify(apmAgentSettings, times(2)).setAgentSetting("transaction_sample_rate", "0.75"); - } - - /** - * Check that when the tracer starts, it applies all other agent settings to the system properties. 
- */ - public void test_whenTracerCreated_clusterSettingsAlsoApplied() { - APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); - Settings settings = Settings.builder() - .put(APM_ENABLED_SETTING.getKey(), true) - .put(APM_AGENT_SETTINGS.getKey() + "span_compression_enabled", "true") - .build(); - buildTracer(settings, apmAgentSettings); - - verify(apmAgentSettings).setAgentSetting("span_compression_enabled", "true"); - } - - /** - * Check that when the tracer is enabled, it also sets the APM agent's recording system property to true. - */ - public void test_whenTracerEnabled_setsRecordingProperty() { - APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); - Settings settings = Settings.builder().put(APM_ENABLED_SETTING.getKey(), true).build(); - buildTracer(settings, apmAgentSettings); - - verify(apmAgentSettings).setAgentSetting("recording", "true"); - } - /** * Check that when a trace is started, then the thread context is updated with tracing information. *

@@ -167,17 +107,6 @@ public void test_whenTraceStarted_threadContextIsPopulated() { assertThat(threadContext.getTransient(Task.APM_TRACE_CONTEXT), notNullValue()); } - /** - * Check that when the tracer is disabled, it also sets the APM agent's recording system property to false. - */ - public void test_whenTracerDisabled_setsRecordingProperty() { - APMAgentSettings apmAgentSettings = spy(new APMAgentSettings()); - Settings settings = Settings.builder().put(APM_ENABLED_SETTING.getKey(), false).build(); - buildTracer(settings, apmAgentSettings); - - verify(apmAgentSettings, atLeastOnce()).setAgentSetting("recording", "false"); - } - /** * Check that when a tracer has a list of include names configured, then those * names are used to filter spans. @@ -257,17 +186,13 @@ public void test_whenTraceStarted_andSpanNameExcluded_thenSpanIsNotStarted() { } private APMTracer buildTracer(Settings settings) { - return buildTracer(settings, new APMAgentSettings()); - } - - private APMTracer buildTracer(Settings settings, APMAgentSettings apmAgentSettings) { APM apm = new APM(settings); ClusterService clusterService = mock(ClusterService.class); when(clusterService.getClusterSettings()).thenReturn(new ClusterSettings(settings, new HashSet<>(apm.getSettings()))); when(clusterService.getClusterName()).thenReturn(new ClusterName("testCluster")); - APMTracer tracer = new APMTracer(settings, clusterService, apmAgentSettings); + APMTracer tracer = new APMTracer(settings, clusterService); tracer.doStart(); return tracer; } From feb3b3f5fe32e0d33e24ab0289ae8d6454f6b32c Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Tue, 14 Jun 2022 16:20:38 +0100 Subject: [PATCH 82/90] Shorten APM settings prefix --- TRACING.md | 6 +++--- .../org/elasticsearch/server/cli/APMJvmOptions.java | 10 +++++----- qa/apm/docker-compose.yml | 8 ++++---- run.sh | 12 ++++++------ .../elasticsearch/xpack/apm/APMAgentSettings.java | 4 ++-- 5 files changed, 20 insertions(+), 20 deletions(-) diff --git a/TRACING.md b/TRACING.md index 7359a2c96d08d..1f0d146b6c0b2 100644 --- a/TRACING.md +++ b/TRACING.md @@ -17,21 +17,21 @@ Elasticsearch to hard-code the use of an SDK. ## How is tracing configured? - * The `xpack.apm.tracing.enabled` setting must be set to `true` + * The `xpack.apm.enabled` setting must be set to `true` * The APM agent must be both enabled and configured with server credentials. See below. We have a config file in [`config/elasticapm.properties`][config], which configures settings that are not dynamic, or should not be changed at runtime. Other settings can be configured at runtime by using the cluster settings API, -and setting `xpack.apm.tracing.agent.` with a string value, where `` +and setting `xpack.apm.agent.` with a string value, where `` is the APM agent key that you want to configure. 
For example, to change the sampling rate: curl -XPUT \ -H "Content-type: application/json" \ -u "$USERNAME:$PASSWORD" \ - -d '{ "persistent": { "xpack.apm.tracing.agent.transaction_sample_rate": "0.75" } }' \ + -d '{ "persistent": { "xpack.apm.agent.transaction_sample_rate": "0.75" } }' \ https://localhost:9200/_cluster/settings ### More details about configuration diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java index 4183dca9efa45..0fe146c814ae5 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java @@ -120,7 +120,7 @@ public class APMJvmOptions { } public static List apmJvmOptions(Settings settings, KeyStoreWrapper keystore, Path tmpdir) throws UserException, IOException { - final boolean enabled = settings.getAsBoolean("xpack.apm.tracing.enabled", false); + final boolean enabled = settings.getAsBoolean("xpack.apm.enabled", false); if (enabled == false) { return List.of(); @@ -141,8 +141,8 @@ public static List apmJvmOptions(Settings settings, KeyStoreWrapper keys } } - if (keystore != null && keystore.getSettingNames().contains("xpack.apm.tracing.secret_token")) { - try (SecureString token = keystore.getString("xpack.apm.tracing.secret_token")) { + if (keystore != null && keystore.getSettingNames().contains("xpack.apm.secret_token")) { + try (SecureString token = keystore.getString("xpack.apm.secret_token")) { propertiesMap.put("secret_token", token.toString()); } } @@ -180,7 +180,7 @@ private static Map extractDynamicSettings(Map pr private static Map extractApmSettings(Settings settings) throws UserException { final Map propertiesMap = new HashMap<>(); - final Settings agentSettings = settings.getByPrefix("xpack.apm.tracing.agent."); + final Settings agentSettings = settings.getByPrefix("xpack.apm.agent."); agentSettings.keySet().forEach(key -> propertiesMap.put(key, String.valueOf(agentSettings.get(key)))); // These settings must not be changed @@ -188,7 +188,7 @@ private static Map extractApmSettings(Settings settings) throws if (propertiesMap.containsKey(key)) { throw new UserException( ExitCodes.CONFIG, - "Do not set a value for [xpack.apm.tracing.agent." + key + "], as this is configured automatically by Elasticsearch" + "Do not set a value for [xpack.apm.agent." + key + "], as this is configured automatically by Elasticsearch" ); } } diff --git a/qa/apm/docker-compose.yml b/qa/apm/docker-compose.yml index 2dad6a67a79f5..4229ce47173e0 100644 --- a/qa/apm/docker-compose.yml +++ b/qa/apm/docker-compose.yml @@ -56,12 +56,12 @@ services: - xpack.security.authc.token.enabled=true - xpack.security.enabled=true # APM specific settings. 
We don't configure `secret_key` because Kibana is configured with a blank key - - xpack.apm.tracing.enabled=true - - xpack.apm.tracing.agent.server_url=http://apmserver:8200 + - xpack.apm.enabled=true + - xpack.apm.agent.server_url=http://apmserver:8200 # Send traces to APM server aggressively - - xpack.apm.tracing.agent.metrics_interval=1s + - xpack.apm.agent.metrics_interval=1s # Record everything - - xpack.apm.tracing.agent.transaction_sample_rate=1 + - xpack.apm.agent.transaction_sample_rate=1 healthcheck: interval: 20s retries: 10 diff --git a/run.sh b/run.sh index 5517de43073c9..a9c953fbd76a1 100755 --- a/run.sh +++ b/run.sh @@ -15,11 +15,11 @@ cd "build/distribution/local/elasticsearch-${ES_VERSION}-SNAPSHOT" SERVER_URL="" SECRET_TOKEN="" -if ! grep -q xpack.apm.tracing.enabled config/elasticsearch.yml ; then - echo "xpack.apm.tracing.enabled: true" >> config/elasticsearch.yml +if ! grep -q xpack.apm.enabled config/elasticsearch.yml ; then + echo "xpack.apm.enabled: true" >> config/elasticsearch.yml fi -if ! grep -q xpack.apm.tracing.agent.server_url config/elasticsearch.yml ; then - echo "xpack.apm.tracing.agent.server_url: $SERVER_URL" >> config/elasticsearch.yml +if ! grep -q xpack.apm.agent.server_url config/elasticsearch.yml ; then + echo "xpack.apm.agent.server_url: $SERVER_URL" >> config/elasticsearch.yml fi # Configure the ES keystore, so that we can use `elastic:password` for REST @@ -27,7 +27,7 @@ fi if [[ ! -f config/elasticsearch.keystore ]]; then ./bin/elasticsearch-keystore create echo "password" | ./bin/elasticsearch-keystore add -x 'bootstrap.password' - echo "$SECRET_TOKEN" | ./bin/elasticsearch-keystore add -x "xpack.apm.tracing.secret_token" + echo "$SECRET_TOKEN" | ./bin/elasticsearch-keystore add -x "xpack.apm.secret_token" fi @@ -58,5 +58,5 @@ fi # export ES_JAVA_OPTS=" -ea " -# exec ./bin/elasticsearch -Expack.apm.tracing.enabled=true -Eingest.geoip.downloader.enabled=false +# exec ./bin/elasticsearch -Expack.apm.enabled=true -Eingest.geoip.downloader.enabled=false exec ./bin/elasticsearch -Eingest.geoip.downloader.enabled=false diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java index 3be1acee75474..396d7fdac07d7 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java @@ -88,12 +88,12 @@ void setAgentSetting(String key, String value) { }); } - private static final String APM_SETTING_PREFIX = "xpack.apm.tracing."; + private static final String APM_SETTING_PREFIX = "xpack.apm."; private static final List PROHIBITED_AGENT_KEYS = List.of( // ES generates a config file and sets this value "config_file", - // ES controls this via `xpack.apm.tracing.enabled` + // ES controls this via `xpack.apm.enabled` "recording" ); From 762d6fee9e302c3488ae8bc53f4b051f7ec505ca Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Tue, 14 Jun 2022 16:24:50 +0100 Subject: [PATCH 83/90] Bump APM agent to 1.32.0 --- build-tools-internal/version.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 02d3a1a702c31..1fadcf2004f64 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -56,5 +56,5 @@ jimfs_guava = 30.1-jre 
networknt_json_schema_validator = 1.0.48 # tracing -apm_agent = 1.30.1 +apm_agent = 1.32.0 opentelemetry = 1.12.0 From 9b2685d611caaf7b86dca2b091485e2e49231218 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 15 Jun 2022 11:05:46 +0100 Subject: [PATCH 84/90] Upgrade opentelemetry --- build-tools-internal/version.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 1fadcf2004f64..8e5a9b9e0fbeb 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -57,4 +57,4 @@ networknt_json_schema_validator = 1.0.48 # tracing apm_agent = 1.32.0 -opentelemetry = 1.12.0 +opentelemetry = 1.14.0 From e21346cff9841e69d07da31aad9fee45cc1c47ed Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 15 Jun 2022 11:07:03 +0100 Subject: [PATCH 85/90] Javadoc --- .../server/cli/APMJvmOptions.java | 67 +++++++++++++++---- .../org/elasticsearch/tracing/Tracer.java | 4 ++ .../java/org/elasticsearch/xpack/apm/APM.java | 26 ++++++- .../xpack/apm/APMAgentSettings.java | 26 ++++++- .../elasticsearch/xpack/apm/APMTracer.java | 46 +++++++++++-- 5 files changed, 146 insertions(+), 23 deletions(-) diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java index 0fe146c814ae5..b074fe9da05f4 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.KeyStoreWrapper; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; import java.io.File; import java.io.FileOutputStream; @@ -29,8 +30,19 @@ import java.util.Optional; import java.util.Properties; -public class APMJvmOptions { +/** + * This class is responsible for working out if APM tracing is enabled, and if so, preparing + * a temporary config file for the APM Java agent and CLI options to the JVM to configure APM. + */ +class APMJvmOptions { + /** + * Contains agent configuration that must always be applied, and cannot be overridden. + */ private static final Map STATIC_CONFIG; + + /** + * Contains default configuration that will be used unless overridden by explicit configuration. + */ private static final Map CONFIG_DEFAULTS; /** @@ -75,9 +87,6 @@ public class APMJvmOptions { static { STATIC_CONFIG = new HashMap<>(); - // Required for OpenTelemetry support - STATIC_CONFIG.put("enable_experimental_instrumentations", "true"); - // Identifies the version of Elasticsearch in the captured trace data. STATIC_CONFIG.put("service_version", Version.CURRENT.toString()); @@ -119,7 +128,17 @@ public class APMJvmOptions { CONFIG_DEFAULTS.put("central_config", "false"); } - public static List apmJvmOptions(Settings settings, KeyStoreWrapper keystore, Path tmpdir) throws UserException, IOException { + /** + * This method works out if APM tracing is enabled, and if so, prepares a temporary config file + * for the APM Java agent and CLI options to the JVM to configure APM. The config file is temporary + * because it will be deleted once Elasticsearch starts. 
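+     * <p>
+     * As a rough illustration (the exact values are hypothetical and depend on the node's configuration),
+     * the returned options are expected to look something like:
+     * <pre>{@code
+     * -javaagent:modules/x-pack-apm-integration/elastic-apm-agent-1.32.0.jar=c=/tmp/.elstcapm.1234abcd.tmp
+     * -Delastic.apm.transaction_sample_rate=0.5
+     * }</pre>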
+ * + * @param settings the Elasticsearch settings to consider + * @param keystore a wrapper to access the keystore, or null if there is no keystore + * @param tmpdir Elasticsearch's temporary directory, where the config file will be written + */ + static List apmJvmOptions(Settings settings, @Nullable KeyStoreWrapper keystore, Path tmpdir) throws UserException, + IOException { final boolean enabled = settings.getAsBoolean("xpack.apm.enabled", false); if (enabled == false) { @@ -141,18 +160,12 @@ public static List apmJvmOptions(Settings settings, KeyStoreWrapper keys } } - if (keystore != null && keystore.getSettingNames().contains("xpack.apm.secret_token")) { - try (SecureString token = keystore.getString("xpack.apm.secret_token")) { - propertiesMap.put("secret_token", token.toString()); - } - } - + extractSecureSettings(keystore, propertiesMap); final Map dynamicSettings = extractDynamicSettings(propertiesMap); final File tempFile = writeApmProperties(tmpdir, propertiesMap); final List options = new ArrayList<>(); - // Use an agent argument to specify the config file instead of e.g. `-Delastic.apm.config_file=...` // because then the agent won't try to reload the file, and we can remove it after startup. options.add("-javaagent:" + agentJar.get() + "=c=" + tempFile); @@ -162,6 +175,22 @@ public static List apmJvmOptions(Settings settings, KeyStoreWrapper keys return options; } + private static void extractSecureSettings(KeyStoreWrapper keystore, Map propertiesMap) { + if (keystore != null) { + for (String key : List.of("api_key", "secret_token")) { + if (keystore.getSettingNames().contains("xpack.apm." + key)) { + try (SecureString token = keystore.getString("xpack.apm." + key)) { + propertiesMap.put(key, token.toString()); + } + } + } + } + } + + /** + * Removes settings that can be changed dynamically at runtime from the supplied map, and returns + * those settings in a new map. + */ private static Map extractDynamicSettings(Map propertiesMap) { final Map cliOptionsMap = new HashMap<>(); @@ -199,6 +228,14 @@ private static Map extractApmSettings(Settings settings) throws return propertiesMap; } + /** + * Writes a Java properties file with data from supplied map to a temporary config, and returns + * the file that was created. + * @param tmpdir the directory for the file + * @param propertiesMap the data to write + * @return the file that was created + * @throws IOException if writing the file fails + */ private static File writeApmProperties(Path tmpdir, Map propertiesMap) throws IOException { final Properties p = new Properties(); p.putAll(propertiesMap); @@ -210,6 +247,12 @@ private static File writeApmProperties(Path tmpdir, Map properti return tempFile; } + /** + * The JVM argument that configure the APM agent needs to specify the agent jar path, so this method + * finds the jar by inspecting the filesystem. + * @return the agent jar file + * @throws IOException if a problem occurs reading the filesystem + */ private static Optional findAgentJar() throws IOException { final Path apmModule = Path.of("modules/x-pack-apm-integration"); diff --git a/server/src/main/java/org/elasticsearch/tracing/Tracer.java b/server/src/main/java/org/elasticsearch/tracing/Tracer.java index 0afa37e1d49a5..bae8ca0613042 100644 --- a/server/src/main/java/org/elasticsearch/tracing/Tracer.java +++ b/server/src/main/java/org/elasticsearch/tracing/Tracer.java @@ -43,6 +43,10 @@ public interface Tracer { Releasable withScope(Traceable traceable); + /** + * A Tracer implementation that does nothing. 
This is used when no tracer is configured, + * in order to avoid null checks everywhere. + */ Tracer NOOP = new Tracer() { @Override public void onTraceStarted(ThreadContext threadContext, Traceable traceable) {} diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java index 5bb5fea7a119f..77fe3527e5a90 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APM.java @@ -28,6 +28,26 @@ import java.util.List; import java.util.function.Supplier; +/** + * This module integrates Elastic's APM product with Elasticsearch. Elasticsearch has + * a {@link org.elasticsearch.tracing.Tracer} interface, which this module implements via + * {@link APMTracer}. We use the OpenTelemetry API to capture "spans", and attach the + * Elastic APM Java to ship those spans to an APM server. Although it is possible to + * programmatically attach the agent, the Security Manager permissions required for this + * make this approach excessively difficult. + *

+ * All settings are found under the <code>xpack.apm.</code> prefix. Any setting under
+ * the <code>xpack.apm.agent.</code> prefix will be forwarded on to the APM Java agent
+ * by setting appropriate system properties. Some settings can only be set once, and must be
+ * set when the agent starts. We therefore also create and configure a config file in
+ * the {@code APMJvmOptions} class, which we then delete when Elasticsearch starts, so that
+ * sensitive settings such as <code>secret_token</code> or <code>api_key</code> are not
+ * left on disk.
+ *
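+ * As a concrete illustration (the key and value are only examples), a cluster setting named
+ * <code>xpack.apm.agent.transaction_sample_rate</code> with a value of <code>"0.5"</code> is expected
+ * to surface as the system property <code>elastic.apm.transaction_sample_rate</code>, which the agent
+ * then picks up.
+ *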

+ * When settings are reconfigured using the settings REST API, the new values will again + * be passed via system properties to the Java agent, which periodically checks for changes + * and applies the new settings values, provided those settings can be dynamically updated. + */ public class APM extends Plugin implements NetworkPlugin { private final SetOnce tracer = new SetOnce<>(); private final Settings settings; @@ -53,10 +73,11 @@ public Collection createComponents( final APMAgentSettings apmAgentSettings = new APMAgentSettings(); final APMTracer apmTracer = new APMTracer(settings, clusterService); + tracer.set(apmTracer); + apmAgentSettings.syncAgentSystemProperties(settings); apmAgentSettings.addClusterSettingsListeners(clusterService, apmTracer); - tracer.set(apmTracer); return List.of(apmTracer); } @@ -67,7 +88,8 @@ public List> getSettings() { APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING, APMAgentSettings.APM_TRACING_NAMES_EXCLUDE_SETTING, APMAgentSettings.APM_AGENT_SETTINGS, - APMAgentSettings.APM_TOKEN_SETTING + APMAgentSettings.APM_SECRET_TOKEN_SETTING, + APMAgentSettings.APM_API_KEY_SETTING ); } } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java index 396d7fdac07d7..1d9f9d022d80d 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java @@ -28,13 +28,17 @@ import static org.elasticsearch.common.settings.Setting.Property.NodeScope; import static org.elasticsearch.common.settings.Setting.Property.OperatorDynamic; +/** + * This class is responsible for APM settings, both for Elasticsearch and the APM Java agent. + * The methods could all be static, however they are not in order to make unit testing easier. + */ class APMAgentSettings { private static final Logger LOGGER = LogManager.getLogger(APMAgentSettings.class); /** * Sensible defaults that Elasticsearch configures. This cannot be done via the APM agent - * config file, as then their values cannot be overridden dynamically via system properties. + * config file, as then their values could not be overridden dynamically via system properties. */ // tag::noformat static Map APM_AGENT_DEFAULT_SETTINGS = Map.of( @@ -55,6 +59,10 @@ void addClusterSettingsListeners(ClusterService clusterService, APMTracer apmTra clusterSettings.addAffixMapUpdateConsumer(APM_AGENT_SETTINGS, map -> map.forEach(this::setAgentSetting), (x, y) -> {}); } + /** + * Copies APM settings from the provided settings object into the corresponding system properties. + * @param settings the settings to apply + */ void syncAgentSystemProperties(Settings settings) { this.setAgentSetting("recording", Boolean.toString(APM_ENABLED_SETTING.get(settings))); @@ -73,6 +81,12 @@ void syncAgentSystemProperties(Settings settings) { APM_AGENT_SETTINGS.getAsMap(settings).forEach(this::setAgentSetting); } + /** + * Copies a setting to the APM agent's system properties under elastic.apm, either + * by setting the property if {@code value} has a value, or by deleting the property if it doesn't. 
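+     * <p>
+     * For example (the values are illustrative), {@code setAgentSetting("recording", "true")} should result
+     * in the system property {@code elastic.apm.recording} being set to {@code "true"}, while passing a
+     * null or empty value should clear that property again.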
+ * @param key the config key to set, without any prefix + * @param value the value to set, or null + */ @SuppressForbidden(reason = "Need to be able to manipulate APM agent-related properties to set them dynamically") void setAgentSetting(String key, String value) { final String completeKey = "elastic.apm." + Objects.requireNonNull(key); @@ -90,6 +104,9 @@ void setAgentSetting(String key, String value) { private static final String APM_SETTING_PREFIX = "xpack.apm."; + /** + * A list of APM agent config keys that should never be configured by the user. + */ private static final List PROHIBITED_AGENT_KEYS = List.of( // ES generates a config file and sets this value "config_file", @@ -135,8 +152,13 @@ void setAgentSetting(String key, String value) { NodeScope ); - static final Setting APM_TOKEN_SETTING = SecureSetting.secureString( + static final Setting APM_SECRET_TOKEN_SETTING = SecureSetting.secureString( APM_SETTING_PREFIX + "secret_token", null ); + + static final Setting APM_API_KEY_SETTING = SecureSetting.secureString( + APM_SETTING_PREFIX + "api_key", + null + ); } diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java index 4f07e82607fe8..4d7c03c59dc5f 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java +++ b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMTracer.java @@ -14,6 +14,7 @@ import io.opentelemetry.api.trace.SpanKind; import io.opentelemetry.api.trace.Tracer; import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; import io.opentelemetry.context.propagation.TextMapGetter; import org.apache.logging.log4j.LogManager; @@ -44,10 +45,19 @@ import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_TRACING_NAMES_EXCLUDE_SETTING; import static org.elasticsearch.xpack.apm.APMAgentSettings.APM_TRACING_NAMES_INCLUDE_SETTING; +/** + * This is an implementation of the {@link org.elasticsearch.tracing.Tracer} interface, which uses + * the OpenTelemetry API to capture spans. + *

+ * This module doesn't provide an implementation of the OTel API. Normally that would mean that the + * API's default, no-op implementation would be used. However, when the APM Java is attached, it + * intercepts the {@link GlobalOpenTelemetry} class and provides its own implementation instead. + */ public class APMTracer extends AbstractLifecycleComponent implements org.elasticsearch.tracing.Tracer { private static final Logger LOGGER = LogManager.getLogger(APMTracer.class); + /** Holds in-flight span information. */ private final Map spans = ConcurrentCollections.newConcurrentMap(); private final ClusterService clusterService; @@ -56,10 +66,11 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic private List includeNames; private List excludeNames; + /** Built using {@link #includeNames} and {@link #excludeNames}, and filters out spans based on their name. */ private volatile CharacterRunAutomaton filterAutomaton; /** - * This class is required to make all open telemetry services visible at once + * This class is used to make all OpenTelemetry services visible at once */ record APMServices(Tracer tracer, OpenTelemetry openTelemetry) {} @@ -140,6 +151,8 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { spans.computeIfAbsent(traceable.getSpanId(), spanId -> AccessController.doPrivileged((PrivilegedAction) () -> { final SpanBuilder spanBuilder = services.tracer.spanBuilder(traceable.getSpanName()); + // A span can have a parent span, which here is modelled though a parent span context. + // Setting this is important for seeing a complete trace in the APM UI. final Context parentContext = getParentContext(threadContext); if (parentContext != null) { spanBuilder.setParent(parentContext); @@ -149,14 +162,14 @@ public void onTraceStarted(ThreadContext threadContext, Traceable traceable) { final Span span = spanBuilder.startSpan(); final Context contextForNewSpan = Context.current().with(span); + // The new span context can be used as the parent context directly within the same Java process... + threadContext.putTransient(Task.APM_TRACE_CONTEXT, contextForNewSpan); + + // ...whereas for tasks sent to other ES nodes, we need to put trace HTTP headers into the threadContext so + // that they can be propagated. final Map spanHeaders = new HashMap<>(); services.openTelemetry.getPropagators().getTextMapPropagator().inject(contextForNewSpan, spanHeaders, Map::put); spanHeaders.keySet().removeIf(k -> isSupportedContextKey(k) == false); - - // The span context can be used as the parent context directly within the same Java process - threadContext.putTransient(Task.APM_TRACE_CONTEXT, contextForNewSpan); - // Whereas for tasks sent to other ES nodes, we need to put trace headers into the threadContext so that they can be - // propagated threadContext.putHeader(spanHeaders); return contextForNewSpan; @@ -189,6 +202,25 @@ private Context getParentContext(ThreadContext threadContext) { return parentContext; } + /** + * Most of the examples of how to use the OTel API look something like this, where the span context + * is automatically propagated: + * + *

+     * <pre>{@code
+     * Span span = tracer.spanBuilder("parent").startSpan();
+     * try (Scope scope = span.makeCurrent()) {
+     *   // ...do some stuff, possibly creating further spans
+     * } finally {
+     *   span.end();
+     * }
+     * }</pre>
+ * This typically isn't useful in Elasticsearch, because a {@link Scope} can't be used across threads. + * However, if a scope is active, then the APM agent can capture additional information, so this method + * exists to make it possible to use scopes in the few situaton where it makes sense. + * + * @param traceable the traceable for which to open a scope. A span must currently be open for this {@code traceable}. + * @return a method to close the scope when you are finished with it. + */ @Override public Releasable withScope(Traceable traceable) { final Context context = spans.get(traceable.getSpanId()); @@ -312,7 +344,7 @@ Map getSpans() { return spans; } - static CharacterRunAutomaton buildAutomaton(List includeNames, List excludeNames) { + private static CharacterRunAutomaton buildAutomaton(List includeNames, List excludeNames) { Automaton includeAutomaton = patternsToAutomaton(includeNames); Automaton excludeAutomaton = patternsToAutomaton(excludeNames); From 8f98e7e5ef0242edf56863cdf7fc7f623fce1b06 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 15 Jun 2022 11:16:43 +0100 Subject: [PATCH 86/90] Update TRACING.md --- TRACING.md | 43 ++++++++++++++++++++++++------------------- 1 file changed, 24 insertions(+), 19 deletions(-) diff --git a/TRACING.md b/TRACING.md index 1f0d146b6c0b2..37ffff865dfbe 100644 --- a/TRACING.md +++ b/TRACING.md @@ -18,15 +18,14 @@ Elasticsearch to hard-code the use of an SDK. ## How is tracing configured? * The `xpack.apm.enabled` setting must be set to `true` - * The APM agent must be both enabled and configured with server credentials. - See below. + * You must supplied credentials for the APM server. See below. -We have a config file in [`config/elasticapm.properties`][config], which -configures settings that are not dynamic, or should not be changed at runtime. -Other settings can be configured at runtime by using the cluster settings API, -and setting `xpack.apm.agent.` with a string value, where `` -is the APM agent key that you want to configure. For example, to change the -sampling rate: +All APM settings live under `xpack.apm`. All settings related to the Java agent +go under `xpack.apm.agent`. Anything you set under there will be propagated to +the agent. + +For agent settings that can be changed dynamically, you can use the cluster +settings REST API. For example, to change the sampling rate: curl -XPUT \ -H "Content-type: application/json" \ @@ -36,17 +35,23 @@ sampling rate: ### More details about configuration -The APM agent pulls configuration from [multiple sources][agent-config], with a -hierarchy that means, for example, that options set in the config file cannot be -overridden via system properties. This means that Elasticsearch cannot ship with -sensible defaults for dynamic settings in the config file and override them via -system properties. +For context, the APM agent pulls configuration from [multiple +sources][agent-config], with a hierarchy that means, for example, that options +set in the config file cannot be overridden via system properties. + +Now, in order to send tracing data to the APM server, ES needs to configured with +either a `secret_key` or an `api_key`. We could configure these in the agent via +system properties, but then their values would be available to any Java code +that can read system properties. -Instead, static or sensitive config values are put in the config file, and -dynamic settings are left entirely to the system properties. 
The Elasticsearch -APM plugin has appropriate security access to set the APM-related system -properties. Calls to the ES settings REST API are translated into system -property writes, which the agent later picks up and applies. +Instead, when Elasticsearch bootstraps itself, it compiles all APM settings +together, including any `secret_key` or `api_key` values from the ES keystore, +and writes out a temporary APM config file containin all static configuration +(i.e. values that cannot change after the agent starts). This file is deleted +soon after ES starts up. Settings that are not sensitive and can be changed +dynamically are configure via system properties. Calls to the ES settings REST +API are translated into system property writes, which the agent later picks up +and applies. ## Where is tracing data sent? @@ -95,7 +100,7 @@ tracer when a span should start and end. ## What additional attributes should I set? -That's up to you. Be careful about capture anything that could leak sensitive +That's up to you. Be careful not to capture anything that could leak sensitive or personal information. ## What is "scope" and when should I used it? From 2d3b30bce58ecdfd9518fac70bb7633d63d2a568 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 15 Jun 2022 13:43:42 +0100 Subject: [PATCH 87/90] General fixing and polishing --- .../gradle/plugin/PluginBuildPlugin.java | 3 - .../resources/plugin-descriptor.properties | 4 +- .../server/cli/BootstrapJvmOptions.java | 20 +++---- .../server/cli/JvmOptionsParser.java | 8 +-- .../elasticsearch/server/cli/ServerCli.java | 5 +- .../server/cli/ServerProcess.java | 3 +- .../server/cli/BootstrapJvmOptionsTests.java | 17 +----- .../server/cli/ServerCliTests.java | 8 +-- .../server/cli/ServerProcessTests.java | 9 +-- .../netty4/Netty4HttpServerTransport.java | 11 +++- server/build.gradle | 1 + .../elasticsearch/action/ActionModule.java | 1 - .../cluster/InternalClusterInfoService.java | 56 +++++++++---------- .../cluster/service/MasterService.java | 3 +- .../common/util/concurrent/ThreadContext.java | 47 +++++++++------- .../http/AbstractHttpServerTransport.java | 2 +- .../java/org/elasticsearch/node/Node.java | 1 - .../org/elasticsearch/plugins/Plugin.java | 2 +- .../plugins/PluginDescriptor.java | 15 ++++- .../elasticsearch/rest/RestController.java | 3 +- .../java/org/elasticsearch/tasks/Task.java | 4 +- .../org/elasticsearch/tasks/TaskManager.java | 1 - .../AbstractHttpServerTransportTests.java | 2 +- .../http/DefaultRestChannelTests.java | 7 --- .../elastic-apm-agent-1.30.1.jar.sha1 | 1 - .../elastic-apm-agent-1.32.0.jar.sha1 | 1 + .../opentelemetry-api-1.12.0.jar.sha1 | 1 - .../opentelemetry-api-1.14.0.jar.sha1 | 1 + .../opentelemetry-context-1.12.0.jar.sha1 | 1 - .../opentelemetry-context-1.14.0.jar.sha1 | 1 + ...pentelemetry-semconv-1.12.0-alpha.jar.sha1 | 1 - ...pentelemetry-semconv-1.14.0-alpha.jar.sha1 | 1 + .../xpack/apm/APMAgentSettings.java | 15 +---- .../security/authz/AuthorizationService.java | 1 - 34 files changed, 117 insertions(+), 140 deletions(-) delete mode 100644 x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.30.1.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.32.0.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.12.0.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.14.0.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.12.0.jar.sha1 create mode 100644 
x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.14.0.jar.sha1 delete mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.12.0-alpha.jar.sha1 create mode 100644 x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.14.0-alpha.jar.sha1 diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java index 7c1e1166197ce..dfd67a197a0cf 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java @@ -8,7 +8,6 @@ package org.elasticsearch.gradle.plugin; -import org.apache.tools.ant.filters.ReplaceTokens; import org.elasticsearch.gradle.Version; import org.elasticsearch.gradle.VersionProperties; import org.elasticsearch.gradle.dependencies.CompileOnlyResolvePlugin; @@ -167,7 +166,6 @@ private static CopySpec createBundleSpec( TaskProvider buildProperties ) { var bundleSpec = project.copySpec(); - bundleSpec.from(buildProperties); bundleSpec.from(pluginMetadata, copySpec -> { // metadata (eg custom security policy) @@ -190,7 +188,6 @@ private static CopySpec createBundleSpec( bundleSpec.from("src/main", copySpec -> { copySpec.include("config/**"); copySpec.include("bin/**"); - copySpec.filter(Map.of("tokens", Map.of("es.version", VersionProperties.getElasticsearch())), ReplaceTokens.class); }); return bundleSpec; } diff --git a/build-tools/src/main/resources/plugin-descriptor.properties b/build-tools/src/main/resources/plugin-descriptor.properties index 1e10477e5994a..4d3b2910c8a4c 100644 --- a/build-tools/src/main/resources/plugin-descriptor.properties +++ b/build-tools/src/main/resources/plugin-descriptor.properties @@ -56,10 +56,10 @@ extended.plugins=${extendedPlugins} # # 'has.native.controller': whether or not the plugin has a native controller has.native.controller=${hasNativeController} -<% if (javaOpts.isEmpty() == false) { %> +<% if (type == "bootstrap") { %> # # 'java.opts': any additional command line parameters to pass to the JVM when -# Elasticsearch starts. +# Elasticsearch starts. Only applies to "bootstrap" plugins. java.opts=${javaOpts} <% } %> <% if (licensed) { %> diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/BootstrapJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/BootstrapJvmOptions.java index 699a8b4d850cd..82b0fd5e5b7d0 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/BootstrapJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/BootstrapJvmOptions.java @@ -24,11 +24,11 @@ * will be added to the JVM's boot classpath. The plugins may also define * additional JVM options, in order to configure the bootstrap plugins. 
*/ -public class BootstrapJvmOptions { +class BootstrapJvmOptions { private BootstrapJvmOptions() {} - public static List bootstrapJvmOptions(Path plugins) throws IOException { + static List bootstrapJvmOptions(Path plugins) throws IOException { if (Files.isDirectory(plugins) == false) { throw new IllegalArgumentException("Plugins path " + plugins + " must be a directory"); } @@ -42,10 +42,7 @@ public static List bootstrapJvmOptions(Path plugins) throws IOException private static List getPluginInfo(Path plugins) throws IOException { final List pluginInfo = new ArrayList<>(); - final List pluginDirs; - try (Stream pluginDirStream = Files.list(plugins)) { - pluginDirs = pluginDirStream.toList(); - } + final List pluginDirs = listFiles(plugins); for (Path pluginDir : pluginDirs) { if (Files.isDirectory(pluginDir) == false) { @@ -54,10 +51,7 @@ private static List getPluginInfo(Path plugins) throws IOException { final List jarFiles = new ArrayList<>(); final Properties props = new Properties(); - final List pluginFiles; - try (Stream pluginFileStream = Files.list(pluginDir)) { - pluginFiles = pluginFileStream.toList(); - } + final List pluginFiles = listFiles(pluginDir); for (Path pluginFile : pluginFiles) { final String lowerCaseName = pluginFile.getFileName().toString().toLowerCase(Locale.ROOT); @@ -110,6 +104,12 @@ static List generateOptions(List pluginInfo) { return bootstrapOptions; } + private static List listFiles(Path dir) throws IOException { + try (Stream pluginDirStream = Files.list(dir)) { + return pluginDirStream.toList(); + } + } + // package-private for testing record PluginInfo(List jarFiles, Properties properties) {} } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java index 2bd38c8a07d09..4df4de722e8fd 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java @@ -121,13 +121,13 @@ static List determineJvmOptions(ServerArgs args, Path configDir, Path pl private List jvmOptions( ServerArgs args, - final Path configDir, + final Path config, Path plugins, final String esJavaOpts, final Map substitutions ) throws InterruptedException, IOException, JvmOptionsFileParserException, UserException { - final List jvmOptions = readJvmOptionsFiles(configDir); + final List jvmOptions = readJvmOptionsFiles(config); if (esJavaOpts != null) { jvmOptions.addAll(Arrays.stream(esJavaOpts.split("\\s+")).filter(Predicate.not(String::isBlank)).toList()); @@ -137,13 +137,13 @@ private List jvmOptions( final MachineDependentHeap machineDependentHeap = new MachineDependentHeap( new OverridableSystemMemoryInfo(substitutedJvmOptions, new DefaultSystemMemoryInfo()) ); - substitutedJvmOptions.addAll(machineDependentHeap.determineHeapSettings(configDir, substitutedJvmOptions)); + substitutedJvmOptions.addAll(machineDependentHeap.determineHeapSettings(config, substitutedJvmOptions)); final List ergonomicJvmOptions = JvmErgonomics.choose(substitutedJvmOptions); final List systemJvmOptions = SystemJvmOptions.systemJvmOptions(); final List bootstrapOptions = BootstrapJvmOptions.bootstrapJvmOptions(plugins); final List apmOptions; - try (KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(configDir)) { + try (KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(config)) { if (keyStoreWrapper != null) { try 
{ keyStoreWrapper.decrypt(args.keystorePassword().clone().getChars()); diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java index accd646e2c2ea..9209440663c79 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java @@ -90,7 +90,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce syncPlugins(terminal, env, processInfo); ServerArgs args = createArgs(options, env, keystorePassword, processInfo); - this.server = startServer(terminal, processInfo, args, env.modulesFile(), env.pluginsFile()); + this.server = startServer(terminal, processInfo, args, env.pluginsFile()); if (options.has(daemonizeOption)) { server.detach(); @@ -221,8 +221,7 @@ protected Command loadTool(String toolname, String libs) { } // protected to allow tests to override - protected ServerProcess startServer(Terminal terminal, ProcessInfo processInfo, ServerArgs args, Path modulesDir, Path pluginsDir) - throws UserException { + protected ServerProcess startServer(Terminal terminal, ProcessInfo processInfo, ServerArgs args, Path pluginsDir) throws UserException { return ServerProcess.start(terminal, processInfo, args, pluginsDir); } } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java index c87ebe07acf1c..817bf990a10cf 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java @@ -85,8 +85,7 @@ interface ProcessStarter { * @return A running server process that is ready for requests * @throws UserException If the process failed during bootstrap */ - public static ServerProcess start(Terminal terminal, ProcessInfo processInfo, ServerArgs args, Path pluginsDir) - throws UserException { + public static ServerProcess start(Terminal terminal, ProcessInfo processInfo, ServerArgs args, Path pluginsDir) throws UserException { return start(terminal, processInfo, args, pluginsDir, JvmOptionsParser::determineJvmOptions, ProcessBuilder::start); } diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/BootstrapJvmOptionsTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/BootstrapJvmOptionsTests.java index 4079473676ccf..37c87b33c8a73 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/BootstrapJvmOptionsTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/BootstrapJvmOptionsTests.java @@ -16,14 +16,13 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; public class BootstrapJvmOptionsTests extends ESTestCase { public void testGenerateOptionsHandlesNoPlugins() { - final List options = BootstrapJvmOptions.generateOptions(List.of(), List.of()); + final List options = BootstrapJvmOptions.generateOptions(List.of()); assertThat(options, is(empty())); } @@ -32,20 +31,10 @@ public void testGenerateOptionsIgnoresNonBootstrapPlugins() { props.put("type", 
"isolated"); List info = List.of(new PluginInfo(List.of(), props)); - final List options = BootstrapJvmOptions.generateOptions(List.of(), info); + final List options = BootstrapJvmOptions.generateOptions(info); assertThat(options, is(empty())); } - public void testGenerateOptionsHandlesModules() { - Properties props = new Properties(); - props.put("type", "isolated"); - props.put("java.opts", "-ea"); - List info = List.of(new PluginInfo(List.of(), props)); - - final List options = BootstrapJvmOptions.generateOptions(info, List.of()); - assertThat(options, equalTo(List.of("-ea"))); - } - public void testGenerateOptionsHandlesBootstrapPlugins() { Properties propsWithoutJavaOpts = new Properties(); propsWithoutJavaOpts.put("type", "bootstrap"); @@ -66,7 +55,7 @@ public void testGenerateOptionsHandlesBootstrapPlugins() { propsWithJavaOpts.put("java.opts", "-Dkey=value -DotherKey=otherValue"); PluginInfo info4 = new PluginInfo(List.of("/path/fourth.jar"), propsWithJavaOpts); - final List options = BootstrapJvmOptions.generateOptions(List.of(), List.of(info1, info2, info3, info4)); + final List options = BootstrapJvmOptions.generateOptions(List.of(info1, info2, info3, info4)); assertThat( options, contains( diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java index 88c3b7d1e903f..e7ba031fe9c00 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java @@ -423,13 +423,7 @@ protected Command loadTool(String toolname, String libs) { } @Override - protected ServerProcess startServer( - Terminal terminal, - ProcessInfo processInfo, - ServerArgs args, - Path modulesDir, - Path pluginsDir - ) { + protected ServerProcess startServer(Terminal terminal, ProcessInfo processInfo, ServerArgs args, Path pluginsDir) { if (argsValidator != null) { argsValidator.accept(args); } diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java index 1f1d86f6f1c71..cdbd4d0d2bcc2 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java @@ -201,14 +201,7 @@ ServerProcess startProcess(boolean daemonize, boolean quiet, String keystorePass process = new MockElasticsearchProcess(); return process; }; - return ServerProcess.start( - terminal, - pinfo, - args, - esHomeDir.resolve("plugins"), - optionsBuilder, - starter - ); + return ServerProcess.start(terminal, pinfo, args, esHomeDir.resolve("plugins"), optionsBuilder, starter); } public void testProcessBuilder() throws Exception { diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index fc9d73cf246d9..92529b2d9f0aa 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -149,7 +149,16 @@ public Netty4HttpServerTransport( SharedGroupFactory sharedGroupFactory, 
Tracer tracer ) { - super(settings, networkService, Netty4Utils.createRecycler(settings), threadPool, xContentRegistry, dispatcher, clusterSettings, tracer); + super( + settings, + networkService, + Netty4Utils.createRecycler(settings), + threadPool, + xContentRegistry, + dispatcher, + clusterSettings, + tracer + ); Netty4Utils.setAvailableProcessors(EsExecutors.NODE_PROCESSORS_SETTING.get(settings)); NettyAllocator.logAllocatorDescriptionIfNeeded(); this.sharedGroupFactory = sharedGroupFactory; diff --git a/server/build.gradle b/server/build.gradle index f1f471fd66a3c..231eff76e43b6 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -72,6 +72,7 @@ dependencies { internalClusterTestImplementation(project(":test:framework")) { exclude group: 'org.elasticsearch', module: 'server' } + } tasks.named("forbiddenPatterns").configure { diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index e0d007a97e977..f8d8cb9d43340 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -468,7 +468,6 @@ public ActionModule( this.threadPool = threadPool; actions = setupActions(actionPlugins); actionFilters = setupActionFilters(actionPlugins); - autoCreateIndex = new AutoCreateIndex(settings, clusterSettings, indexNameExpressionResolver, systemIndices); destructiveOperations = new DestructiveOperations(settings, clusterSettings); Set headers = Stream.concat( diff --git a/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java index 97adf314549e5..21f676a096712 100644 --- a/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java +++ b/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java @@ -178,9 +178,9 @@ void execute() { public void onResponse(NodesStatsResponse nodesStatsResponse) { logger.trace("received node stats response"); - for (final FailedNodeException failure : nodesStatsResponse.failures()) { - logger.warn(() -> "failed to retrieve stats for node [" + failure.nodeId() + "]", failure.getCause()); - } + for (final FailedNodeException failure : nodesStatsResponse.failures()) { + logger.warn(() -> "failed to retrieve stats for node [" + failure.nodeId() + "]", failure.getCause()); + } ImmutableOpenMap.Builder leastAvailableUsagesBuilder = ImmutableOpenMap.builder(); ImmutableOpenMap.Builder mostAvailableUsagesBuilder = ImmutableOpenMap.builder(); @@ -217,36 +217,36 @@ public void onFailure(Exception e) { public void onResponse(IndicesStatsResponse indicesStatsResponse) { logger.trace("received indices stats response"); - if (indicesStatsResponse.getShardFailures().length > 0) { - final Set failedNodeIds = new HashSet<>(); - for (final DefaultShardOperationFailedException shardFailure : indicesStatsResponse.getShardFailures()) { - if (shardFailure.getCause()instanceof final FailedNodeException failedNodeException) { - if (failedNodeIds.add(failedNodeException.nodeId())) { + if (indicesStatsResponse.getShardFailures().length > 0) { + final Set failedNodeIds = new HashSet<>(); + for (final DefaultShardOperationFailedException shardFailure : indicesStatsResponse.getShardFailures()) { + if (shardFailure.getCause()instanceof final FailedNodeException failedNodeException) { + if (failedNodeIds.add(failedNodeException.nodeId())) { + logger.warn( + () -> format("failed 
to retrieve shard stats from node [%s]", failedNodeException.nodeId()), + failedNodeException.getCause() + ); + } + logger.trace( + () -> format( + "failed to retrieve stats for shard [%s][%s]", + shardFailure.index(), + shardFailure.shardId() + ), + shardFailure.getCause() + ); + } else { logger.warn( - () -> format("failed to retrieve shard stats from node [%s]", failedNodeException.nodeId()), - failedNodeException.getCause() + () -> format( + "failed to retrieve stats for shard [%s][%s]", + shardFailure.index(), + shardFailure.shardId() + ), + shardFailure.getCause() ); } - logger.trace( - () -> format( - "failed to retrieve stats for shard [%s][%s]", - shardFailure.index(), - shardFailure.shardId() - ), - shardFailure.getCause() - ); - } else { - logger.warn( - () -> format( - "failed to retrieve stats for shard [%s][%s]", - shardFailure.index(), - shardFailure.shardId() - ), - shardFailure.getCause() - ); } } - } final ShardStats[] stats = indicesStatsResponse.getShards(); final ImmutableOpenMap.Builder shardSizeByIdentifierBuilder = ImmutableOpenMap.builder(); diff --git a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java index 48332e5e9056a..ac07d084e75b6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java @@ -285,8 +285,7 @@ private void runTasks( try (var ignored = threadPool.getThreadContext().newTraceContext()) { final Task task = taskManager.register("master", STATE_UPDATE_ACTION_NAME, new TaskAwareRequest() { @Override - public void setParentTask(TaskId taskId) { - } + public void setParentTask(TaskId taskId) {} @Override public TaskId getParentTask() { diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index 4ac30f3634106..131b2423665fe 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -107,7 +107,7 @@ public StoredContext stashContext() { * X-Opaque-ID should be preserved in a threadContext in order to propagate this across threads. * This is needed so the DeprecationLogger in another thread can see the value of X-Opaque-ID provided by a user. * The same is applied to Task.TRACE_ID. - * Otherwise when context is stash, it should be empty. + * Otherwise when context is stashed, it should be empty. 
*/ boolean hasHeadersToCopy = false; if (context.requestHeaders.isEmpty() == false) { @@ -118,26 +118,20 @@ public StoredContext stashContext() { } } } - // HACK HACK HACK - final Map copiedTransientEntries = new HashMap<>(); - if (context.transientHeaders.isEmpty() == false) { - List transientKeysToCopy = List.of(Task.APM_TRACE_CONTEXT); - for (String transientKey : transientKeysToCopy) { - if (context.transientHeaders.containsKey(transientKey)) { - copiedTransientEntries.put(transientKey, context.transientHeaders.get(transientKey)); - } - } - } - if (hasHeadersToCopy || copiedTransientEntries.isEmpty() == false) { + + boolean hasTransientHeadersToCopy = context.transientHeaders.containsKey(Task.APM_TRACE_CONTEXT); + + if (hasHeadersToCopy || hasTransientHeadersToCopy) { ThreadContextStruct threadContextStruct = DEFAULT_CONTEXT; if (hasHeadersToCopy) { - Map map = headers(context); - threadContextStruct = DEFAULT_CONTEXT.putHeaders(map); + Map copiedHeaders = getHeadersToCopy(context); + threadContextStruct = DEFAULT_CONTEXT.putHeaders(copiedHeaders); } - if (copiedTransientEntries.isEmpty() == false) { - for (Map.Entry entry : copiedTransientEntries.entrySet()) { - threadContextStruct = threadContextStruct.putTransient(entry.getKey(), entry.getValue()); - } + if (hasTransientHeadersToCopy) { + threadContextStruct = threadContextStruct.putTransient( + Task.APM_TRACE_CONTEXT, + context.transientHeaders.get(Task.APM_TRACE_CONTEXT) + ); } threadLocal.set(threadContextStruct); } else { @@ -151,6 +145,14 @@ public StoredContext stashContext() { }; } + /** + * When using a {@link org.elasticsearch.tracing.Tracer} to capture activity in Elasticsearch, when a parent span is already + * in progress, it is necessary to start a new context before beginning a child span. This method creates a context, + * moving tracing-related fields to different names so that a new child span can be started. This child span will pick up + * the moved fields and use them to establish the parent-child relationship. + * + * @return a stored context, which can be restored when this context is no longer needed. + */ public StoredContext newTraceContext() { final ThreadContextStruct context = threadLocal.get(); final Map newRequestHeaders = new HashMap<>(context.requestHeaders); @@ -183,6 +185,13 @@ public StoredContext newTraceContext() { return () -> threadLocal.set(context); } + /** + * When using a {@link org.elasticsearch.tracing.Tracer}, sometimes you need to start a span completely unrelated + * to any current span. In order to avoid any parent/child relationship being created, this method creates a new + * context that clears all the tracing fields. + * + * @return a stored context, which can be restored when this context is no longer needed. 
+ */ public StoredContext clearTraceContext() { final ThreadContextStruct context = threadLocal.get(); final Map newRequestHeaders = new HashMap<>(context.requestHeaders); @@ -208,7 +217,7 @@ public StoredContext clearTraceContext() { return () -> threadLocal.set(context); } - private static Map headers(ThreadContextStruct context) { + private static Map getHeadersToCopy(ThreadContextStruct context) { Map map = Maps.newMapWithExpectedSize(HEADERS_TO_COPY.size()); for (String header : HEADERS_TO_COPY) { final String value = context.requestHeaders.get(header); diff --git a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java index 1cde859944b91..51083bb0036db 100644 --- a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java +++ b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java @@ -417,10 +417,10 @@ private void handleIncomingRequest(final HttpRequest httpRequest, final HttpChan * IllegalArgumentException from the channel constructor and then attempt to create a new channel that bypasses parsing of these * parameter values. */ - final ThreadContext threadContext = threadPool.getThreadContext(); final RestChannel channel; { RestChannel innerChannel; + ThreadContext threadContext = threadPool.getThreadContext(); try { innerChannel = new DefaultRestChannel( httpChannel, diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 007764cce30a5..46f484f5368f9 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -731,7 +731,6 @@ protected Node( clusterService.addListener(new SystemIndexMetadataUpgradeService(systemIndices, clusterService)); } new TemplateUpgradeService(client, clusterService, threadPool, indexTemplateMetadataUpgraders); - final Transport transport = networkModule.getTransportSupplier().get(); Set taskHeaders = Stream.concat( pluginsService.filterPlugins(ActionPlugin.class).stream().flatMap(p -> p.getTaskHeaders().stream()), diff --git a/server/src/main/java/org/elasticsearch/plugins/Plugin.java b/server/src/main/java/org/elasticsearch/plugins/Plugin.java index 71ade5d3df3d9..02599f94e90d8 100644 --- a/server/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -208,7 +208,7 @@ public Collection getAdditionalIndexSettingProviders(Index } /** - * Called with a Tracers so that each plugin has a chance to work with it. + * Called with a Tracer so that each plugin has a chance to work with it. 
*/ public void onTracer(Tracer tracer) {} } diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginDescriptor.java b/server/src/main/java/org/elasticsearch/plugins/PluginDescriptor.java index 29cb321144fe8..93b46660b4c16 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginDescriptor.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginDescriptor.java @@ -218,6 +218,12 @@ public static PluginDescriptor readFromProperties(final Path path) throws IOExce final String javaOpts = propsMap.remove("java.opts"); + if (type != PluginType.BOOTSTRAP && Strings.isNullOrEmpty(javaOpts) == false) { + throw new IllegalArgumentException( + "[java.opts] can only have a value when [type] is set to [bootstrap] for plugin [" + name + "]" + ); + } + boolean isLicensed = parseBooleanValue(name, "licensed", propsMap.remove("licensed")); if (propsMap.isEmpty() == false) { @@ -378,7 +384,8 @@ public PluginType getType() { } /** - * Returns any additional JVM command-line options that this plugin adds. + * Returns any additional JVM command-line options that this plugin adds. Only applies to + * plugins whose type is "bootstrap". * * @return any additional JVM options. */ @@ -412,7 +419,9 @@ public XContentBuilder toXContentFragment(XContentBuilder builder, Params params builder.field("has_native_controller", hasNativeController); builder.field("licensed", isLicensed); builder.field("type", type); - builder.field("java_opts", javaOpts); + if (type == PluginType.BOOTSTRAP) { + builder.field("java_opts", javaOpts); + } return builder; } @@ -450,7 +459,7 @@ public String toString(String prefix) { appendLine(lines, prefix, "Licensed: ", isLicensed); appendLine(lines, prefix, "Type: ", type); - if (javaOpts != null) { + if (type == PluginType.BOOTSTRAP) { appendLine(lines, prefix, "Java Opts: ", javaOpts); } diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 1edeb32e92188..758b9d6334475 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -501,8 +501,7 @@ private void copyRestHeaders(RestRequest request, ThreadContext threadContext) t } else if (name.equals(Task.TRACE_PARENT_HTTP_HEADER)) { String traceparent = distinctHeaderValues.get(0); if (traceparent.length() >= 55) { - final String traceId = traceparent.substring(3, 35); - threadContext.putHeader(Task.TRACE_ID, traceId); + threadContext.putHeader(Task.TRACE_ID, traceparent.substring(3, 35)); threadContext.putTransient("parent_" + Task.TRACE_PARENT_HTTP_HEADER, traceparent); } } else if (name.equals(Task.TRACE_STATE)) { diff --git a/server/src/main/java/org/elasticsearch/tasks/Task.java b/server/src/main/java/org/elasticsearch/tasks/Task.java index 6cc1212f1e803..49c56e03d4b4c 100644 --- a/server/src/main/java/org/elasticsearch/tasks/Task.java +++ b/server/src/main/java/org/elasticsearch/tasks/Task.java @@ -44,10 +44,10 @@ public class Task implements Traceable { */ public static final String X_ELASTIC_PRODUCT_ORIGIN_HTTP_HEADER = "X-elastic-product-origin"; + public static final String TRACE_STATE = "tracestate"; /** - * Is used internally to pass the apm trace context between the nodes + * Used internally to pass the apm trace context between the nodes */ - public static final String TRACE_STATE = "tracestate"; public static final String APM_TRACE_CONTEXT = "apm.local.context"; diff --git 
a/server/src/main/java/org/elasticsearch/tasks/TaskManager.java b/server/src/main/java/org/elasticsearch/tasks/TaskManager.java index e296b9944f22f..b4826212156b1 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskManager.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskManager.java @@ -124,7 +124,6 @@ public Task register(String type, String action, TaskAwareRequest request) { long headerSize = 0; long maxSize = maxHeaderSize.getBytes(); ThreadContext threadContext = threadPool.getThreadContext(); - for (String key : taskHeaders) { String httpHeader = threadContext.getHeader(key); if (httpHeader != null) { diff --git a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java index 238db48b9ff13..328c45180c949 100644 --- a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java +++ b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java @@ -37,8 +37,8 @@ import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.BytesRefRecycler; import org.elasticsearch.tracing.Tracer; +import org.elasticsearch.transport.BytesRefRecycler; import org.elasticsearch.transport.TransportSettings; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.junit.After; diff --git a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java index fd227348773c5..8a7a59f900740 100644 --- a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java +++ b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.io.stream.BytesStream; import org.elasticsearch.common.io.stream.RecyclerBytesStreamOutput; import org.elasticsearch.common.recycler.Recycler; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ByteArray; @@ -30,11 +29,9 @@ import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; -import org.elasticsearch.test.ActionListenerUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.tracing.Tracer; import org.elasticsearch.transport.BytesRefRecycler; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; @@ -49,8 +46,6 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Set; -import java.util.concurrent.TimeUnit; import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -60,13 +55,11 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.atLeastOnce; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; public class 
DefaultRestChannelTests extends ESTestCase { diff --git a/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.30.1.jar.sha1 b/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.30.1.jar.sha1 deleted file mode 100644 index 0957a24284398..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.30.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -631752f53512fb9daf9ffe6cd95de80430968ad5 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.32.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.32.0.jar.sha1 new file mode 100644 index 0000000000000..26c4e0e9ae965 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/elastic-apm-agent-1.32.0.jar.sha1 @@ -0,0 +1 @@ +00ae7221875b5f6aafdf403c3ec0a45f26d0eb67 diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.12.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.12.0.jar.sha1 deleted file mode 100644 index 4531a5d1639d9..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -47ef9e467f9b734d4e4e4df15c3bd62dd991db5d \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.14.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.14.0.jar.sha1 new file mode 100644 index 0000000000000..d6cb6ec9cbba2 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-api-1.14.0.jar.sha1 @@ -0,0 +1 @@ +261882a3a87ef82af3b780e78fd68d60e1e03c37 diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.12.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.12.0.jar.sha1 deleted file mode 100644 index 17f2f0b1d8d9a..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7d8ec9f863faad36cbaa2f9bd10195ec5943fd70 \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.14.0.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.14.0.jar.sha1 new file mode 100644 index 0000000000000..7585bfdad68eb --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-context-1.14.0.jar.sha1 @@ -0,0 +1 @@ +2fff23ea18a8a990c00a2802c1859f724369f817 diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.12.0-alpha.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.12.0-alpha.jar.sha1 deleted file mode 100644 index 401f5203ae13f..0000000000000 --- a/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.12.0-alpha.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6542c9a536144567682cf95fd7cba97dc6eacc4a \ No newline at end of file diff --git a/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.14.0-alpha.jar.sha1 b/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.14.0-alpha.jar.sha1 new file mode 100644 index 0000000000000..badd2dabe9b31 --- /dev/null +++ b/x-pack/plugin/apm-integration/licenses/opentelemetry-semconv-1.14.0-alpha.jar.sha1 @@ -0,0 +1 @@ +bf4638d0076051445ce7298ee3c0769fc8a54a66 diff --git a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java index 1d9f9d022d80d..c3d2b8fd440e8 100644 --- a/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java +++ 
b/x-pack/plugin/apm-integration/src/main/java/org/elasticsearch/xpack/apm/APMAgentSettings.java @@ -71,10 +71,7 @@ void syncAgentSystemProperties(Settings settings) { // do anything if those settings are never configured. APM_AGENT_DEFAULT_SETTINGS.keySet() .forEach( - key -> this.setAgentSetting( - key, - APM_AGENT_SETTINGS.getConcreteSetting(APM_AGENT_SETTINGS.getKey() + key).get(settings) - ) + key -> this.setAgentSetting(key, APM_AGENT_SETTINGS.getConcreteSetting(APM_AGENT_SETTINGS.getKey() + key).get(settings)) ); // Then apply values from the settings in the cluster state @@ -152,13 +149,7 @@ void setAgentSetting(String key, String value) { NodeScope ); - static final Setting APM_SECRET_TOKEN_SETTING = SecureSetting.secureString( - APM_SETTING_PREFIX + "secret_token", - null - ); + static final Setting APM_SECRET_TOKEN_SETTING = SecureSetting.secureString(APM_SETTING_PREFIX + "secret_token", null); - static final Setting APM_API_KEY_SETTING = SecureSetting.secureString( - APM_SETTING_PREFIX + "api_key", - null - ); + static final Setting APM_API_KEY_SETTING = SecureSetting.secureString(APM_SETTING_PREFIX + "api_key", null); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 0a81369c9f80c..4c5df0bb85a7e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -217,7 +217,6 @@ public void authorize( final ActionListener listener ) { final AuthorizationContext enclosingContext = extractAuthorizationContext(threadContext, action); - /* authorization fills in certain transient headers, which must be observed in the listener (action handler execution) * as well, but which must not bleed across different action context (eg parent-child action contexts). *

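The `ThreadContext` and `MasterService` hunks in the patch above establish the pattern for starting a child span: open a fresh trace context, then register a task inside it. The sketch below illustrates that pattern; the wrapper class, the action name `internal:example/child-work` and the request object are made up for illustration, while `newTraceContext()`, `register()` and `unregister()` are the methods shown or referenced in the diffs.

    import org.elasticsearch.common.util.concurrent.ThreadContext;
    import org.elasticsearch.tasks.Task;
    import org.elasticsearch.tasks.TaskAwareRequest;
    import org.elasticsearch.tasks.TaskManager;
    import org.elasticsearch.threadpool.ThreadPool;

    class TracedChildWork {
        private final ThreadPool threadPool;
        private final TaskManager taskManager;

        TracedChildWork(ThreadPool threadPool, TaskManager taskManager) {
            this.threadPool = threadPool;
            this.taskManager = taskManager;
        }

        void runAsChildSpan(TaskAwareRequest request) {
            // Move the current tracing headers aside so that the task registered below is
            // traced as a child of the span recorded in the enclosing thread context.
            try (ThreadContext.StoredContext ignored = threadPool.getThreadContext().newTraceContext()) {
                Task task = taskManager.register("transport", "internal:example/child-work", request);
                try {
                    // ... work attributed to the child span runs here ...
                } finally {
                    taskManager.unregister(task);
                }
            }
        }
    }

This mirrors what `MasterService` does in the hunk above around registering its `master` state-update task.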
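TRACING.md above explains that calls to the ES settings REST API are translated into system property writes which the agent then picks up. A rough sketch of that bridge follows; the class name is invented, the `elastic.apm.` prefix is the agent's usual system-property namespace rather than something shown in these patches, and the `doPrivileged` wrapper stands in for the "appropriate security access" mentioned in the old TRACING.md text.

    import java.security.AccessController;
    import java.security.PrivilegedAction;

    class AgentSettingBridge {
        // key is the agent setting name, e.g. "transaction_sample_rate"; value comes from
        // the xpack.apm.agent.* cluster setting that was just updated via the REST API.
        void setAgentSetting(String key, String value) {
            AccessController.doPrivileged((PrivilegedAction<Void>) () -> {
                if (value == null || value.isEmpty()) {
                    System.clearProperty("elastic.apm." + key);
                } else {
                    System.setProperty("elastic.apm." + key, value);
                }
                return null;
            });
        }
    }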
From 52fd4d35988bd764fd5a6ff0c5233f630795bf37 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 15 Jun 2022 14:53:14 +0100 Subject: [PATCH 88/90] Remove debug gradle config --- qa/apm/build.gradle | 2 -- 1 file changed, 2 deletions(-) diff --git a/qa/apm/build.gradle b/qa/apm/build.gradle index 98c0f22e46aa4..245f13422c1ef 100644 --- a/qa/apm/build.gradle +++ b/qa/apm/build.gradle @@ -22,8 +22,6 @@ dependencies { dockerCompose { environment.put 'STACK_VERSION', VersionProperties.elasticsearch - removeContainers = false - retainContainersOnStartupFailure = false } elasticsearch_distributions { From 5e0892bba8884c70845a06c0d152aecdcc53250e Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 15 Jun 2022 14:54:05 +0100 Subject: [PATCH 89/90] Fix typo --- qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java b/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java index 28154ae869a57..63c8957a6b446 100644 --- a/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java +++ b/qa/apm/src/test/java/org/elasticsearch/xpack/apm/ApmIT.java @@ -32,7 +32,7 @@ public class ApmIT extends ESRestTestCase { /** * Check that if we send HTTP traffic to Elasticsearch, then traces are captured in APM server. The traces are generated in - * a separate Docker container, which continually fetches `/_notes/stats`. + * a separate Docker container, which continually fetches `/_nodes/stats`. */ public void testCapturesTracesForHttpTraffic() throws Exception { checkTracesDataStream(); From 4912eecd1a605bbc938aad228aecd48208f6215a Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Wed, 15 Jun 2022 14:54:41 +0100 Subject: [PATCH 90/90] Remove run script --- run.sh | 62 ---------------------------------------------------------- 1 file changed, 62 deletions(-) delete mode 100755 run.sh diff --git a/run.sh b/run.sh deleted file mode 100755 index a9c953fbd76a1..0000000000000 --- a/run.sh +++ /dev/null @@ -1,62 +0,0 @@ -#!/bin/bash - -set -eo pipefail - -# Clear this so that ES doesn't repeatedly complain about ignoring it -export JAVA_HOME='' - -AGENT_VERSION=$(awk '/apm_agent/ { print $3 }' build-tools-internal/version.properties) -ES_VERSION=$(awk '/^elasticsearch/ { print $3 }' build-tools-internal/version.properties) - -# This is the path that `./gradlew localDistro` prints out at the end -cd "build/distribution/local/elasticsearch-${ES_VERSION}-SNAPSHOT" - -# URL and token for sending traces -SERVER_URL="" -SECRET_TOKEN="" - -if ! grep -q xpack.apm.enabled config/elasticsearch.yml ; then - echo "xpack.apm.enabled: true" >> config/elasticsearch.yml -fi -if ! grep -q xpack.apm.agent.server_url config/elasticsearch.yml ; then - echo "xpack.apm.agent.server_url: $SERVER_URL" >> config/elasticsearch.yml -fi - -# Configure the ES keystore, so that we can use `elastic:password` for REST -# requests -if [[ ! 
-f config/elasticsearch.keystore ]]; then - ./bin/elasticsearch-keystore create - echo "password" | ./bin/elasticsearch-keystore add -x 'bootstrap.password' - echo "$SECRET_TOKEN" | ./bin/elasticsearch-keystore add -x "xpack.apm.secret_token" -fi - - -# Optional - override the agent jar -OVERRIDE_AGENT_JAR="$HOME/.m2/repository/co/elastic/apm/elastic-apm-agent/1.31.1-SNAPSHOT/elastic-apm-agent-1.31.1-SNAPSHOT.jar" - -if [[ -n "$OVERRIDE_AGENT_JAR" ]]; then - # Copy in WIP agent - cp "$OVERRIDE_AGENT_JAR" "modules/x-pack-apm-integration/elastic-apm-agent-${AGENT_VERSION}.jar" -fi - -# Configure the agent -#  1. Enable the agent -# 2. Set the server URL -# 3. Set the secret token -# perl -p -i -e " s|enabled: false|enabled: true| ; s|# server_url.*|server_url: $SERVER_URL| ; s|# secret_token.*|secret_token: $SECRET_TOKEN|" config/elasticapm.properties -# perl -p -i -e " s|log_level: error|log_level: debug| " config/elasticapm.properties - -# Require a debugger on 5007 in order to run: -# export ES_JAVA_OPTS="-agentlib:jdwp=transport=dt_socket,server=n,suspend=y,address=*:5007 " - -# Just run but expose a debugging server on 5007 -# export ES_JAVA_OPTS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5007 " - -# Hardcore security manager debugging -# export ES_JAVA_OPTS="-Djava.security.debug=failure" -# export ES_JAVA_OPTS="-Djava.security.debug=access,failure" - -# export ES_JAVA_OPTS=" -ea " - -# exec ./bin/elasticsearch -Expack.apm.enabled=true -Eingest.geoip.downloader.enabled=false -exec ./bin/elasticsearch -Eingest.geoip.downloader.enabled=false
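To round the series off, the `withScope` javadoc near the top of this section hands callers a `Releasable`, so the natural shape is try-with-resources on the thread doing the work. This is a sketch only: it assumes `withScope(Traceable)` is declared on the `Tracer` interface (as the `@Override` in `APMTracer` suggests) and that a span has already been started for the task elsewhere.

    import org.elasticsearch.core.Releasable;
    import org.elasticsearch.tasks.Task;
    import org.elasticsearch.tracing.Tracer;

    class ScopedWork {
        private final Tracer tracer;

        ScopedWork(Tracer tracer) {
            this.tracer = tracer;
        }

        void run(Task task) {
            // A Scope cannot be used across threads, so open and release it on the thread
            // doing the work; closing the Releasable ends the scope.
            try (Releasable ignored = tracer.withScope(task)) {
                // ... work here runs with the task's span active, so the APM agent can
                // attach additional detail to it ...
            }
        }
    }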