diff --git a/compose/cryostat.yml b/compose/cryostat.yml index a1f4145e6..0ce57a170 100644 --- a/compose/cryostat.yml +++ b/compose/cryostat.yml @@ -26,9 +26,11 @@ services: io.cryostat.jmxPort: "0" io.cryostat.jmxUrl: "service:jmx:rmi:///jndi/rmi://localhost:0/jmxrmi" environment: + QUARKUS_LOG_LEVEL: TRACE QUARKUS_HTTP_HOST: "cryostat" QUARKUS_HTTP_PORT: ${CRYOSTAT_HTTP_PORT} QUARKUS_HIBERNATE_ORM_LOG_SQL: "true" + CRYOSTAT_DISABLE_JMX_AUTH: "true" CRYOSTAT_DISCOVERY_JDP_ENABLED: ${CRYOSTAT_DISCOVERY_JDP_ENABLED:-true} CRYOSTAT_DISCOVERY_PODMAN_ENABLED: ${CRYOSTAT_DISCOVERY_PODMAN_ENABLED:-true} CRYOSTAT_DISCOVERY_DOCKER_ENABLED: ${CRYOSTAT_DISCOVERY_DOCKER_ENABLED:-true} diff --git a/compose/sample-apps.yml b/compose/sample-apps.yml index 938a37f9d..0138b8ca3 100644 --- a/compose/sample-apps.yml +++ b/compose/sample-apps.yml @@ -119,6 +119,7 @@ services: CRYOSTAT_AGENT_HARVESTER_MAX_FILES: 3 CRYOSTAT_AGENT_HARVESTER_EXIT_MAX_AGE_MS: 60000 CRYOSTAT_AGENT_HARVESTER_EXIT_MAX_SIZE_B: 153600 # "$(echo 1024*150 | bc)" + CRYOSTAT_AGENT_API_WRITES_ENABLED: "true" restart: always healthcheck: test: curl --fail http://localhost:10010 || exit 1 diff --git a/pom.xml b/pom.xml index a9e433848..1e5fdcb9d 100644 --- a/pom.xml +++ b/pom.xml @@ -22,6 +22,7 @@ 1.16.1 2.13.0 + 4.4 5.2.1 3.13.0 1.7 @@ -164,6 +165,11 @@ commons-io ${org.apache.commons.io.version} + + org.apache.commons + commons-collections4 + ${org.apache.commons.collections.version} + org.apache.httpcomponents.client5 httpclient5 diff --git a/schema/openapi.yaml b/schema/openapi.yaml index a9a86dbf3..87a077fa5 100644 --- a/schema/openapi.yaml +++ b/schema/openapi.yaml @@ -2278,6 +2278,7 @@ paths: nullable: true type: string restart: + deprecated: true nullable: true type: boolean toDisk: diff --git a/schema/schema.graphql b/schema/schema.graphql index 69284ffef..a3b06cc12 100644 --- a/schema/schema.graphql +++ b/schema/schema.graphql @@ -274,6 +274,7 @@ input DiscoveryNodeFilterInput { name: String names: [String] nodeTypes: [String] + targetIds: [BigInteger] } input Entry_String_StringInput { diff --git a/src/main/docker/include/jmc-agent.jar b/src/main/docker/include/jmc-agent.jar new file mode 100644 index 000000000..c0ff66d4a Binary files /dev/null and b/src/main/docker/include/jmc-agent.jar differ diff --git a/src/main/java/io/cryostat/JsonRequestFilter.java b/src/main/java/io/cryostat/JsonRequestFilter.java index 5c9109554..c61651b27 100644 --- a/src/main/java/io/cryostat/JsonRequestFilter.java +++ b/src/main/java/io/cryostat/JsonRequestFilter.java @@ -26,6 +26,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; +import jakarta.inject.Inject; import jakarta.ws.rs.container.ContainerRequestContext; import jakarta.ws.rs.container.ContainerRequestFilter; import jakarta.ws.rs.core.MediaType; @@ -45,7 +46,7 @@ public class JsonRequestFilter implements ContainerRequestFilter { "/api/v3/graphql"); private final Map compiledPatterns = new HashMap<>(); - private final ObjectMapper objectMapper = new ObjectMapper(); + @Inject ObjectMapper objectMapper; @Override public void filter(ContainerRequestContext requestContext) throws IOException { diff --git a/src/main/java/io/cryostat/credentials/CredentialsFinder.java b/src/main/java/io/cryostat/credentials/CredentialsFinder.java new file mode 100644 index 000000000..3d0eb7225 --- /dev/null +++ b/src/main/java/io/cryostat/credentials/CredentialsFinder.java @@ -0,0 +1,82 @@ +/* + * Copyright The Cryostat Authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.cryostat.credentials; + +import java.net.URI; +import java.util.Optional; + +import io.cryostat.expressions.MatchExpressionEvaluator; +import io.cryostat.targets.Target; +import io.cryostat.targets.Target.EventKind; +import io.cryostat.targets.Target.TargetDiscovery; + +import io.quarkus.vertx.ConsumeEvent; +import io.smallrye.common.annotation.Blocking; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; +import org.apache.commons.collections4.BidiMap; +import org.apache.commons.collections4.bidimap.DualHashBidiMap; +import org.jboss.logging.Logger; +import org.projectnessie.cel.tools.ScriptException; + +@ApplicationScoped +public class CredentialsFinder { + + @Inject MatchExpressionEvaluator expressionEvaluator; + @Inject Logger logger; + + private final BidiMap cache = new DualHashBidiMap<>(); + + @ConsumeEvent(Credential.CREDENTIALS_DELETED) + void onCredentialsDeleted(Credential credential) { + cache.removeValue(credential); + } + + @ConsumeEvent(Target.TARGET_JVM_DISCOVERY) + void onMessage(TargetDiscovery event) { + if (EventKind.LOST.equals(event.kind())) { + cache.remove(event.serviceRef()); + } + } + + @Blocking + public Optional getCredentialsForTarget(Target target) { + return Optional.ofNullable( + cache.computeIfAbsent( + target, + t -> + Credential.listAll().stream() + .filter( + c -> { + try { + return expressionEvaluator.applies( + c.matchExpression, t); + } catch (ScriptException e) { + logger.error(e); + return false; + } + }) + .findFirst() + .orElse(null))); + } + + @Blocking + public Optional getCredentialsForConnectUrl(URI connectUrl) { + return Target.find("connectUrl", connectUrl) + .singleResultOptional() + .flatMap(this::getCredentialsForTarget); + } +} diff --git a/src/main/java/io/cryostat/discovery/ContainerDiscovery.java b/src/main/java/io/cryostat/discovery/ContainerDiscovery.java index 1ce795ff2..48dffc944 100644 --- a/src/main/java/io/cryostat/discovery/ContainerDiscovery.java +++ b/src/main/java/io/cryostat/discovery/ContainerDiscovery.java @@ -251,16 +251,14 @@ private void doContainerListRequest(Consumer> successHandler item.body(), new TypeReference>() {})); } catch (JsonProcessingException e) { - logger.error("Json processing error"); + logger.error("Json processing error", e); } }, failure -> { - logger.error( - String.format("%s API request failed", getRealm()), - failure); + logger.errorv(failure, "{0} API request failed", getRealm()); }); } catch (JsonProcessingException e) { - logger.error("Json processing error"); + logger.error("Json processing error", e); } } @@ -279,13 +277,12 @@ private CompletableFuture doContainerInspectRequest(ContainerS result.complete( mapper.readValue(item.body(), ContainerDetails.class)); } catch (JsonProcessingException e) { - logger.error("Json processing error"); + logger.error("Json processing error", e); result.completeExceptionally(e); } }, failure -> { - logger.error( - String.format("%s API request 
failed", getRealm()), failure); + logger.errorv(failure, "{0} API request failed", getRealm()); result.completeExceptionally(failure); }); return result; @@ -322,7 +319,7 @@ public void handleContainerEvent(ContainerSpec desc, EventKind evtKind) { .Hostname; } catch (InterruptedException | TimeoutException | ExecutionException e) { containers.remove(desc); - logger.warn(String.format("Invalid %s target observed", getRealm()), e); + logger.warnv(e, "Invalid {0} target observed", getRealm()); return; } } @@ -331,7 +328,7 @@ public void handleContainerEvent(ContainerSpec desc, EventKind evtKind) { connectUrl = URI.create(serviceUrl.toString()); } catch (MalformedURLException | URISyntaxException e) { containers.remove(desc); - logger.warn(String.format("Invalid %s target observed", getRealm()), e); + logger.warnv(e, "Invalid {0} target observed", getRealm()); return; } diff --git a/src/main/java/io/cryostat/discovery/CustomDiscovery.java b/src/main/java/io/cryostat/discovery/CustomDiscovery.java index 0d94ada2a..3d3428426 100644 --- a/src/main/java/io/cryostat/discovery/CustomDiscovery.java +++ b/src/main/java/io/cryostat/discovery/CustomDiscovery.java @@ -25,6 +25,7 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; +import io.cryostat.ConfigProperties; import io.cryostat.V2Response; import io.cryostat.credentials.Credential; import io.cryostat.expressions.MatchExpression; @@ -49,6 +50,7 @@ import jakarta.ws.rs.core.Response; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; +import org.eclipse.microprofile.config.inject.ConfigProperty; import org.hibernate.exception.ConstraintViolationException; import org.jboss.logging.Logger; import org.jboss.resteasy.reactive.RestForm; @@ -68,6 +70,9 @@ public class CustomDiscovery { @Inject EventBus bus; @Inject TargetConnectionManager connectionManager; + @ConfigProperty(name = ConfigProperties.CONNECTIONS_FAILED_TIMEOUT) + Duration timeout; + @Transactional void onStart(@Observes StartupEvent evt) { DiscoveryNode universe = DiscoveryNode.getUniverse(); @@ -140,7 +145,7 @@ Response doV2Create( credential, conn -> conn.getJvmIdentifier().getHash()) .await() - .atMost(Duration.ofSeconds(10)); + .atMost(timeout); } catch (Exception e) { logger.error("Target connection failed", e); return Response.status(Response.Status.BAD_REQUEST.getStatusCode()) diff --git a/src/main/java/io/cryostat/discovery/Discovery.java b/src/main/java/io/cryostat/discovery/Discovery.java index ffd4bb369..2b2aa99a6 100644 --- a/src/main/java/io/cryostat/discovery/Discovery.java +++ b/src/main/java/io/cryostat/discovery/Discovery.java @@ -397,17 +397,17 @@ public void execute(JobExecutionContext context) throws JobExecutionException { var cb = PluginCallback.create(plugin); if (refresh) { cb.refresh(); - logger.infov( + logger.debugv( "Refreshed discovery plugin: {0} @ {1}", plugin.realm, plugin.callback); } else { cb.ping(); - logger.infov( + logger.debugv( "Retained discovery plugin: {0} @ {1}", plugin.realm, plugin.callback); } } catch (Exception e) { if (plugin != null) { - logger.infov( - "Pruned discovery plugin: {0} @ {1}", plugin.realm, plugin.callback); + logger.debugv( + e, "Pruned discovery plugin: {0} @ {1}", plugin.realm, plugin.callback); plugin.realm.delete(); plugin.delete(); new DiscoveryPlugin.PluginCallback.DiscoveryPluginAuthorizationHeaderFactory( diff --git a/src/main/java/io/cryostat/discovery/DiscoveryPlugin.java b/src/main/java/io/cryostat/discovery/DiscoveryPlugin.java index 
75eaf20b7..81a03bf07 100644 --- a/src/main/java/io/cryostat/discovery/DiscoveryPlugin.java +++ b/src/main/java/io/cryostat/discovery/DiscoveryPlugin.java @@ -89,6 +89,8 @@ public void prePersist(DiscoveryPlugin plugin) { return; } if (plugin.callback == null) { + plugin.realm.delete(); + plugin.delete(); throw new IllegalArgumentException(); } try { @@ -97,8 +99,12 @@ public void prePersist(DiscoveryPlugin plugin) { "Registered discovery plugin: {0} @ {1}", plugin.realm.name, plugin.callback); } catch (URISyntaxException e) { + plugin.realm.delete(); + plugin.delete(); throw new IllegalArgumentException(e); } catch (Exception e) { + plugin.realm.delete(); + plugin.delete(); logger.error("Discovery Plugin ping failed", e); throw e; } diff --git a/src/main/java/io/cryostat/expressions/MatchExpressionEvaluator.java b/src/main/java/io/cryostat/expressions/MatchExpressionEvaluator.java index ac33b70e3..1f4de6f9e 100644 --- a/src/main/java/io/cryostat/expressions/MatchExpressionEvaluator.java +++ b/src/main/java/io/cryostat/expressions/MatchExpressionEvaluator.java @@ -100,19 +100,16 @@ boolean load(String matchExpression, Target target) throws ScriptException { } void invalidate(String matchExpression) { + var cache = cacheManager.getCache(CACHE_NAME).orElseThrow(); // 0-index is important here. the argument order of the load() method determines the // composite key order - cacheManager - .getCache(CACHE_NAME) - .ifPresent( - c -> - c.invalidateIf( - k -> - Objects.equals( - (String) - ((CompositeCacheKey) k) - .getKeyElements()[0], - matchExpression))); + cache.invalidateIf( + k -> + Objects.equals( + (String) ((CompositeCacheKey) k).getKeyElements()[0], + matchExpression)) + .subscribe() + .with((v) -> {}, logger::warn); } public boolean applies(MatchExpression matchExpression, Target target) throws ScriptException { diff --git a/src/main/java/io/cryostat/graphql/ActiveRecordings.java b/src/main/java/io/cryostat/graphql/ActiveRecordings.java index a78c3c490..4f3f35985 100644 --- a/src/main/java/io/cryostat/graphql/ActiveRecordings.java +++ b/src/main/java/io/cryostat/graphql/ActiveRecordings.java @@ -119,7 +119,7 @@ public List archiveRecording( .map(n -> n.target)) .flatMap( t -> - t.activeRecordings.stream() + recordingHelper.listActiveRecordings(t).stream() .filter(r -> recordings == null || recordings.test(r))) .map( recording -> { @@ -148,7 +148,7 @@ public List stopRecording( .map(n -> n.target)) .flatMap( t -> - t.activeRecordings.stream() + recordingHelper.listActiveRecordings(t).stream() .filter(r -> recordings == null || recordings.test(r))) .map( recording -> { @@ -172,23 +172,16 @@ public List stopRecording( + " the subtrees of the discovery nodes matching the given filter") public List deleteRecording( @NonNull DiscoveryNodeFilter nodes, @Nullable ActiveRecordingsFilter recordings) { - var activeRecordings = - DiscoveryNode.listAll().stream() - .filter(nodes) - .flatMap( - node -> - RootNode.recurseChildren(node, n -> n.target != null) - .stream() - .map(n -> n.target)) - .flatMap( - t -> - t.activeRecordings.stream() - .filter( - r -> - recordings == null - || recordings.test(r))) - .toList(); - return activeRecordings.stream() + return DiscoveryNode.listAll().stream() + .filter(nodes) + .flatMap( + node -> + RootNode.recurseChildren(node, n -> n.target != null).stream() + .map(n -> n.target)) + .flatMap( + t -> + recordingHelper.listActiveRecordings(t).stream() + .filter(r -> recordings == null || recordings.test(r))) .map( recording -> { try { @@ -261,7 +254,7 @@ public Uni 
doSnapshot(@Source Target target) { @Blocking @Transactional @Description("Stop the specified Flight Recording") - public Uni doStop(@Source ActiveRecording recording) { + public Uni doStop(@Source ActiveRecording recording) throws Exception { var ar = ActiveRecording.findById(recording.id); return recordingHelper.stopRecording(ar); } @@ -358,7 +351,7 @@ public void setLabels(Map labels) { @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD") public static class RecordingMetadata { - public @Nullable Map labels; + public @Nullable Map labels = new HashMap<>(); } @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD") diff --git a/src/main/java/io/cryostat/graphql/RootNode.java b/src/main/java/io/cryostat/graphql/RootNode.java index ba5f5e74e..90b76fcb4 100644 --- a/src/main/java/io/cryostat/graphql/RootNode.java +++ b/src/main/java/io/cryostat/graphql/RootNode.java @@ -68,6 +68,7 @@ static Set recurseChildren( public static class DiscoveryNodeFilter implements Predicate { public @Nullable Long id; public @Nullable List ids; + public @Nullable List targetIds; public @Nullable String name; public @Nullable List names; public @Nullable List nodeTypes; @@ -78,6 +79,12 @@ public static class DiscoveryNodeFilter implements Predicate { public boolean test(DiscoveryNode t) { Predicate matchesId = n -> id == null || id.equals(n.id); Predicate matchesIds = n -> ids == null || ids.contains(n.id); + Predicate matchesTargetIds = + n -> + targetIds == null + || (targetIds != null + && n.target != null + && targetIds.contains(n.target.id)); Predicate matchesName = n -> name == null || name.equals(n.name); Predicate matchesNames = n -> names == null || names.contains(n.name); Predicate matchesNodeTypes = @@ -104,6 +111,7 @@ public boolean test(DiscoveryNode t) { return List.of( matchesId, matchesIds, + matchesTargetIds, matchesName, matchesNames, matchesNodeTypes, diff --git a/src/main/java/io/cryostat/graphql/TargetNodes.java b/src/main/java/io/cryostat/graphql/TargetNodes.java index 321653955..dfe4907a0 100644 --- a/src/main/java/io/cryostat/graphql/TargetNodes.java +++ b/src/main/java/io/cryostat/graphql/TargetNodes.java @@ -15,7 +15,9 @@ */ package io.cryostat.graphql; +import java.util.ArrayList; import java.util.List; +import java.util.Objects; import io.cryostat.core.net.JFRConnection; import io.cryostat.core.net.MBeanMetrics; @@ -36,6 +38,7 @@ import io.smallrye.graphql.api.Nullable; import io.smallrye.mutiny.Uni; import jakarta.inject.Inject; +import org.apache.commons.lang3.StringUtils; import org.eclipse.microprofile.graphql.Description; import org.eclipse.microprofile.graphql.GraphQLApi; import org.eclipse.microprofile.graphql.NonNull; @@ -61,6 +64,7 @@ public List getTargetNodes(DiscoveryNodeFilter filter) { // the one we end up selecting for here. // .filter(distinctWith(t -> t.jvmId)) .map(t -> t.discoveryNode) + .filter(Objects::nonNull) .filter(n -> filter == null ? 
true : filter.test(n)) .toList(); } @@ -75,11 +79,13 @@ public ActiveRecordings activeRecordings( @Source Target target, @Nullable ActiveRecordingsFilter filter) { var fTarget = Target.findById(target.id); var recordings = new ActiveRecordings(); - recordings.data = - fTarget.activeRecordings.stream() - .filter(r -> filter == null || filter.test(r)) - .toList(); - recordings.aggregate = AggregateInfo.fromActive(recordings.data); + if (StringUtils.isNotBlank(fTarget.jvmId)) { + recordings.data = + recordingHelper.listActiveRecordings(fTarget).stream() + .filter(r -> filter == null || filter.test(r)) + .toList(); + recordings.aggregate = AggregateInfo.fromActive(recordings.data); + } return recordings; } @@ -88,11 +94,13 @@ public ArchivedRecordings archivedRecordings( @Source Target target, @Nullable ArchivedRecordingsFilter filter) { var fTarget = Target.findById(target.id); var recordings = new ArchivedRecordings(); - recordings.data = - recordingHelper.listArchivedRecordings(fTarget).stream() - .filter(r -> filter == null || filter.test(r)) - .toList(); - recordings.aggregate = AggregateInfo.fromArchived(recordings.data); + if (StringUtils.isNotBlank(fTarget.jvmId)) { + recordings.data = + recordingHelper.listArchivedRecordings(fTarget).stream() + .filter(r -> filter == null || filter.test(r)) + .toList(); + recordings.aggregate = AggregateInfo.fromArchived(recordings.data); + } return recordings; } @@ -100,15 +108,17 @@ public ArchivedRecordings archivedRecordings( @Description("Get the active and archived recordings belonging to this target") public Recordings recordings(@Source Target target, Context context) { var fTarget = Target.findById(target.id); + var recordings = new Recordings(); + if (StringUtils.isBlank(fTarget.jvmId)) { + return recordings; + } var dfe = context.unwrap(DataFetchingEnvironment.class); var requestedFields = dfe.getSelectionSet().getFields().stream().map(field -> field.getName()).toList(); - var recordings = new Recordings(); - if (requestedFields.contains("active")) { recordings.active = new ActiveRecordings(); - recordings.active.data = fTarget.activeRecordings; + recordings.active.data = recordingHelper.listActiveRecordings(fTarget); recordings.active.aggregate = AggregateInfo.fromActive(recordings.active.data); } @@ -130,20 +140,20 @@ public Uni mbeanMetrics(@Source Target target) { @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD") public static class Recordings { - public @NonNull ActiveRecordings active; - public @NonNull ArchivedRecordings archived; + public @NonNull ActiveRecordings active = new ActiveRecordings(); + public @NonNull ArchivedRecordings archived = new ArchivedRecordings(); } @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD") public static class ActiveRecordings { - public @NonNull List data; - public @NonNull AggregateInfo aggregate; + public @NonNull List data = new ArrayList<>(); + public @NonNull AggregateInfo aggregate = AggregateInfo.fromActive(data); } @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD") public static class ArchivedRecordings { - public @NonNull List data; - public @NonNull AggregateInfo aggregate; + public @NonNull List data = new ArrayList<>(); + public @NonNull AggregateInfo aggregate = AggregateInfo.fromArchived(data); } @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD") diff --git a/src/main/java/io/cryostat/recordings/ActiveRecording.java b/src/main/java/io/cryostat/recordings/ActiveRecording.java index 9ff46c66e..e0fa5b750 100644 --- 
a/src/main/java/io/cryostat/recordings/ActiveRecording.java +++ b/src/main/java/io/cryostat/recordings/ActiveRecording.java @@ -30,6 +30,7 @@ import io.cryostat.ws.MessagingServer; import io.cryostat.ws.Notification; +import com.fasterxml.jackson.annotation.JsonIgnore; import io.quarkus.hibernate.orm.panache.PanacheEntity; import io.vertx.mutiny.core.eventbus.EventBus; import jakarta.enterprise.context.ApplicationScoped; @@ -42,11 +43,8 @@ import jakarta.persistence.PostPersist; import jakarta.persistence.PostRemove; import jakarta.persistence.PostUpdate; -import jakarta.persistence.PreRemove; -import jakarta.persistence.PreUpdate; import jakarta.persistence.Table; import jakarta.persistence.UniqueConstraint; -import jakarta.transaction.Transactional; import jakarta.validation.constraints.NotBlank; import jakarta.validation.constraints.NotNull; import jakarta.validation.constraints.PositiveOrZero; @@ -59,7 +57,9 @@ @EntityListeners(ActiveRecording.Listener.class) @Table( uniqueConstraints = { - @UniqueConstraint(columnNames = {"target_id", "name"}), + // remoteId is the unique ID assigned by the JVM to its own recordings, so these IDs are + // unique but only within the scope of each JVM. Since they are just sequential numeric + // IDs, they will not be unique across different JVMs. @UniqueConstraint(columnNames = {"target_id", "remoteId"}) }) public class ActiveRecording extends PanacheEntity { @@ -80,6 +80,12 @@ public class ActiveRecording extends PanacheEntity { @PositiveOrZero public long maxSize; @PositiveOrZero public long maxAge; + /** + * true if the recording was discovered on the Target and must have been created by some + * external process (not Cryostat), false if created by Cryostat. + */ + @JsonIgnore public boolean external; + @JdbcTypeCode(SqlTypes.JSON) @NotNull public Metadata metadata; @@ -145,27 +151,6 @@ public void setMetadata(Metadata metadata) { this.metadata = metadata; } - @Transactional - public static boolean deleteFromTarget(Target target, String recordingName) { - Optional recording = - target.activeRecordings.stream() - .filter(r -> r.name.equals(recordingName)) - .findFirst(); - boolean found = recording.isPresent(); - if (found) { - Logger.getLogger(ActiveRecording.class) - .debugv("Found and deleting match: {0} / {1}", target.alias, recording.get()); - recording.get().delete(); - getEntityManager().flush(); - } else { - Logger.getLogger(ActiveRecording.class) - .debugv( - "No match found for recording {0} in target {1}", - recordingName, target.alias); - } - return found; - } - @ApplicationScoped static class Listener { @@ -176,6 +161,9 @@ static class Listener { @PostPersist public void postPersist(ActiveRecording activeRecording) { + if (activeRecording.external) { + return; + } bus.publish( Recordings.RecordingEventCategory.ACTIVE_CREATED.category(), activeRecording); notify( @@ -184,45 +172,11 @@ public void postPersist(ActiveRecording activeRecording) { ActiveRecordingEvent.Payload.of(recordingHelper, activeRecording))); } - @PreUpdate - public void preUpdate(ActiveRecording activeRecording) throws Exception { - if (RecordingState.STOPPED.equals(activeRecording.state)) { - try { - connectionManager.executeConnectedTask( - activeRecording.target, - conn -> { - RecordingHelper.getDescriptorById(conn, activeRecording.remoteId) - .ifPresent( - d -> { - // this connection can fail if we are removing - // this recording as a cascading operation after - // the owner target was lost. 
It isn't too - // important in that case that we are unable to - // connect to the target and close the actual - // recording, because the target probably went - // offline or we otherwise just can't reach it. - try { - if (!d.getState() - .equals( - IRecordingDescriptor - .RecordingState - .STOPPED)) { - conn.getService().stop(d); - } - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - return null; - }); - } catch (Exception e) { - logger.error("Failed to stop remote recording", e); - } - } - } - @PostUpdate public void postUpdate(ActiveRecording activeRecording) { + if (activeRecording.external) { + return; + } if (RecordingState.STOPPED.equals(activeRecording.state)) { bus.publish( Recordings.RecordingEventCategory.ACTIVE_STOPPED.category(), @@ -234,37 +188,11 @@ public void postUpdate(ActiveRecording activeRecording) { } } - @PreRemove - public void preRemove(ActiveRecording activeRecording) throws Exception { - try { - activeRecording.target.activeRecordings.remove(activeRecording); - connectionManager.executeConnectedTask( - activeRecording.target, - conn -> { - // this connection can fail if we are removing this recording as a - // cascading operation after the owner target was lost. It isn't too - // important in that case that we are unable to connect to the target - // and close the actual recording, because the target probably went - // offline or we otherwise just can't reach it. - try { - RecordingHelper.getDescriptor(conn, activeRecording) - .ifPresent( - rec -> - Recordings.safeCloseRecording( - conn, rec, logger)); - } catch (Exception e) { - logger.info(e); - } - return null; - }); - } catch (Exception e) { - logger.error(e); - throw e; - } - } - @PostRemove public void postRemove(ActiveRecording activeRecording) { + if (activeRecording.external) { + return; + } bus.publish( Recordings.RecordingEventCategory.ACTIVE_DELETED.category(), activeRecording); notify( diff --git a/src/main/java/io/cryostat/recordings/RecordingHelper.java b/src/main/java/io/cryostat/recordings/RecordingHelper.java index c5590e19a..1a615128a 100644 --- a/src/main/java/io/cryostat/recordings/RecordingHelper.java +++ b/src/main/java/io/cryostat/recordings/RecordingHelper.java @@ -32,12 +32,15 @@ import java.time.ZoneOffset; import java.time.temporal.ChronoUnit; import java.util.ArrayList; +import java.util.Date; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; @@ -46,9 +49,7 @@ import org.openjdk.jmc.common.unit.IConstrainedMap; import org.openjdk.jmc.common.unit.QuantityConversionException; -import org.openjdk.jmc.flightrecorder.configuration.events.EventOptionID; import org.openjdk.jmc.flightrecorder.configuration.recording.RecordingOptionsBuilder; -import org.openjdk.jmc.rjmx.services.jfr.IEventTypeInfo; import org.openjdk.jmc.rjmx.services.jfr.IRecordingDescriptor; import io.cryostat.ConfigProperties; @@ -98,6 +99,15 @@ import org.apache.hc.core5.http.HttpStatus; import org.eclipse.microprofile.config.inject.ConfigProperty; import org.jboss.logging.Logger; +import org.quartz.Job; +import org.quartz.JobBuilder; +import org.quartz.JobDetail; +import org.quartz.JobExecutionContext; +import org.quartz.JobExecutionException; +import org.quartz.JobKey; +import 
org.quartz.Scheduler; +import org.quartz.Trigger; +import org.quartz.TriggerBuilder; import software.amazon.awssdk.core.exception.SdkClientException; import software.amazon.awssdk.core.sync.RequestBody; import software.amazon.awssdk.services.s3.S3Client; @@ -138,7 +148,7 @@ public class RecordingHelper { @Inject EventOptionsBuilder.Factory eventOptionsBuilderFactory; @Inject TargetTemplateService.Factory targetTemplateServiceFactory; @Inject S3TemplateService customTemplateService; - @Inject RecordingHelper recordingHelper; + @Inject Scheduler scheduler; @Inject @Named(Producers.BASE64_URL) @@ -161,6 +171,8 @@ public class RecordingHelper { CompletableFuture grafanaDatasourceURL = new CompletableFuture<>(); + private final List jobs = new CopyOnWriteArrayList<>(); + void onStart(@Observes StartupEvent evt) { if (grafanaDatasourceURLProperty.isEmpty()) { grafanaDatasourceURL.completeExceptionally( @@ -199,7 +211,92 @@ void onStart(@Observes StartupEvent evt) { } } - @Transactional + // FIXME hacky. This opens a remote connection on each call and updates our database with the + // data we find there. We should have some remote connection callback (JMX listener, WebSocket) + // to the target and update our database when remote recording events occur, rather than doing a + // full sync when this method is called. + public List listActiveRecordings(Target target) { + target = Target.find("id", target.id).singleResult(); + try { + var previousRecordings = target.activeRecordings; + var previousIds = + new HashSet<>(previousRecordings.stream().map(r -> r.remoteId).toList()); + var previousNames = + new HashSet<>(previousRecordings.stream().map(r -> r.name).toList()); + List descriptors = + connectionManager.executeConnectedTask( + target, conn -> conn.getService().getAvailableRecordings()); + boolean updated = false; + var it = target.activeRecordings.iterator(); + while (it.hasNext()) { + var r = it.next(); + if (!previousIds.contains(r.remoteId)) { + r.delete(); + it.remove(); + updated |= true; + } + } + for (var descriptor : descriptors) { + if (previousIds.contains(descriptor.getId())) { + var recording = target.getRecordingById(descriptor.getId()); + switch (descriptor.getState()) { + case CREATED: + recording.state = RecordingState.DELAYED; + updated |= true; + break; + case RUNNING: + recording.state = RecordingState.RUNNING; + updated |= true; + break; + case STOPPING: + recording.state = RecordingState.RUNNING; + updated |= true; + break; + case STOPPED: + recording.state = RecordingState.STOPPED; + updated |= true; + break; + default: + recording.state = RecordingState.NEW; + updated |= true; + break; + } + if (updated) { + recording.persist(); + } + continue; + } + updated |= true; + // TODO is there any metadata to attach here? + var recording = ActiveRecording.from(target, descriptor, new Metadata(Map.of())); + recording.external = true; + // FIXME this is a hack. Older Cryostat versions enforced that recordings' names + // were unique within the target JVM, but this could only be enforced when Cryostat + // was originating the recording creation. Recordings already have unique IDs, so + // enforcing unique names was only for the purpose of providing a tidier UI. We + // should remove this assumption/enforcement and allow recordings to have non-unique + // names. However, the UI is currently built with this expectation and often uses + // recordings' names as unique keys rather than their IDs. 
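+                // Name collisions are resolved by suffixing the remote ID, e.g. a discovered
+                // external recording named "pulse" with remoteId 7 is stored as "pulse-7" when
+                // the name "pulse" is already taken (values here are illustrative only).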
+ while (previousNames.contains(recording.name)) { + recording.name = String.format("%s-%d", recording.name, recording.remoteId); + } + previousNames.add(recording.name); + previousIds.add(recording.remoteId); + recording.persist(); + target.activeRecordings.add(recording); + } + if (updated) { + target.persist(); + } + } catch (Exception e) { + logger.errorv( + e, + "Failure to synchronize existing target recording state for {0}", + target.connectUrl); + } + return target.activeRecordings; + } + public Uni startRecording( Target target, RecordingReplace replace, @@ -207,6 +304,7 @@ public Uni startRecording( RecordingOptions options, Map rawLabels) throws QuantityConversionException { + String recordingName = options.name(); return connectionManager.executeConnectedTaskUni( target, conn -> { @@ -238,17 +336,17 @@ public Uni startRecording( throw new EntityExistsException( "Recording", options.name()); } - if (!ActiveRecording.deleteFromTarget( - target, options.name())) { - logger.warnf( - "Could not delete recording %s from target %s", - options.name(), target.alias); - } + listActiveRecordings(target).stream() + .filter(r -> r.name.equals(recordingName)) + .forEach(this::deleteRecording); }); IRecordingDescriptor desc = conn.getService() - .start(recordingOptions, enableEvents(target, template)); + .start( + recordingOptions, + template.getName(), + template.getType()); Map labels = new HashMap<>(rawLabels); labels.put("template.name", template.getName()); @@ -261,6 +359,28 @@ public Uni startRecording( target.activeRecordings.add(recording); target.persist(); + if (!recording.continuous) { + JobDetail jobDetail = + JobBuilder.newJob(StopRecordingJob.class) + .withIdentity(recording.name, target.jvmId) + .build(); + if (!jobs.contains(jobDetail.getKey())) { + Map data = jobDetail.getJobDataMap(); + data.put("recordingId", recording.id); + data.put("archive", options.archiveOnStop().orElse(false)); + Trigger trigger = + TriggerBuilder.newTrigger() + .withIdentity(recording.name, target.jvmId) + .usingJobData(jobDetail.getJobDataMap()) + .startAt( + new Date( + System.currentTimeMillis() + + recording.duration)) + .build(); + scheduler.scheduleJob(jobDetail, trigger); + } + } + logger.tracev( "Started recording: {0} {1}", target.connectUrl, target.activeRecordings); @@ -332,19 +452,6 @@ public Uni createSnapshot(Target target) { }); } - public Uni stopRecording(ActiveRecording recording) { - recording.state = RecordingState.STOPPED; - recording.persist(); - return Uni.createFrom().item(recording); - } - - @Transactional - public Uni deleteRecording(ActiveRecording recording) { - recording.delete(); - recording.persist(); - return Uni.createFrom().item(recording); - } - private boolean snapshotIsReadable(Target target, InputStream snapshot) throws IOException { if (!connectionManager.markConnectionInUse(target)) { throw new IOException( @@ -373,6 +480,55 @@ private boolean shouldRestartRecording( } } + public Uni stopRecording(ActiveRecording recording, boolean archive) + throws Exception { + var out = + connectionManager.executeConnectedTask( + recording.target, + conn -> { + var desc = getDescriptorById(conn, recording.remoteId); + if (desc.isEmpty()) { + throw new NotFoundException(); + } + if (!desc.get() + .getState() + .equals( + org.openjdk.jmc.rjmx.services.jfr.IRecordingDescriptor + .RecordingState.STOPPED)) { + conn.getService().stop(desc.get()); + } + recording.state = RecordingState.STOPPED; + return recording; + }); + out.persist(); + if (archive) { + archiveRecording(out, 
null, null); + } + return Uni.createFrom().item(out); + } + + public Uni stopRecording(ActiveRecording recording) throws Exception { + return stopRecording(recording, false); + } + + public Uni deleteRecording(ActiveRecording recording) { + var closed = + connectionManager.executeConnectedTask( + recording.target, + conn -> { + var desc = getDescriptorById(conn, recording.remoteId); + if (desc.isEmpty()) { + throw new NotFoundException(); + } + conn.getService().close(desc.get()); + return recording; + }); + closed.target.activeRecordings.remove(recording); + closed.target.persist(); + closed.delete(); + return Uni.createFrom().item(closed); + } + public LinkedRecordingDescriptor toExternalForm(ActiveRecording recording) { return new LinkedRecordingDescriptor( recording.id, @@ -405,44 +561,6 @@ public Pair parseEventSpecifier(String eventSpecifier) { throw new BadRequestException(eventSpecifier); } - private IConstrainedMap enableAllEvents(Target target) throws Exception { - return connectionManager.executeConnectedTask( - target, - connection -> { - EventOptionsBuilder builder = eventOptionsBuilderFactory.create(connection); - - for (IEventTypeInfo eventTypeInfo : - connection.getService().getAvailableEventTypes()) { - builder.addEvent( - eventTypeInfo.getEventTypeID().getFullKey(), "enabled", "true"); - } - - return builder.build(); - }); - } - - private IConstrainedMap enableEvents(Target target, Template eventTemplate) - throws Exception { - if (EventTemplates.ALL_EVENTS_TEMPLATE.equals(eventTemplate)) { - return enableAllEvents(target); - } - switch (eventTemplate.getType()) { - case TARGET: - return targetTemplateServiceFactory - .create(target) - .getEvents(eventTemplate.getName(), eventTemplate.getType()) - .orElseThrow(); - case CUSTOM: - return customTemplateService - .getEvents(eventTemplate.getName(), eventTemplate.getType()) - .orElseThrow(); - default: - throw new BadRequestException( - String.format( - "Invalid/unknown event template %s", eventTemplate.getName())); - } - } - public Template getPreferredTemplate( Target target, String templateName, TemplateType templateType) { Objects.requireNonNull(target); @@ -490,8 +608,7 @@ public Template getPreferredTemplate( } } - static Optional getDescriptorById( - JFRConnection connection, long remoteId) { + Optional getDescriptorById(JFRConnection connection, long remoteId) { try { return connection.getService().getAvailableRecordings().stream() .filter(r -> remoteId == r.getId()) @@ -501,12 +618,12 @@ static Optional getDescriptorById( } } - static Optional getDescriptor( + Optional getDescriptor( JFRConnection connection, ActiveRecording activeRecording) { return getDescriptorById(connection, activeRecording.remoteId); } - public static Optional getDescriptorByName( + public Optional getDescriptorByName( JFRConnection connection, String recordingName) { try { return connection.getService().getAvailableRecordings().stream() @@ -602,25 +719,25 @@ public List listArchivedRecordings(Target target) { } public ArchivedRecording archiveRecording( - ActiveRecording activeRecording, String savename, Instant expiry) throws Exception { + ActiveRecording recording, String savename, Instant expiry) throws Exception { // AWS object key name guidelines advise characters to avoid (% so we should not pass url // encoded characters) String transformedAlias = - URLDecoder.decode(activeRecording.target.alias, StandardCharsets.UTF_8) + URLDecoder.decode(recording.target.alias, StandardCharsets.UTF_8) .replaceAll("[\\._/]+", "-"); Instant now = 
clock.now(); String timestamp = now.truncatedTo(ChronoUnit.SECONDS).toString().replaceAll("[-:]+", ""); String filename = - String.format("%s_%s_%s.jfr", transformedAlias, activeRecording.name, timestamp); + String.format("%s_%s_%s.jfr", transformedAlias, recording.name, timestamp); if (StringUtils.isBlank(savename)) { savename = filename; } int mib = 1024 * 1024; - String key = archivedRecordingKey(activeRecording.target.jvmId, filename); + String key = archivedRecordingKey(recording.target.jvmId, filename); String multipartId = null; List> parts = new ArrayList<>(); long accum = 0; - try (var stream = remoteRecordingStreamFactory.open(activeRecording); + try (var stream = getActiveInputStream(recording); var ch = Channels.newChannel(stream)) { ByteBuffer buf = ByteBuffer.allocate(20 * mib); CreateMultipartUploadRequest.Builder builder = @@ -630,7 +747,7 @@ public ArchivedRecording archiveRecording( .contentType(JFR_MIME) .contentDisposition( String.format("attachment; filename=\"%s\"", savename)) - .tagging(createActiveRecordingTagging(activeRecording, expiry)); + .tagging(createActiveRecordingTagging(recording, expiry)); if (expiry != null && expiry.isAfter(Instant.now())) { builder = builder.expires(expiry); } @@ -717,18 +834,18 @@ public ArchivedRecording archiveRecording( var event = new ActiveRecordingEvent( Recordings.RecordingEventCategory.ACTIVE_SAVED, - ActiveRecordingEvent.Payload.of(this, activeRecording)); + ActiveRecordingEvent.Payload.of(this, recording)); bus.publish(event.category().category(), event.payload().recording()); bus.publish( MessagingServer.class.getName(), new Notification(event.category().category(), event.payload())); } return new ArchivedRecording( - activeRecording.target.jvmId, + recording.target.jvmId, filename, - downloadUrl(activeRecording.target.jvmId, filename), - reportUrl(activeRecording.target.jvmId, filename), - activeRecording.metadata, + downloadUrl(recording.target.jvmId, filename), + reportUrl(recording.target.jvmId, filename), + recording.metadata, accum, now.getEpochSecond()); } @@ -841,6 +958,16 @@ private int retryRead(ReadableByteChannel channel, ByteBuffer buffer) throws IOE return read; } + void safeCloseRecording(JFRConnection conn, IRecordingDescriptor rec) { + try { + conn.getService().close(rec); + } catch (org.openjdk.jmc.rjmx.services.jfr.FlightRecorderException e) { + logger.error("Failed to stop remote recording", e); + } catch (Exception e) { + logger.error("Unexpected exception", e); + } + } + /* Archived Recording Helpers */ public void deleteArchivedRecording(String jvmId, String filename) { storage.deleteObject( @@ -950,7 +1077,7 @@ public ActiveRecording updateRecordingMetadata( notify( new ActiveRecordingEvent( Recordings.RecordingEventCategory.METADATA_UPDATED, - ActiveRecordingEvent.Payload.of(recordingHelper, recording))); + ActiveRecordingEvent.Payload.of(this, recording))); } return recording; } @@ -1130,6 +1257,29 @@ public static RecordingReplace fromString(String replace) { } } + static class StopRecordingJob implements Job { + + @Inject RecordingHelper recordingHelper; + @Inject Logger logger; + + @Override + @Transactional + public void execute(JobExecutionContext ctx) throws JobExecutionException { + var jobDataMap = ctx.getJobDetail().getJobDataMap(); + try { + Optional recording = + ActiveRecording.find("id", (Long) jobDataMap.get("recordingId")) + .singleResultOptional(); + if (recording.isPresent()) { + recordingHelper.stopRecording( + recording.get(), (Boolean) jobDataMap.get("archive")); + } + } catch 
(Exception e) { + throw new JobExecutionException(e); + } + } + } + static class RecordingNotFoundException extends Exception { public RecordingNotFoundException(String targetId, String recordingName) { super( diff --git a/src/main/java/io/cryostat/recordings/Recordings.java b/src/main/java/io/cryostat/recordings/Recordings.java index 545a72d88..ba1bf5965 100644 --- a/src/main/java/io/cryostat/recordings/Recordings.java +++ b/src/main/java/io/cryostat/recordings/Recordings.java @@ -29,15 +29,12 @@ import java.util.Map; import java.util.Objects; import java.util.Optional; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.openjdk.jmc.common.unit.IConstrainedMap; import org.openjdk.jmc.common.unit.IOptionDescriptor; import org.openjdk.jmc.flightrecorder.configuration.recording.RecordingOptionsBuilder; -import org.openjdk.jmc.rjmx.services.jfr.FlightRecorderException; import org.openjdk.jmc.rjmx.services.jfr.IFlightRecorderService; import org.openjdk.jmc.rjmx.services.jfr.IRecordingDescriptor; @@ -47,7 +44,6 @@ import io.cryostat.V2Response; import io.cryostat.core.EventOptionsBuilder; import io.cryostat.core.RecordingOptionsCustomizer; -import io.cryostat.core.net.JFRConnection; import io.cryostat.core.sys.Clock; import io.cryostat.core.templates.Template; import io.cryostat.core.templates.TemplateType; @@ -121,7 +117,6 @@ public class Recordings { @Inject StorageBuckets storageBuckets; @Inject S3Presigner presigner; @Inject RemoteRecordingInputStreamFactory remoteRecordingStreamFactory; - @Inject ScheduledExecutorService scheduler; @Inject ObjectMapper mapper; @Inject RecordingHelper recordingHelper; @Inject Logger logger; @@ -452,7 +447,9 @@ public Collection listFsArchives(@RestPath String jv @RolesAllowed("read") public List listForTarget(@RestPath long id) throws Exception { Target target = Target.find("id", id).singleResult(); - return target.activeRecordings.stream().map(recordingHelper::toExternalForm).toList(); + return recordingHelper.listActiveRecordings(target).stream() + .map(recordingHelper::toExternalForm) + .toList(); } @GET @@ -474,7 +471,7 @@ public String patch(@RestPath long targetId, @RestPath long remoteId, String bod throws Exception { Target target = Target.find("id", targetId).singleResult(); Optional recording = - target.activeRecordings.stream() + recordingHelper.listActiveRecordings(target).stream() .filter(rec -> rec.remoteId == remoteId) .findFirst(); if (!recording.isPresent()) { @@ -513,7 +510,7 @@ public Response patchV1(@RestPath URI connectUrl, @RestPath String recordingName Target target = Target.getTargetByConnectUrl(connectUrl); Optional recording = connectionManager.executeConnectedTask( - target, conn -> RecordingHelper.getDescriptorByName(conn, recordingName)); + target, conn -> recordingHelper.getDescriptorByName(conn, recordingName)); if (recording.isEmpty()) { throw new NotFoundException(); } @@ -596,7 +593,7 @@ public Response createRecording( @RestForm Optional replace, // restart param is deprecated, only 'replace' should be used and takes priority if both // are provided - @RestForm Optional restart, + @Deprecated @RestForm Optional restart, @RestForm Optional duration, @RestForm Optional toDisk, @RestForm Optional maxAge, @@ -644,35 +641,11 @@ public Response createRecording( .await() .atMost(Duration.ofSeconds(10)); - if (recording.duration > 0) { - scheduler.schedule( - () -> stopRecording(recording.id, 
archiveOnStop.orElse(false)), - recording.duration, - TimeUnit.MILLISECONDS); - } - return Response.status(Response.Status.CREATED) .entity(recordingHelper.toExternalForm(recording)) .build(); } - @Transactional - void stopRecording(long id, boolean archive) { - ActiveRecording.findByIdOptional(id) - .ifPresent( - recording -> { - try { - recording.state = RecordingState.STOPPED; - recording.persist(); - if (archive) { - recordingHelper.archiveRecording(recording, null, null); - } - } catch (Exception e) { - logger.error("couldn't update recording", e); - } - }); - } - @POST @Transactional @Blocking @@ -700,7 +673,7 @@ public Response deleteRecordingV1(@RestPath URI connectUrl, @RestPath String rec } Target target = Target.getTargetByConnectUrl(connectUrl); long remoteId = - target.activeRecordings.stream() + recordingHelper.listActiveRecordings(target).stream() .filter(r -> Objects.equals(r.name, recordingName)) .findFirst() .map(r -> r.remoteId) @@ -720,14 +693,11 @@ public Response deleteRecordingV1(@RestPath URI connectUrl, @RestPath String rec @RolesAllowed("write") public void deleteRecording(@RestPath long targetId, @RestPath long remoteId) throws Exception { Target target = Target.find("id", targetId).singleResult(); - target.activeRecordings.stream() - .filter(r -> r.remoteId == remoteId) - .findFirst() - .ifPresentOrElse( - recordingHelper::deleteRecording, - () -> { - throw new NotFoundException(); - }); + var recording = target.getRecordingById(remoteId); + if (recording == null) { + throw new NotFoundException(); + } + recordingHelper.deleteRecording(recording); } @DELETE @@ -791,16 +761,6 @@ public void deleteArchivedRecording(@RestPath String jvmId, @RestPath String fil } } - static void safeCloseRecording(JFRConnection conn, IRecordingDescriptor rec, Logger logger) { - try { - conn.getService().close(rec); - } catch (FlightRecorderException e) { - logger.error("Failed to stop remote recording", e); - } catch (Exception e) { - logger.error("Unexpected exception", e); - } - } - @POST @Blocking @Path("/api/v1/targets/{connectUrl}/recordings/{recordingName}/upload") @@ -809,7 +769,7 @@ public Response uploadActiveToGrafanaV1( @RestPath URI connectUrl, @RestPath String recordingName) { Target target = Target.getTargetByConnectUrl(connectUrl); long remoteId = - target.activeRecordings.stream() + recordingHelper.listActiveRecordings(target).stream() .filter(r -> Objects.equals(r.name, recordingName)) .findFirst() .map(r -> r.remoteId) diff --git a/src/main/java/io/cryostat/recordings/RemoteRecordingInputStreamFactory.java b/src/main/java/io/cryostat/recordings/RemoteRecordingInputStreamFactory.java index c95677a1d..4b80e0a9c 100644 --- a/src/main/java/io/cryostat/recordings/RemoteRecordingInputStreamFactory.java +++ b/src/main/java/io/cryostat/recordings/RemoteRecordingInputStreamFactory.java @@ -31,13 +31,14 @@ public class RemoteRecordingInputStreamFactory { @Inject TargetConnectionManager connectionManager; + @Inject RecordingHelper recordingHelper; public ProgressInputStream open(ActiveRecording recording) throws Exception { return connectionManager.executeConnectedTask( recording.target, conn -> { IRecordingDescriptor desc = - RecordingHelper.getDescriptor(conn, recording).orElseThrow(); + recordingHelper.getDescriptor(conn, recording).orElseThrow(); return open(conn, recording.target, desc); }); } diff --git a/src/main/java/io/cryostat/reports/Reports.java b/src/main/java/io/cryostat/reports/Reports.java index 0950fc82b..dfd3677a7 100644 --- 
a/src/main/java/io/cryostat/reports/Reports.java +++ b/src/main/java/io/cryostat/reports/Reports.java @@ -116,7 +116,7 @@ public Uni> get(@RestPath String encodedKey) { public Response getActiveV1(@RestPath String targetId, @RestPath String recordingName) { var target = Target.getTargetByConnectUrl(URI.create(targetId)); var recording = - target.activeRecordings.stream() + helper.listActiveRecordings(target).stream() .filter(r -> r.name.equals(recordingName)) .findFirst() .orElseThrow(); diff --git a/src/main/java/io/cryostat/rules/RuleService.java b/src/main/java/io/cryostat/rules/RuleService.java index e24915ef1..983fe0057 100644 --- a/src/main/java/io/cryostat/rules/RuleService.java +++ b/src/main/java/io/cryostat/rules/RuleService.java @@ -226,10 +226,11 @@ private void scheduleArchival(Rule rule, Target target, ActiveRecording recordin try { quartz.scheduleJob(jobDetail, trigger); } catch (SchedulerException e) { - logger.infov( + logger.errorv( + e, "Failed to schedule archival job for rule {0} in target {1}", - rule.name, target.alias); - logger.error(e); + rule.name, + target.alias); } jobs.add(jobDetail.getKey()); } diff --git a/src/main/java/io/cryostat/targets/AgentConnectionFactory.java b/src/main/java/io/cryostat/targets/AgentApiException.java similarity index 58% rename from src/main/java/io/cryostat/targets/AgentConnectionFactory.java rename to src/main/java/io/cryostat/targets/AgentApiException.java index 6b87538b7..b67bfdb02 100644 --- a/src/main/java/io/cryostat/targets/AgentConnectionFactory.java +++ b/src/main/java/io/cryostat/targets/AgentApiException.java @@ -15,22 +15,10 @@ */ package io.cryostat.targets; -import java.net.URI; +import jakarta.ws.rs.WebApplicationException; -import io.cryostat.core.sys.Clock; - -import io.vertx.mutiny.core.Vertx; -import io.vertx.mutiny.ext.web.client.WebClient; -import jakarta.enterprise.context.ApplicationScoped; -import jakarta.inject.Inject; - -@ApplicationScoped -class AgentConnectionFactory { - - @Inject Vertx vertx; - @Inject Clock clock; - - AgentConnection createConnection(URI agentUri) { - return new AgentConnection(agentUri, WebClient.create(vertx), clock); +public class AgentApiException extends WebApplicationException { + public AgentApiException(int statusCode) { + super(String.format("Unexpected HTTP response code %d", statusCode)); } } diff --git a/src/main/java/io/cryostat/targets/AgentClient.java b/src/main/java/io/cryostat/targets/AgentClient.java new file mode 100644 index 000000000..9eec74c1c --- /dev/null +++ b/src/main/java/io/cryostat/targets/AgentClient.java @@ -0,0 +1,578 @@ +/* + * Copyright The Cryostat Authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.cryostat.targets; + +import java.net.URI; +import java.time.Duration; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.openjdk.jmc.common.unit.IConstrainedMap; +import org.openjdk.jmc.common.unit.IConstraint; +import org.openjdk.jmc.common.unit.IMutableConstrainedMap; +import org.openjdk.jmc.common.unit.IOptionDescriptor; +import org.openjdk.jmc.common.unit.QuantityConversionException; +import org.openjdk.jmc.common.unit.SimpleConstrainedMap; +import org.openjdk.jmc.flightrecorder.configuration.events.EventOptionID; +import org.openjdk.jmc.flightrecorder.configuration.events.IEventTypeID; +import org.openjdk.jmc.flightrecorder.configuration.internal.EventTypeIDV2; +import org.openjdk.jmc.rjmx.services.jfr.IEventTypeInfo; +import org.openjdk.jmc.rjmx.services.jfr.IRecordingDescriptor; + +import io.cryostat.ConfigProperties; +import io.cryostat.core.net.MBeanMetrics; +import io.cryostat.core.serialization.SerializableRecordingDescriptor; +import io.cryostat.credentials.Credential; +import io.cryostat.credentials.CredentialsFinder; +import io.cryostat.targets.AgentJFRService.StartRecordingRequest; +import io.cryostat.util.HttpStatusCodeIdentifier; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.smallrye.mutiny.Uni; +import io.smallrye.mutiny.unchecked.Unchecked; +import io.vertx.core.http.HttpMethod; +import io.vertx.core.json.JsonArray; +import io.vertx.core.json.JsonObject; +import io.vertx.ext.auth.authentication.UsernamePasswordCredentials; +import io.vertx.mutiny.core.buffer.Buffer; +import io.vertx.mutiny.ext.web.client.HttpRequest; +import io.vertx.mutiny.ext.web.client.HttpResponse; +import io.vertx.mutiny.ext.web.client.WebClient; +import io.vertx.mutiny.ext.web.codec.BodyCodec; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; +import jakarta.ws.rs.ForbiddenException; +import jdk.jfr.RecordingState; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.Pair; +import org.apache.hc.client5.http.auth.InvalidCredentialsException; +import org.eclipse.microprofile.config.inject.ConfigProperty; +import org.jboss.logging.Logger; + +public class AgentClient { + + public static final String NULL_CREDENTIALS = "No credentials found for agent"; + + private final Target target; + private final WebClient webClient; + private final Duration httpTimeout; + private final ObjectMapper mapper; + private final CredentialsFinder credentialsFinder; + private final Logger logger = Logger.getLogger(getClass()); + + private AgentClient( + Target target, + WebClient webClient, + ObjectMapper mapper, + Duration httpTimeout, + CredentialsFinder credentialsFinder) { + this.target = target; + this.webClient = webClient; + this.mapper = mapper; + this.httpTimeout = httpTimeout; + this.credentialsFinder = credentialsFinder; + } + + Target getTarget() { + return target; + } + + URI getUri() { + return getTarget().connectUrl; + } + + Duration getTimeout() { + return httpTimeout; + } + + Uni ping() { + return invoke(HttpMethod.GET, "/", BodyCodec.none()) + .map(HttpResponse::statusCode) + .map(HttpStatusCodeIdentifier::isSuccessCode); + } + + Uni mbeanMetrics() { + return invoke(HttpMethod.GET, "/mbean-metrics/", BodyCodec.string()) + .map(HttpResponse::body) + .map(Unchecked.function(s -> mapper.readValue(s, 
MBeanMetrics.class))); + } + + Uni startRecording(StartRecordingRequest req) { + try { + return invoke( + HttpMethod.POST, + "/recordings/", + Buffer.buffer(mapper.writeValueAsBytes(req)), + BodyCodec.string()) + .map( + Unchecked.function( + resp -> { + int statusCode = resp.statusCode(); + if (HttpStatusCodeIdentifier.isSuccessCode(statusCode)) { + String body = resp.body(); + return mapper.readValue( + body, + SerializableRecordingDescriptor.class) + .toJmcForm(); + } else if (statusCode == 403) { + logger.errorv( + "startRecording for {0} failed: HTTP 403", + getUri()); + throw new ForbiddenException( + new UnsupportedOperationException( + "startRecording")); + } else { + logger.errorv( + "startRecording for {0} failed: HTTP {1}", + getUri(), statusCode); + throw new AgentApiException(statusCode); + } + })); + } catch (JsonProcessingException e) { + logger.error("startRecording request failed", e); + return Uni.createFrom().failure(e); + } + } + + Uni startSnapshot() { + try { + return invoke( + HttpMethod.POST, + "/recordings/", + Buffer.buffer( + mapper.writeValueAsBytes( + new StartRecordingRequest( + "snapshot", "", "", 0, 0, 0))), + BodyCodec.string()) + .map( + Unchecked.function( + resp -> { + int statusCode = resp.statusCode(); + if (HttpStatusCodeIdentifier.isSuccessCode(statusCode)) { + String body = resp.body(); + return mapper.readValue( + body, + SerializableRecordingDescriptor.class) + .toJmcForm(); + } else if (statusCode == 403) { + throw new ForbiddenException( + new UnsupportedOperationException( + "startSnapshot")); + } else { + throw new AgentApiException(statusCode); + } + })); + } catch (JsonProcessingException e) { + logger.error(e); + return Uni.createFrom().failure(e); + } + } + + Uni updateRecordingOptions(long id, IConstrainedMap newSettings) { + Map settings = new HashMap<>(newSettings.keySet().size()); + for (String key : newSettings.keySet()) { + Object value = newSettings.get(key); + if (value == null) { + continue; + } + if (value instanceof String && StringUtils.isBlank((String) value)) { + continue; + } + settings.put(key, value); + } + + try { + return invoke( + HttpMethod.PATCH, + String.format("/recordings/%d", id), + Buffer.buffer(mapper.writeValueAsBytes(settings)), + BodyCodec.none()) + .map( + resp -> { + int statusCode = resp.statusCode(); + if (HttpStatusCodeIdentifier.isSuccessCode(statusCode)) { + return null; + } else if (statusCode == 403) { + throw new ForbiddenException( + new UnsupportedOperationException( + "updateRecordingOptions")); + } else { + throw new AgentApiException(statusCode); + } + }); + } catch (JsonProcessingException e) { + logger.error(e); + return Uni.createFrom().failure(e); + } + } + + Uni openStream(long id) { + return invoke(HttpMethod.GET, "/recordings/" + id, BodyCodec.buffer()) + .map( + resp -> { + int statusCode = resp.statusCode(); + if (HttpStatusCodeIdentifier.isSuccessCode(statusCode)) { + return resp.body(); + } else if (statusCode == 403) { + throw new ForbiddenException( + new UnsupportedOperationException("openStream")); + } else { + throw new AgentApiException(statusCode); + } + }); + } + + Uni stopRecording(long id) { + // FIXME this is a terrible hack, the interfaces here should not require only an + // IConstrainedMap with IOptionDescriptors but allow us to pass other and more simply + // serializable data to the Agent, such as this recording state entry + IConstrainedMap map = + new IConstrainedMap() { + @Override + public Set keySet() { + return Set.of("state"); + } + + @Override + public Object 
+                    public Object get(String key) {
+                        return RecordingState.STOPPED.name();
+                    }
+
+                    @Override
+                    public IConstraint<?> getConstraint(String key) {
+                        throw new UnsupportedOperationException();
+                    }
+
+                    @Override
+                    public String getPersistableString(String key) {
+                        throw new UnsupportedOperationException();
+                    }
+
+                    @Override
+                    public IMutableConstrainedMap<String> emptyWithSameConstraints() {
+                        throw new UnsupportedOperationException();
+                    }
+
+                    @Override
+                    public IMutableConstrainedMap<String> mutableCopy() {
+                        throw new UnsupportedOperationException();
+                    }
+                };
+        return updateRecordingOptions(id, map);
+    }
+
+    Uni<Void> deleteRecording(long id) {
+        return invoke(
+                        HttpMethod.DELETE,
+                        String.format("/recordings/%d", id),
+                        Buffer.buffer(),
+                        BodyCodec.none())
+                .map(
+                        resp -> {
+                            int statusCode = resp.statusCode();
+                            if (HttpStatusCodeIdentifier.isSuccessCode(statusCode)
+                                    || statusCode == 404) {
+                                // if the request succeeded we're OK. We're also OK if the recording
+                                // could not be found - it was probably already deleted, so this is
+                                // not a failure
+                                return null;
+                            } else if (statusCode == 403) {
+                                throw new ForbiddenException(
+                                        new UnsupportedOperationException("deleteRecording"));
+                            } else {
+                                throw new AgentApiException(statusCode);
+                            }
+                        });
+    }
+
+    Uni<List<IRecordingDescriptor>> activeRecordings() {
+        return invoke(HttpMethod.GET, "/recordings/", BodyCodec.string())
+                .map(HttpResponse::body)
+                .map(
+                        s -> {
+                            try {
+                                return mapper.readValue(
+                                        s,
+                                        new TypeReference<
+                                                List<SerializableRecordingDescriptor>>() {});
+                            } catch (JsonProcessingException e) {
+                                logger.error(e);
+                                return List.of();
+                            }
+                        })
+                .map(arr -> arr.stream().map(SerializableRecordingDescriptor::toJmcForm).toList());
+    }
+
+    Uni<List<? extends IEventTypeInfo>> eventTypes() {
+        return invoke(HttpMethod.GET, "/event-types/", BodyCodec.jsonArray())
+                .map(HttpResponse::body)
+                .map(arr -> arr.stream().map(o -> new AgentEventTypeInfo((JsonObject) o)).toList());
+    }
+
+    Uni<IConstrainedMap<EventOptionID>> eventSettings() {
+        return invoke(HttpMethod.GET, "/event-settings/", BodyCodec.jsonArray())
+                .map(HttpResponse::body)
+                .map(
+                        arr -> {
+                            return arr.stream()
+                                    .map(
+                                            o -> {
+                                                JsonObject json = (JsonObject) o;
+                                                String eventName = json.getString("name");
+                                                JsonArray jsonSettings =
+                                                        json.getJsonArray("settings");
+                                                Map<String, String> settings = new HashMap<>();
+                                                jsonSettings.forEach(
+                                                        s -> {
+                                                            JsonObject j = (JsonObject) s;
+                                                            settings.put(
+                                                                    j.getString("name"),
+                                                                    j.getString("defaultValue"));
+                                                        });
+                                                return Pair.of(eventName, settings);
+                                            })
+                                    .toList();
+                        })
+                .map(
+                        list -> {
+                            SimpleConstrainedMap<EventOptionID> result =
+                                    new SimpleConstrainedMap<>(null);
+                            list.forEach(
+                                    item -> {
+                                        item.getRight()
+                                                .forEach(
+                                                        (key, val) -> {
+                                                            try {
+                                                                result.put(
+                                                                        new EventOptionID(
+                                                                                new EventTypeIDV2(
+                                                                                        item.getLeft()),
+                                                                                key),
+                                                                        null,
+                                                                        val);
+                                                            } catch (
+                                                                    QuantityConversionException
+                                                                            qce) {
+                                                                logger.warn(
+                                                                        "Event settings exception",
+                                                                        qce);
+                                                            }
+                                                        });
+                                    });
+                            return result;
+                        });
+    }
+
+    Uni<List<String>> eventTemplates() {
+        return invoke(HttpMethod.GET, "/event-templates/", BodyCodec.jsonArray())
+                .map(HttpResponse::body)
+                .map(arr -> arr.stream().map(Object::toString).toList());
+    }
+
+    private <T> Uni<HttpResponse<T>> invoke(HttpMethod mtd, String path, BodyCodec<T> codec) {
+        return invoke(mtd, path, null, codec);
+    }
+
+    private <T> Uni<HttpResponse<T>> invoke(
+            HttpMethod mtd, String path, Buffer payload, BodyCodec<T> codec) {
+        logger.debugv("{0} {1} {2}", mtd, getUri(), path);
+        HttpRequest<T> req =
+                webClient
+                        .request(mtd, getUri().getPort(), getUri().getHost(), path)
+                        .ssl("https".equals(getUri().getScheme()))
+                        .timeout(httpTimeout.toMillis())
+                        .followRedirects(true)
+                        .as(codec);
+        try {
+            Credential credential =
+                    credentialsFinder.getCredentialsForConnectUrl(getUri()).orElse(null);
+            if (credential == null || credential.username == null || credential.password == null) {
+                throw new InvalidCredentialsException(NULL_CREDENTIALS + " " + getUri());
+            }
+            req =
+                    req.authentication(
+                            new UsernamePasswordCredentials(
+                                    credential.username, credential.password));
+        } catch (InvalidCredentialsException e) {
+            logger.error("Authentication exception", e);
+            throw new IllegalStateException(e);
+        }
+
+        Uni<HttpResponse<T>> uni;
+        if (payload != null) {
+            uni = req.sendBuffer(payload);
+        } else {
+            uni = req.send();
+        }
+        return uni;
+    }
+
+    @ApplicationScoped
+    public static class Factory {
+
+        @Inject ObjectMapper mapper;
+        @Inject WebClient webClient;
+        @Inject CredentialsFinder credentialsFinder;
+        @Inject Logger logger;
+
+        @ConfigProperty(name = ConfigProperties.CONNECTIONS_FAILED_TIMEOUT)
+        Duration timeout;
+
+        public AgentClient create(Target target) {
+            return new AgentClient(target, webClient, mapper, timeout, credentialsFinder);
+        }
+    }
+
+    private static class AgentEventTypeInfo implements IEventTypeInfo {
+
+        final JsonObject json;
+
+        AgentEventTypeInfo(JsonObject json) {
+            this.json = json;
+        }
+
+        @Override
+        public String getDescription() {
+            return json.getString("description");
+        }
+
+        @Override
+        public IEventTypeID getEventTypeID() {
+            return new EventTypeIDV2(json.getString("name"));
+        }
+
+        @Override
+        public String[] getHierarchicalCategory() {
+            return ((List<String>)
+                            json.getJsonArray("categories").getList().stream()
+                                    .map(Object::toString)
+                                    .toList())
+                    .toArray(new String[0]);
+        }
+
+        @Override
+        public String getName() {
+            return json.getString("name");
+        }
+
+        static <T, V> V capture(T t) {
+            // TODO clean up this generics hack
+            return (V) t;
+        }
+
+        @Override
+        public Map<String, ? extends IOptionDescriptor<?>> getOptionDescriptors() {
+            Map<String, IOptionDescriptor<?>> result = new HashMap<>();
+            JsonArray settings = json.getJsonArray("settings");
+            settings.forEach(
+                    setting -> {
+                        String name = ((JsonObject) setting).getString("name");
+                        String defaultValue = ((JsonObject) setting).getString("defaultValue");
+                        result.put(
+                                name,
+                                capture(
+                                        new IOptionDescriptor<String>() {
+                                            @Override
+                                            public String getName() {
+                                                return name;
+                                            }
+
+                                            @Override
+                                            public String getDescription() {
+                                                return "";
+                                            }
+
+                                            @Override
+                                            public IConstraint<String> getConstraint() {
+                                                return new IConstraint<String>() {
+
+                                                    @Override
+                                                    public IConstraint<String> combine(
+                                                            IConstraint<?> other) {
+                                                        // TODO Auto-generated method stub
+                                                        throw new UnsupportedOperationException(
+                                                                "Unimplemented method 'combine'");
+                                                    }
+
+                                                    @Override
+                                                    public boolean validate(String value)
+                                                            throws QuantityConversionException {
+                                                        // TODO Auto-generated method stub
+                                                        throw new UnsupportedOperationException(
+                                                                "Unimplemented method 'validate'");
+                                                    }
+
+                                                    @Override
+                                                    public String persistableString(String value)
+                                                            throws QuantityConversionException {
+                                                        // TODO Auto-generated method stub
+                                                        throw new UnsupportedOperationException(
+                                                                "Unimplemented method"
+                                                                        + " 'persistableString'");
+                                                    }
+
+                                                    @Override
+                                                    public String parsePersisted(
+                                                            String persistedValue)
+                                                            throws QuantityConversionException {
+                                                        // TODO Auto-generated method stub
+                                                        throw new UnsupportedOperationException(
+                                                                "Unimplemented method"
+                                                                        + " 'parsePersisted'");
+                                                    }
+
+                                                    @Override
+                                                    public String interactiveFormat(String value)
+                                                            throws QuantityConversionException {
+                                                        // TODO Auto-generated method stub
+                                                        throw new UnsupportedOperationException(
+                                                                "Unimplemented method"
+                                                                        + " 'interactiveFormat'");
+                                                    }
+
+                                                    @Override
+                                                    public String parseInteractive(
+                                                            String interactiveValue)
+                                                            throws QuantityConversionException {
+                                                        // TODO Auto-generated method stub
+                                                        throw new UnsupportedOperationException(
+                                                                "Unimplemented method"
+                                                                        + " 'parseInteractive'");
+                                                    }
+                                                };
+                                            }
+
+                                            @Override
+                                            public String getDefault() {
+                                                return defaultValue;
+                                            }
+                                        }));
+                    });
+            return result;
+        }
+
+        @Override
+        public IOptionDescriptor<?> getOptionInfo(String s) {
+            return getOptionDescriptors().get(s);
+        }
+    }
+}
diff --git a/src/main/java/io/cryostat/targets/AgentConnection.java b/src/main/java/io/cryostat/targets/AgentConnection.java
index 44218a4fe..1feeeb868 100644
--- a/src/main/java/io/cryostat/targets/AgentConnection.java
+++ b/src/main/java/io/cryostat/targets/AgentConnection.java
@@ -17,141 +17,116 @@
 
 import java.io.IOException;
 import java.net.URI;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
+import java.util.Set;
 
 import javax.management.InstanceNotFoundException;
 import javax.management.IntrospectionException;
 import javax.management.ReflectionException;
 import javax.management.remote.JMXServiceURL;
 
-import org.openjdk.jmc.common.unit.IConstrainedMap;
-import org.openjdk.jmc.flightrecorder.configuration.events.EventOptionID;
 import org.openjdk.jmc.rjmx.ConnectionException;
 import org.openjdk.jmc.rjmx.IConnectionHandle;
 import org.openjdk.jmc.rjmx.ServiceNotAvailableException;
 
-import io.cryostat.core.FlightRecorderException;
 import io.cryostat.core.JvmIdentifier;
 import io.cryostat.core.net.CryostatFlightRecorderService;
 import io.cryostat.core.net.IDException;
 import io.cryostat.core.net.JFRConnection;
 import io.cryostat.core.net.MBeanMetrics;
-import io.cryostat.core.net.MemoryMetrics;
-import io.cryostat.core.net.OperatingSystemMetrics;
-import io.cryostat.core.net.RuntimeMetrics;
-import io.cryostat.core.net.ThreadMetrics;
 import io.cryostat.core.sys.Clock;
-import io.cryostat.core.templates.Template;
+import io.cryostat.core.templates.RemoteTemplateService;
 import io.cryostat.core.templates.TemplateService;
-import io.cryostat.core.templates.TemplateType;
-import io.vertx.mutiny.ext.web.client.WebClient;
-import org.jsoup.nodes.Document;
+import io.smallrye.common.annotation.Blocking;
+import jakarta.enterprise.context.ApplicationScoped;
+import jakarta.inject.Inject;
+import org.jboss.logging.Logger;
 
 class AgentConnection implements JFRConnection {
 
-    private final URI agentUri;
-    private final WebClient webClient;
-    private final Clock clock;
+    private final AgentClient client;
+    private final Logger logger = Logger.getLogger(getClass());
 
-    AgentConnection(URI agentUri, WebClient webClient, Clock clock) {
-        this.agentUri = agentUri;
-        this.webClient = webClient;
-        this.clock = clock;
+    AgentConnection(AgentClient client) {
+        this.client = client;
     }
 
     @Override
     public void close() throws Exception {}
 
+    @Blocking
     @Override
     public void connect() throws ConnectionException {
-        // TODO test connection by pinging agent callback
+        if (!client.ping().await().atMost(client.getTimeout())) {
+            throw new ConnectionException("Connection failed");
+        }
     }
 
     @Override
     public void disconnect() {}
 
+    public URI getUri() {
+        return client.getUri();
+    }
+
     @Override
-    public long getApproximateServerTime(Clock arg0) {
+    public long getApproximateServerTime(Clock clock) {
         return clock.now().toEpochMilli();
     }
 
     @Override
     public IConnectionHandle getHandle() throws ConnectionException, IOException {
-        // TODO Auto-generated method stub
-        return null;
+        throw new UnsupportedOperationException();
     }
 
     @Override
     public String getHost() {
-        return agentUri.getHost();
+        return getUri().getHost();
     }
 
-    @Override
-    public JMXServiceURL getJMXURL() throws IOException {
-        // TODO Auto-generated method stub
-        return null;
+    public static boolean isAgentConnection(URI uri) {
+        return Set.of("http", "https", "cryostat-agent").contains(uri.getScheme());
     }
 
     @Override
-    public String getJvmId() throws IDException, IOException {
-        // this should have already been populated when the agent published itself to the Discovery
-        // API. If not, then this will fail, but we were in a bad state to begin with.
-        return Target.getTargetByConnectUrl(agentUri).jvmId;
+    public JMXServiceURL getJMXURL() throws IOException {
+        if (!isAgentConnection(getUri())) {
+            throw new UnsupportedOperationException();
+        }
+        return new JMXServiceURL(getUri().toString());
     }
 
     @Override
     public JvmIdentifier getJvmIdentifier() throws IDException, IOException {
-        // try {
-        //     return JvmIdentifier.from(getMBeanMetrics().getRuntime());
-        // } catch (IntrospectionException | InstanceNotFoundException | ReflectionException e) {
-        //     throw new IDException(e);
-        // }
-        throw new UnsupportedOperationException("Unimplemented method 'getJvmIdentifier'");
+        try {
+            return JvmIdentifier.from(getMBeanMetrics().getRuntime());
+        } catch (IntrospectionException | InstanceNotFoundException | ReflectionException e) {
+            throw new IDException(e);
+        }
     }
 
     @Override
     public int getPort() {
-        return agentUri.getPort();
+        return getUri().getPort();
    }
 
     @Override
     public CryostatFlightRecorderService getService()
             throws ConnectionException, IOException, ServiceNotAvailableException {
-        return new AgentJFRService(webClient);
+        return new AgentJFRService(client, getTemplateService());
     }
 
     @Override
     public TemplateService getTemplateService() {
-        return new TemplateService() {
-
-            @Override
-            public Optional<IConstrainedMap<EventOptionID>> getEvents(
-                    String name, TemplateType type) throws FlightRecorderException {
-                return Optional.empty();
-            }
-
-            @Override
-            public List