diff --git a/compose/cryostat.yml b/compose/cryostat.yml
index c42fb185d..19aeaa863 100644
--- a/compose/cryostat.yml
+++ b/compose/cryostat.yml
@@ -28,9 +28,9 @@ services:
       QUARKUS_HTTP_HOST: "cryostat"
       QUARKUS_HTTP_PORT: ${CRYOSTAT_HTTP_PORT}
       QUARKUS_HIBERNATE_ORM_LOG_SQL: "true"
-      CRYOSTAT_DISCOVERY_JDP_ENABLED: "true"
-      CRYOSTAT_DISCOVERY_PODMAN_ENABLED: "true"
-      CRYOSTAT_DISCOVERY_DOCKER_ENABLED: "true"
+      CRYOSTAT_DISCOVERY_JDP_ENABLED: ${CRYOSTAT_DISCOVERY_JDP_ENABLED:-true}
+      CRYOSTAT_DISCOVERY_PODMAN_ENABLED: ${CRYOSTAT_DISCOVERY_PODMAN_ENABLED:-true}
+      CRYOSTAT_DISCOVERY_DOCKER_ENABLED: ${CRYOSTAT_DISCOVERY_DOCKER_ENABLED:-true}
       JAVA_OPTS_APPEND: "-XX:+FlightRecorder -XX:StartFlightRecording=name=onstart,settings=default,disk=true,maxage=5m -XX:StartFlightRecording=name=startup,settings=profile,disk=true,duration=30s -Dcom.sun.management.jmxremote.autodiscovery=true -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=9091 -Dcom.sun.management.jmxremote.rmi.port=9091 -Djava.rmi.server.hostname=127.0.0.1 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.local.only=false"
     restart: unless-stopped
     healthcheck:
diff --git a/src/main/java/io/cryostat/JsonRequestFilter.java b/src/main/java/io/cryostat/JsonRequestFilter.java
index c8334fab2..836d95aee 100644
--- a/src/main/java/io/cryostat/JsonRequestFilter.java
+++ b/src/main/java/io/cryostat/JsonRequestFilter.java
@@ -19,6 +19,7 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.charset.StandardCharsets;
+import java.util.Set;
 
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -31,12 +32,17 @@
 @Provider
 public class JsonRequestFilter implements ContainerRequestFilter {
 
+    static final Set<String> disallowedFields = Set.of("id");
+    static final Set<String> allowedPaths =
+            Set.of("/api/v2.2/graphql", "/api/v3/graphql", "/api/v2.2/discovery");
+
     private final ObjectMapper objectMapper = new ObjectMapper();
 
     @Override
     public void filter(ContainerRequestContext requestContext) throws IOException {
         if (requestContext.getMediaType() != null
-                && requestContext.getMediaType().isCompatible(MediaType.APPLICATION_JSON_TYPE)) {
+                && requestContext.getMediaType().isCompatible(MediaType.APPLICATION_JSON_TYPE)
+                && !allowedPaths.contains(requestContext.getUriInfo().getPath())) {
             try (InputStream stream = requestContext.getEntityStream()) {
                 JsonNode rootNode = objectMapper.readTree(stream);
@@ -56,8 +62,10 @@ public void filter(ContainerRequestContext requestContext) throws IOException {
     }
 
     private boolean containsIdField(JsonNode node) {
-        if (node.has("id")) {
-            return true;
+        for (String field : disallowedFields) {
+            if (node.has(field)) {
+                return true;
+            }
         }
         if (node.isContainerNode()) {
             for (JsonNode child : node) {
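Note on the JsonRequestFilter change above: GraphQL documents routinely carry `id` selection fields, so the filter now skips body inspection for the endpoints listed in `allowedPaths`. A minimal sketch of how that bypass could be exercised, in the same Mockito style as the JsonRequestFilterTest at the bottom of this diff (the class and test names here are hypothetical, not part of the change):

```java
// Hypothetical companion test; mirrors the fixture style of JsonRequestFilterTest below.
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;

import io.cryostat.JsonRequestFilter;

import jakarta.ws.rs.container.ContainerRequestContext;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.core.UriInfo;
import org.junit.jupiter.api.Test;

class JsonRequestFilterAllowedPathSketch {

    @Test
    void graphqlRequestsContainingIdAreNotRejected() throws Exception {
        var filter = new JsonRequestFilter();
        var requestContext = mock(ContainerRequestContext.class);
        var uriInfo = mock(UriInfo.class);

        // "id" is a disallowed field, but "/api/v3/graphql" is on the allow-list,
        // so the filter should not inspect the body or abort the request.
        when(uriInfo.getPath()).thenReturn("/api/v3/graphql");
        when(requestContext.getUriInfo()).thenReturn(uriInfo);
        when(requestContext.getMediaType()).thenReturn(MediaType.APPLICATION_JSON_TYPE);
        when(requestContext.getEntityStream())
                .thenReturn(
                        new ByteArrayInputStream(
                                "{\"query\":\"query { targetNodes { id } }\"}"
                                        .getBytes(StandardCharsets.UTF_8)));

        filter.filter(requestContext);

        verify(requestContext, never()).abortWith(any(Response.class));
    }
}
```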
diff --git a/src/main/java/io/cryostat/graphql/ActiveRecordings.java b/src/main/java/io/cryostat/graphql/ActiveRecordings.java
index ff011c236..efd1bc997 100644
--- a/src/main/java/io/cryostat/graphql/ActiveRecordings.java
+++ b/src/main/java/io/cryostat/graphql/ActiveRecordings.java
@@ -15,8 +15,10 @@
  */
 package io.cryostat.graphql;
 
+import java.time.Duration;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 import java.util.Objects;
 import java.util.Optional;
 import java.util.function.Predicate;
@@ -25,6 +27,8 @@
 import io.cryostat.core.templates.Template;
 import io.cryostat.core.templates.TemplateType;
+import io.cryostat.discovery.DiscoveryNode;
+import io.cryostat.graphql.RootNode.DiscoveryNodeFilter;
 import io.cryostat.graphql.TargetNodes.AggregateInfo;
 import io.cryostat.graphql.TargetNodes.Recordings;
 import io.cryostat.graphql.matchers.LabelSelectorMatcher;
@@ -32,42 +36,217 @@
 import io.cryostat.recordings.RecordingHelper;
 import io.cryostat.recordings.RecordingHelper.RecordingOptions;
 import io.cryostat.recordings.RecordingHelper.RecordingReplace;
-import io.cryostat.recordings.Recordings.Metadata;
+import io.cryostat.recordings.Recordings.ArchivedRecording;
 import io.cryostat.targets.Target;
 
 import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
 import io.smallrye.common.annotation.Blocking;
 import io.smallrye.graphql.api.Nullable;
+import io.smallrye.graphql.execution.ExecutionException;
 import io.smallrye.mutiny.Uni;
 import jakarta.inject.Inject;
 import jakarta.transaction.Transactional;
 import jdk.jfr.RecordingState;
 import org.eclipse.microprofile.graphql.Description;
 import org.eclipse.microprofile.graphql.GraphQLApi;
+import org.eclipse.microprofile.graphql.Mutation;
 import org.eclipse.microprofile.graphql.NonNull;
 import org.eclipse.microprofile.graphql.Source;
+import org.jboss.logging.Logger;
 
 @GraphQLApi
 public class ActiveRecordings {
 
     @Inject RecordingHelper recordingHelper;
+    @Inject Logger logger;
+
+    @Blocking
+    @Transactional
+    @Mutation
+    @Description(
+            "Start a new Flight Recording on all Targets under the subtrees of the discovery nodes"
+                    + " matching the given filter")
+    public List<ActiveRecording> createRecording(
+            @NonNull DiscoveryNodeFilter nodes, @NonNull RecordingSettings recording) {
+        return DiscoveryNode.listAll().stream()
+                .filter(nodes)
+                .flatMap(
+                        node ->
+                                RootNode.recurseChildren(node, n -> n.target != null).stream()
+                                        .map(n -> n.target))
+                .map(
+                        target -> {
+                            var template =
+                                    recordingHelper.getPreferredTemplate(
+                                            target,
+                                            recording.template,
+                                            TemplateType.valueOf(recording.templateType));
+                            try {
+                                return recordingHelper
+                                        .startRecording(
+                                                target,
+                                                Optional.ofNullable(recording.replace)
+                                                        .map(RecordingReplace::valueOf)
+                                                        .orElse(RecordingReplace.STOPPED),
+                                                template,
+                                                recording.asOptions(),
+                                                Optional.ofNullable(recording.metadata)
+                                                        .map(s -> s.labels)
+                                                        .orElse(Map.of()))
+                                        .await()
+                                        .atMost(Duration.ofSeconds(10));
+                            } catch (QuantityConversionException qce) {
+                                throw new ExecutionException(qce);
+                            }
+                        })
+                .toList();
+    }
+
+    @Blocking
+    @Transactional
+    @Mutation
+    @Description(
+            "Archive an existing Flight Recording matching the given filter, on all Targets under"
+                    + " the subtrees of the discovery nodes matching the given filter")
+    public List<ArchivedRecording> archiveRecording(
+            @NonNull DiscoveryNodeFilter nodes, @Nullable ActiveRecordingsFilter recordings) {
+        return DiscoveryNode.listAll().stream()
+                .filter(nodes)
+                .flatMap(
+                        node ->
+                                RootNode.recurseChildren(node, n -> n.target != null).stream()
+                                        .map(n -> n.target))
+                .flatMap(
+                        t ->
+                                t.activeRecordings.stream()
+                                        .filter(r -> recordings == null || recordings.test(r)))
+                .map(
+                        recording -> {
+                            try {
+                                return recordingHelper.archiveRecording(recording, null, null);
+                            } catch (Exception e) {
+                                throw new ExecutionException(e);
+                            }
+                        })
+                .toList();
+    }
+
+    @Blocking
+    @Transactional
+    @Mutation
+    @Description(
+            "Stop an existing Flight Recording matching the given filter, on all Targets under"
+                    + " the subtrees of the discovery nodes matching the given filter")
+    public List<ActiveRecording> stopRecording(
+            @NonNull DiscoveryNodeFilter nodes, @Nullable ActiveRecordingsFilter recordings) {
+        return DiscoveryNode.listAll().stream()
+                .filter(nodes)
+                .flatMap(
+                        node ->
+                                RootNode.recurseChildren(node, n -> n.target != null).stream()
+                                        .map(n -> n.target))
+                .flatMap(
+                        t ->
+                                t.activeRecordings.stream()
+                                        .filter(r -> recordings == null || recordings.test(r)))
+                .map(
+                        recording -> {
+                            try {
+                                return recordingHelper
+                                        .stopRecording(recording)
+                                        .await()
+                                        .atMost(Duration.ofSeconds(10));
+                            } catch (Exception e) {
+                                throw new ExecutionException(e);
+                            }
+                        })
+                .toList();
+    }
+
+    @Blocking
+    @Transactional
+    @Mutation
+    @Description(
+            "Delete an existing Flight Recording matching the given filter, on all Targets under"
+                    + " the subtrees of the discovery nodes matching the given filter")
+    public List<ActiveRecording> deleteRecording(
+            @NonNull DiscoveryNodeFilter nodes, @Nullable ActiveRecordingsFilter recordings) {
+        var activeRecordings =
+                DiscoveryNode.listAll().stream()
+                        .filter(nodes)
+                        .flatMap(
+                                node ->
+                                        RootNode.recurseChildren(node, n -> n.target != null)
+                                                .stream()
+                                                .map(n -> n.target))
+                        .flatMap(
+                                t ->
+                                        t.activeRecordings.stream()
+                                                .filter(
+                                                        r ->
+                                                                recordings == null
+                                                                        || recordings.test(r)))
+                        .toList();
+        return activeRecordings.stream()
+                .map(
+                        recording -> {
+                            try {
+                                return recordingHelper
+                                        .deleteRecording(recording)
+                                        .await()
+                                        .atMost(Duration.ofSeconds(10));
+                            } catch (Exception e) {
+                                throw new ExecutionException(e);
+                            }
+                        })
+                .toList();
+    }
+
+    @Blocking
+    @Transactional
+    @Mutation
+    @Description(
+            "Create a Flight Recorder Snapshot on all Targets under"
+                    + " the subtrees of the discovery nodes matching the given filter")
+    public List<ActiveRecording> createSnapshot(@NonNull DiscoveryNodeFilter nodes) {
+        return DiscoveryNode.listAll().stream()
+                .filter(nodes)
+                .flatMap(
+                        node ->
+                                RootNode.recurseChildren(node, n -> n.target != null).stream()
+                                        .map(n -> n.target))
+                .map(
+                        target -> {
+                            try {
+                                return recordingHelper
+                                        .createSnapshot(target)
+                                        .await()
+                                        .atMost(Duration.ofSeconds(10));
+                            } catch (Exception e) {
+                                throw new ExecutionException(e);
+                            }
+                        })
+                .toList();
+    }
 
     @Blocking
     @Transactional
     @Description("Start a new Flight Recording on the specified Target")
     public Uni<ActiveRecording> doStartRecording(
-            @Source Target target, @NonNull RecordingSettings settings)
+            @Source Target target, @NonNull RecordingSettings recording)
             throws QuantityConversionException {
         var fTarget = Target.findById(target.id);
         Template template =
                 recordingHelper.getPreferredTemplate(
-                        fTarget, settings.template, settings.templateType);
+                        fTarget, recording.template, TemplateType.valueOf(recording.templateType));
         return recordingHelper.startRecording(
                 fTarget,
-                RecordingReplace.STOPPED,
+                Optional.ofNullable(recording.replace)
+                        .map(RecordingReplace::valueOf)
+                        .orElse(RecordingReplace.STOPPED),
                 template,
-                settings.asOptions(),
-                settings.metadata.labels());
+                recording.asOptions(),
+                Optional.ofNullable(recording.metadata).map(s -> s.labels).orElse(Map.of()));
     }
 
     @Blocking
@@ -78,6 +257,30 @@ public Uni<ActiveRecording> doSnapshot(@Source Target target) {
         return recordingHelper.createSnapshot(fTarget);
     }
 
+    @Blocking
+    @Transactional
+    @Description("Stop the specified Flight Recording")
+    public Uni<ActiveRecording> doStop(@Source ActiveRecording recording) {
+        var ar = ActiveRecording.findById(recording.id);
+        return recordingHelper.stopRecording(ar);
+    }
+
+    @Blocking
+    @Transactional
+    @Description("Delete the specified Flight Recording")
+    public Uni<ActiveRecording> doDelete(@Source ActiveRecording recording) {
+        var ar = ActiveRecording.findById(recording.id);
+        return recordingHelper.deleteRecording(ar);
+    }
+
+    @Blocking
+    @Transactional
+    @Description("Archive the specified Flight Recording")
+    public Uni<ArchivedRecording> doArchive(@Source ActiveRecording recording) throws Exception {
+        var ar = ActiveRecording.findById(recording.id);
+        return Uni.createFrom().item(recordingHelper.archiveRecording(ar, null, null));
+    }
+
     public TargetNodes.ActiveRecordings active(
             @Source Recordings recordings, ActiveRecordingsFilter filter) {
         var out = new TargetNodes.ActiveRecordings();
@@ -98,15 +301,15 @@ public TargetNodes.ActiveRecordings active(
     public static class RecordingSettings {
         public @NonNull String name;
         public @NonNull String template;
-        public @NonNull TemplateType templateType;
-        public @Nullable RecordingReplace replace;
+        public @NonNull String templateType;
+        public @Nullable String replace;
         public @Nullable Boolean continuous;
         public @Nullable Boolean archiveOnStop;
         public @Nullable Boolean toDisk;
         public @Nullable Long duration;
         public @Nullable Long maxSize;
         public @Nullable Long maxAge;
-        public @Nullable Metadata metadata;
+        public @Nullable RecordingMetadata metadata;
 
         public RecordingOptions asOptions() {
             return new RecordingOptions(
@@ -119,6 +322,11 @@ public RecordingOptions asOptions() {
         }
     }
 
+    @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD")
+    public static class RecordingMetadata {
+        public @Nullable Map<String, String> labels;
+    }
+
     @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD")
     public static class ActiveRecordingsFilter implements Predicate<ActiveRecording> {
         public @Nullable String name;
@@ -161,16 +369,19 @@ public boolean test(ActiveRecording r) {
             Predicate<ActiveRecording> matchesStartTimeBefore =
                     n -> startTimeMsBeforeEqual == null || startTimeMsBeforeEqual <= n.startTime;
 
-            return matchesName
-                    .and(matchesNames)
-                    .and(matchesLabels)
-                    .and(matchesState)
-                    .and(matchesContinuous)
-                    .and(matchesToDisk)
-                    .and(matchesDurationGte)
-                    .and(matchesDurationLte)
-                    .and(matchesStartTimeBefore)
-                    .and(matchesStartTimeAfter)
+            return List.of(
+                            matchesName,
+                            matchesNames,
+                            matchesLabels,
+                            matchesState,
+                            matchesContinuous,
+                            matchesToDisk,
+                            matchesDurationGte,
+                            matchesDurationLte,
+                            matchesStartTimeBefore,
+                            matchesStartTimeAfter)
+                    .stream()
+                    .reduce(x -> true, Predicate::and)
                     .test(r);
         }
     }
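The ActiveRecordingsFilter rewrite above (and the ArchivedRecordingsFilter and DiscoveryNodeFilter rewrites below) swaps a long `.and(...)` chain for a stream reduction over a list of predicates, seeded with an always-true identity so that an empty or fully-unset filter matches everything. A self-contained sketch of the pattern with made-up criteria (plain Java, not Cryostat types):

```java
import java.util.List;
import java.util.function.Predicate;

public class PredicateReduceSketch {

    record Recording(String name, int duration) {}

    public static void main(String[] args) {
        // Each criterion is written as "null means: don't care", mirroring the
        // nullable filter fields in the GraphQL filter classes in this diff.
        String nameFilter = null;  // unset -> matches everything
        Integer minDuration = 300; // set -> must hold

        Predicate<Recording> matchesName =
                r -> nameFilter == null || nameFilter.equals(r.name());
        Predicate<Recording> matchesDuration =
                r -> minDuration == null || r.duration() >= minDuration;

        // reduce(x -> true, Predicate::and) folds the list into a single predicate;
        // the identity keeps a fully-unset filter matching every element.
        Predicate<Recording> combined =
                List.of(matchesName, matchesDuration).stream()
                        .reduce(x -> true, Predicate::and);

        System.out.println(combined.test(new Recording("onstart", 600))); // true
        System.out.println(combined.test(new Recording("startup", 30)));  // false
    }
}
```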
diff --git a/src/main/java/io/cryostat/graphql/ArchivedRecordings.java b/src/main/java/io/cryostat/graphql/ArchivedRecordings.java
index eec113d24..afcf36f77 100644
--- a/src/main/java/io/cryostat/graphql/ArchivedRecordings.java
+++ b/src/main/java/io/cryostat/graphql/ArchivedRecordings.java
@@ -43,10 +43,13 @@ public class ArchivedRecordings {
 
     @Query("archivedRecordings")
     public TargetNodes.ArchivedRecordings listArchivedRecordings(ArchivedRecordingsFilter filter) {
         var r = new TargetNodes.ArchivedRecordings();
-        r.data = recordingHelper.listArchivedRecordings();
+        r.data =
+                recordingHelper
+                        .listArchivedRecordings(filter == null ? null : filter.sourceTarget)
+                        .stream()
+                        .filter(rec -> filter == null || filter.test(rec))
+                        .toList();
         r.aggregate = AggregateInfo.fromArchived(r.data);
-        r.aggregate.size = r.data.stream().mapToLong(ArchivedRecording::size).sum();
-        r.aggregate.count = r.data.size();
         return r;
     }
 
@@ -109,14 +112,17 @@ public boolean test(ArchivedRecording r) {
                             archivedTimeBeforeEqual == null
                                     || archivedTimeBeforeEqual <= n.archivedTime();
 
-            return matchesName
-                    .and(matchesNames)
-                    .and(matchesSourceTarget)
-                    .and(matchesLabels)
-                    .and(matchesSizeGte)
-                    .and(matchesSizeLte)
-                    .and(matchesArchivedTimeGte)
-                    .and(matchesArchivedTimeLte)
+            return List.of(
+                            matchesName,
+                            matchesNames,
+                            matchesSourceTarget,
+                            matchesLabels,
+                            matchesSizeGte,
+                            matchesSizeLte,
+                            matchesArchivedTimeGte,
+                            matchesArchivedTimeLte)
+                    .stream()
+                    .reduce(x -> true, Predicate::and)
                     .test(r);
         }
     }
diff --git a/src/main/java/io/cryostat/graphql/EnvironmentNodes.java b/src/main/java/io/cryostat/graphql/EnvironmentNodes.java
index bb0d0422e..d70859989 100644
--- a/src/main/java/io/cryostat/graphql/EnvironmentNodes.java
+++ b/src/main/java/io/cryostat/graphql/EnvironmentNodes.java
@@ -15,12 +15,10 @@
  */
 package io.cryostat.graphql;
 
-import java.util.ArrayList;
 import java.util.List;
-import java.util.Objects;
 
 import io.cryostat.discovery.DiscoveryNode;
-import io.cryostat.graphql.matchers.LabelSelectorMatcher;
+import io.cryostat.graphql.RootNode.DiscoveryNodeFilter;
 
 import io.smallrye.graphql.api.Nullable;
 import org.eclipse.microprofile.graphql.Description;
@@ -30,51 +28,12 @@
 @GraphQLApi
 public class EnvironmentNodes {
-    public static class EnvironmentNodeFilterInput {
-        @Nullable public Long id;
-        @Nullable public String name;
-        @Nullable public List<String> names;
-        @Nullable public String nodeType;
-        @Nullable public List<String> labels;
-    }
-
     @Query("environmentNodes")
     @Description("Get all environment nodes in the discovery tree with optional filtering")
-    public List<DiscoveryNode> environmentNodes(EnvironmentNodeFilterInput filter) {
-        DiscoveryNode rootNode = DiscoveryNode.getUniverse();
-        return filterAndTraverse(rootNode, filter);
-    }
-
-    private List<DiscoveryNode> filterAndTraverse(
-            DiscoveryNode node, EnvironmentNodeFilterInput filter) {
-        List<DiscoveryNode> filteredNodes = new ArrayList<>();
-        if (matchesFilter(node, filter)) {
-            filteredNodes.add(node);
-        }
-        if (node.children != null) {
-            for (DiscoveryNode child : node.children) {
-                filteredNodes.addAll(filterAndTraverse(child, filter));
-            }
-        }
-        return filteredNodes;
-    }
-
-    private static boolean matchesFilter(DiscoveryNode node, EnvironmentNodeFilterInput filter) {
-        if (node.target != null) return false;
-        if (filter == null) return true;
-
-        boolean matchesId = filter.id == null || filter.id.equals(node.id);
-        boolean matchesName = filter.name == null || Objects.equals(filter.name, node.name);
-        boolean matchesNames = filter.names == null || filter.names.contains(node.name);
-        boolean matchesLabels =
-                filter.labels == null
-                        || filter.labels.stream()
-                                .allMatch(
-                                        label ->
-                                                LabelSelectorMatcher.parse(label)
-                                                        .test(node.labels));
-        boolean matchesNodeType = filter.nodeType == null || filter.nodeType.equals(node.nodeType);
-
-        return matchesId && matchesName && matchesNames && matchesLabels && matchesNodeType;
+    public List<DiscoveryNode> environmentNodes(@Nullable DiscoveryNodeFilter filter) {
+        return RootNode.recurseChildren(DiscoveryNode.getUniverse(), node -> node.target == null)
+                .stream()
+                .filter(node -> filter == null || filter.test(node))
+                .toList();
     }
 }
diff --git a/src/main/java/io/cryostat/graphql/RootNode.java b/src/main/java/io/cryostat/graphql/RootNode.java
index d74f77c27..ba5f5e74e 100644
--- a/src/main/java/io/cryostat/graphql/RootNode.java
+++ b/src/main/java/io/cryostat/graphql/RootNode.java
@@ -17,7 +17,6 @@
 
 import java.util.HashSet;
 import java.util.List;
-import java.util.Objects;
 import java.util.Set;
 import java.util.function.Predicate;
 
@@ -53,7 +52,7 @@ public List<DiscoveryNode> descendantTargets(
                 .toList();
     }
 
-    private Set<DiscoveryNode> recurseChildren(
+    static Set<DiscoveryNode> recurseChildren(
             DiscoveryNode node, Predicate<DiscoveryNode> predicate) {
         Set<DiscoveryNode> result = new HashSet<>();
         if (predicate.test(node)) {
@@ -68,17 +67,21 @@ private Set<DiscoveryNode> recurseChildren(
     @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD")
     public static class DiscoveryNodeFilter implements Predicate<DiscoveryNode> {
         public @Nullable Long id;
+        public @Nullable List<Long> ids;
         public @Nullable String name;
         public @Nullable List<String> names;
+        public @Nullable List<String> nodeTypes;
         public @Nullable List<String> labels;
         public @Nullable List<String> annotations;
 
         @Override
         public boolean test(DiscoveryNode t) {
             Predicate<DiscoveryNode> matchesId = n -> id == null || id.equals(n.id);
-            Predicate<DiscoveryNode> matchesName =
-                    n -> name == null || Objects.equals(name, n.name);
+            Predicate<DiscoveryNode> matchesIds = n -> ids == null || ids.contains(n.id);
+            Predicate<DiscoveryNode> matchesName = n -> name == null || name.equals(n.name);
             Predicate<DiscoveryNode> matchesNames = n -> names == null || names.contains(n.name);
+            Predicate<DiscoveryNode> matchesNodeTypes =
+                    n -> nodeTypes == null || nodeTypes.contains(n.nodeType);
             Predicate<DiscoveryNode> matchesLabels =
                     n ->
                             labels == null
@@ -98,11 +101,16 @@ public boolean test(DiscoveryNode t) {
                                                             n.target.annotations
                                                                     .merged()));
 
-            return matchesId
-                    .and(matchesName)
-                    .and(matchesNames)
-                    .and(matchesLabels)
-                    .and(matchesAnnotations)
+            return List.of(
+                            matchesId,
+                            matchesIds,
+                            matchesName,
+                            matchesNames,
+                            matchesNodeTypes,
+                            matchesLabels,
+                            matchesAnnotations)
+                    .stream()
+                    .reduce(x -> true, Predicate::and)
                     .test(t);
         }
     }
diff --git a/src/main/java/io/cryostat/graphql/SchemaExtension.java b/src/main/java/io/cryostat/graphql/SchemaExtension.java
new file mode 100644
index 000000000..c330a057e
--- /dev/null
+++ b/src/main/java/io/cryostat/graphql/SchemaExtension.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright The Cryostat Authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.cryostat.graphql;
+
+import java.util.Arrays;
+
+import graphql.schema.GraphQLEnumType;
+import graphql.schema.GraphQLEnumValueDefinition;
+import graphql.schema.GraphQLSchema;
+import jakarta.enterprise.event.Observes;
+import jdk.jfr.RecordingState;
+import org.eclipse.microprofile.graphql.GraphQLApi;
+
+@GraphQLApi
+public class SchemaExtension {
+
+    public GraphQLSchema.Builder registerRecordingStateEnum(
+            @Observes GraphQLSchema.Builder builder) {
+        return createEnumType(
+                builder, RecordingState.class, "Running state of an active Flight Recording");
+    }
+
+    private static GraphQLSchema.Builder createEnumType(
+            GraphQLSchema.Builder builder, Class<? extends Enum<?>> klazz, String description) {
+        return builder.additionalType(
+                GraphQLEnumType.newEnum()
+                        .name(klazz.getSimpleName())
+                        .description(description)
+                        .values(
+                                Arrays.asList(klazz.getEnumConstants()).stream()
+                                        .map(
+                                                s ->
+                                                        new GraphQLEnumValueDefinition.Builder()
+                                                                .name(s.name())
+                                                                .value(s)
+                                                                .description(s.name())
+                                                                .build())
+                                        .toList())
+                        .build());
+    }
+}
diff --git a/src/main/java/io/cryostat/graphql/TargetNodes.java b/src/main/java/io/cryostat/graphql/TargetNodes.java
index 5648c3b22..321653955 100644
--- a/src/main/java/io/cryostat/graphql/TargetNodes.java
+++ b/src/main/java/io/cryostat/graphql/TargetNodes.java
@@ -15,12 +15,13 @@
  */
 package io.cryostat.graphql;
 
-import java.util.Arrays;
 import java.util.List;
 
 import io.cryostat.core.net.JFRConnection;
 import io.cryostat.core.net.MBeanMetrics;
 import io.cryostat.discovery.DiscoveryNode;
+import io.cryostat.graphql.ActiveRecordings.ActiveRecordingsFilter;
+import io.cryostat.graphql.ArchivedRecordings.ArchivedRecordingsFilter;
 import io.cryostat.graphql.RootNode.DiscoveryNodeFilter;
 import io.cryostat.recordings.ActiveRecording;
 import io.cryostat.recordings.RecordingHelper;
@@ -30,15 +31,11 @@
 
 import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
 import graphql.schema.DataFetchingEnvironment;
-import graphql.schema.GraphQLEnumType;
-import graphql.schema.GraphQLEnumValueDefinition;
-import graphql.schema.GraphQLSchema;
 import io.smallrye.common.annotation.Blocking;
 import io.smallrye.graphql.api.Context;
+import io.smallrye.graphql.api.Nullable;
 import io.smallrye.mutiny.Uni;
-import jakarta.enterprise.event.Observes;
 import jakarta.inject.Inject;
-import jdk.jfr.RecordingState;
 import org.eclipse.microprofile.graphql.Description;
 import org.eclipse.microprofile.graphql.GraphQLApi;
 import org.eclipse.microprofile.graphql.NonNull;
@@ -51,31 +48,6 @@ public class TargetNodes {
 
     @Inject RecordingHelper recordingHelper;
     @Inject TargetConnectionManager connectionManager;
 
-    public GraphQLSchema.Builder registerRecordingStateEnum(
-            @Observes GraphQLSchema.Builder builder) {
-        return createEnumType(
-                builder, RecordingState.class, "Running state of an active Flight Recording");
-    }
-
-    private static GraphQLSchema.Builder createEnumType(
-            GraphQLSchema.Builder builder, Class<? extends Enum<?>> klazz, String description) {
-        return builder.additionalType(
-                GraphQLEnumType.newEnum()
-                        .name(klazz.getSimpleName())
-                        .description(description)
-                        .values(
-                                Arrays.asList(klazz.getEnumConstants()).stream()
-                                        .map(
-                                                s ->
-                                                        new GraphQLEnumValueDefinition.Builder()
-                                                                .name(s.name())
-                                                                .value(s)
-                                                                .description(s.name())
-                                                                .build())
-                                        .toList())
-                        .build());
-    }
-
     @Blocking
     @Query("targetNodes")
     @Description("Get the Target discovery nodes, i.e. the leaf nodes of the discovery tree")
@@ -98,9 +70,36 @@ public List<DiscoveryNode> getTargetNodes(DiscoveryNodeFilter filter) {
     //     return t -> observed.add(fn.apply(t));
     // }
 
+    @Blocking
+    public ActiveRecordings activeRecordings(
+            @Source Target target, @Nullable ActiveRecordingsFilter filter) {
+        var fTarget = Target.findById(target.id);
+        var recordings = new ActiveRecordings();
+        recordings.data =
+                fTarget.activeRecordings.stream()
+                        .filter(r -> filter == null || filter.test(r))
+                        .toList();
+        recordings.aggregate = AggregateInfo.fromActive(recordings.data);
+        return recordings;
+    }
+
+    @Blocking
+    public ArchivedRecordings archivedRecordings(
+            @Source Target target, @Nullable ArchivedRecordingsFilter filter) {
+        var fTarget = Target.findById(target.id);
+        var recordings = new ArchivedRecordings();
+        recordings.data =
+                recordingHelper.listArchivedRecordings(fTarget).stream()
+                        .filter(r -> filter == null || filter.test(r))
+                        .toList();
+        recordings.aggregate = AggregateInfo.fromArchived(recordings.data);
+        return recordings;
+    }
+
     @Blocking
     @Description("Get the active and archived recordings belonging to this target")
     public Recordings recordings(@Source Target target, Context context) {
+        var fTarget = Target.findById(target.id);
         var dfe = context.unwrap(DataFetchingEnvironment.class);
         var requestedFields =
                 dfe.getSelectionSet().getFields().stream().map(field -> field.getName()).toList();
@@ -109,17 +108,14 @@ public Recordings recordings(@Source Target target, Context context) {
 
         if (requestedFields.contains("active")) {
             recordings.active = new ActiveRecordings();
-            recordings.active.data = target.activeRecordings;
+            recordings.active.data = fTarget.activeRecordings;
             recordings.active.aggregate = AggregateInfo.fromActive(recordings.active.data);
         }
 
         if (requestedFields.contains("archived")) {
             recordings.archived = new ArchivedRecordings();
-            recordings.archived.data = recordingHelper.listArchivedRecordings(target);
+            recordings.archived.data = recordingHelper.listArchivedRecordings(fTarget);
             recordings.archived.aggregate = AggregateInfo.fromArchived(recordings.archived.data);
-            recordings.archived.aggregate.count = recordings.archived.data.size();
-            recordings.archived.aggregate.size =
-                    recordings.archived.data.stream().mapToLong(ArchivedRecording::size).sum();
         }
 
         return recordings;
@@ -128,7 +124,8 @@ public Recordings recordings(@Source Target target, Context context) {
     @Blocking
     @Description("Get live MBean metrics snapshot from the specified Target")
     public Uni<MBeanMetrics> mbeanMetrics(@Source Target target) {
-        return connectionManager.executeConnectedTaskUni(target, JFRConnection::getMBeanMetrics);
+        var fTarget = Target.findById(target.id);
+        return connectionManager.executeConnectedTaskUni(fTarget, JFRConnection::getMBeanMetrics);
     }
 
     @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD")
diff --git a/src/main/java/io/cryostat/recordings/RecordingHelper.java b/src/main/java/io/cryostat/recordings/RecordingHelper.java
index b12a8fc81..1e60de209 100644
--- a/src/main/java/io/cryostat/recordings/RecordingHelper.java
+++ b/src/main/java/io/cryostat/recordings/RecordingHelper.java
@@ -87,6 +87,7 @@
 import jakarta.enterprise.event.Observes;
 import jakarta.inject.Inject;
 import jakarta.inject.Named;
+import jakarta.transaction.Transactional;
 import jakarta.ws.rs.BadRequestException;
 import jakarta.ws.rs.ServerErrorException;
 import jdk.jfr.RecordingState;
@@ -195,6 +196,7 @@ void onStart(@Observes StartupEvent evt) {
         }
     }
 
+    @Transactional
     public Uni<ActiveRecording> startRecording(
             Target target,
             RecordingReplace replace,
@@ -265,7 +267,7 @@ public Uni<ActiveRecording> startRecording(
                 });
     }
 
-    @Blocking
+    @Transactional
    public Uni<ActiveRecording> createSnapshot(Target target) {
         return connectionManager.executeConnectedTaskUni(
                 target,
@@ -331,6 +333,20 @@ public Uni<ActiveRecording> createSnapshot(Target target) {
                 });
     }
 
+    @Transactional
+    public Uni<ActiveRecording> stopRecording(ActiveRecording recording) {
+        recording.state = RecordingState.STOPPED;
+        recording.persist();
+        return Uni.createFrom().item(recording);
+    }
+
+    @Transactional
+    public Uni<ActiveRecording> deleteRecording(ActiveRecording recording) {
+        recording.delete();
+        return Uni.createFrom().item(recording);
+    }
+
     @Blocking
     private boolean snapshotIsReadable(Target target, InputStream snapshot) throws IOException {
         if (!connectionManager.markConnectionInUse(target)) {
@@ -568,13 +584,12 @@ public List<S3Object> listArchivedRecordingObjects(String jvmId) {
     }
 
     @Blocking
-    public List<ArchivedRecording> listArchivedRecordings(Target target) {
-        return listArchivedRecordingObjects(target.jvmId).stream()
+    public List<ArchivedRecording> listArchivedRecordings(String jvmId) {
+        return listArchivedRecordingObjects(jvmId).stream()
                 .map(
                         item -> {
                             String path = item.key().strip();
                             String[] parts = path.split("/");
-                            String jvmId = parts[0];
                             String filename = parts[1];
                             Metadata metadata =
                                     getArchivedRecordingMetadata(jvmId, filename)
@@ -590,34 +605,31 @@ public List<ArchivedRecording> listArchivedRecordings(Target target) {
                 .toList();
     }
 
-    public String saveRecording(ActiveRecording recording) throws Exception {
-        return saveRecording(recording, null);
-    }
-
-    public String saveRecording(ActiveRecording recording, Instant expiry) throws Exception {
-        return saveRecording(recording, null, expiry);
+    @Blocking
+    public List<ArchivedRecording> listArchivedRecordings(Target target) {
+        return listArchivedRecordings(target.jvmId);
     }
 
-    @Blocking
-    public String saveRecording(ActiveRecording recording, String savename, Instant expiry)
-            throws Exception {
+    public ArchivedRecording archiveRecording(
+            ActiveRecording activeRecording, String savename, Instant expiry) throws Exception {
         // AWS object key name guidelines advise characters to avoid (% so we should not pass url
         // encoded characters)
         String transformedAlias =
-                URLDecoder.decode(recording.target.alias, StandardCharsets.UTF_8)
+                URLDecoder.decode(activeRecording.target.alias, StandardCharsets.UTF_8)
                         .replaceAll("[\\._/]+", "-");
-        String timestamp =
-                clock.now().truncatedTo(ChronoUnit.SECONDS).toString().replaceAll("[-:]+", "");
+        Instant now = clock.now();
+        String timestamp = now.truncatedTo(ChronoUnit.SECONDS).toString().replaceAll("[-:]+", "");
         String filename =
-                String.format("%s_%s_%s.jfr", transformedAlias, recording.name, timestamp);
+                String.format("%s_%s_%s.jfr", transformedAlias, activeRecording.name, timestamp);
         if (StringUtils.isBlank(savename)) {
             savename = filename;
         }
         int mib = 1024 * 1024;
-        String key = archivedRecordingKey(recording.target.jvmId, filename);
+        String key = archivedRecordingKey(activeRecording.target.jvmId, filename);
         String multipartId = null;
         List<Pair<Integer, String>> parts = new ArrayList<>();
-        try (var stream = remoteRecordingStreamFactory.open(recording);
+        long accum = 0;
+        try (var stream = remoteRecordingStreamFactory.open(activeRecording);
                 var ch = Channels.newChannel(stream)) {
             ByteBuffer buf = ByteBuffer.allocate(20 * mib);
             CreateMultipartUploadRequest.Builder builder =
@@ -627,14 +639,13 @@ public String saveRecording(ActiveRecording recording, String savename, Instant
                             .contentType(JFR_MIME)
                             .contentDisposition(
                                     String.format("attachment; filename=\"%s\"", savename))
-                            .tagging(createActiveRecordingTagging(recording, expiry));
+                            .tagging(createActiveRecordingTagging(activeRecording, expiry));
             if (expiry != null && expiry.isAfter(Instant.now())) {
                 builder = builder.expires(expiry);
             }
             CreateMultipartUploadRequest request = builder.build();
             multipartId = storage.createMultipartUpload(request).uploadId();
             int read = 0;
-            long accum = 0;
             for (int i = 1; i <= 10_000; i++) {
                 read = ch.read(buf);
@@ -715,13 +726,19 @@ public String saveRecording(ActiveRecording recording, String savename, Instant
             var event =
                     new ActiveRecordingEvent(
                             Recordings.RecordingEventCategory.ACTIVE_SAVED,
-                            ActiveRecordingEvent.Payload.of(this, recording));
+                            ActiveRecordingEvent.Payload.of(this, activeRecording));
             bus.publish(event.category().category(), event.payload().recording());
             bus.publish(
                     MessagingServer.class.getName(),
                     new Notification(event.category().category(), event.payload()));
         }
-        return filename;
+        return new ArchivedRecording(
+                filename,
+                downloadUrl(activeRecording.target.jvmId, filename),
+                reportUrl(activeRecording.target.jvmId, filename),
+                activeRecording.metadata,
+                accum,
+                now.getEpochSecond());
     }
 
     public Optional<Metadata> getArchivedRecordingMetadata(String jvmId, String filename) {
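With the RecordingHelper change above, the old `saveRecording` overloads are folded into `archiveRecording`, which returns the full `ArchivedRecording` record instead of just the generated filename; that is why the callers in Recordings.java and ScheduledArchiveJob.java below switch to `.name()`. A hedged sketch of a caller using the richer return value (the class is illustrative, not part of this change; accessor names are taken from the record construction above):

```java
import io.cryostat.recordings.ActiveRecording;
import io.cryostat.recordings.RecordingHelper;
import io.cryostat.recordings.Recordings.ArchivedRecording;

import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;

@ApplicationScoped
public class ArchiveCallerSketch {

    @Inject RecordingHelper recordingHelper;

    // Illustrative caller: archive with no custom savename and no expiry,
    // then use the archived record instead of only a filename.
    public String archiveAndDescribe(ActiveRecording recording) throws Exception {
        ArchivedRecording archived = recordingHelper.archiveRecording(recording, null, null);
        return String.format(
                "%s (%d bytes) -> %s", archived.name(), archived.size(), archived.downloadUrl());
    }
}
```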
diff --git a/src/main/java/io/cryostat/recordings/Recordings.java b/src/main/java/io/cryostat/recordings/Recordings.java
index 23c529370..0ab21388f 100644
--- a/src/main/java/io/cryostat/recordings/Recordings.java
+++ b/src/main/java/io/cryostat/recordings/Recordings.java
@@ -171,6 +171,8 @@ public Map upload(
         if (rawLabels != null) {
             rawLabels.getMap().forEach((k, v) -> labels.put(k, v.toString()));
         }
+        labels.put("jvmId", "uploads");
+        labels.put("connectUrl", "uploads");
         Metadata metadata = new Metadata(labels);
         return doUpload(recording, metadata, "uploads");
     }
@@ -403,6 +405,43 @@ public Collection<ArchivedRecordingDirectory> listFsArchives() {
         return map.values();
     }
 
+    @GET
+    @Blocking
+    @Path("/api/beta/fs/recordings/{jvmId}")
+    @RolesAllowed("read")
+    public Collection<ArchivedRecordingDirectory> listFsArchives(@RestPath String jvmId) {
+        var map = new HashMap<String, ArchivedRecordingDirectory>();
+        recordingHelper
+                .listArchivedRecordingObjects(jvmId)
+                .forEach(
+                        item -> {
+                            String filename = item.key().strip().replace(jvmId + "/", "");
+
+                            Metadata metadata =
+                                    recordingHelper
+                                            .getArchivedRecordingMetadata(jvmId, filename)
+                                            .orElseGet(Metadata::empty);
+
+                            String connectUrl =
+                                    metadata.labels.computeIfAbsent("connectUrl", k -> jvmId);
+                            var dir =
+                                    map.computeIfAbsent(
+                                            jvmId,
+                                            id ->
+                                                    new ArchivedRecordingDirectory(
+                                                            connectUrl, id, new ArrayList<>()));
+                            dir.recordings.add(
+                                    new ArchivedRecording(
+                                            filename,
+                                            recordingHelper.downloadUrl(jvmId, filename),
+                                            recordingHelper.reportUrl(jvmId, filename),
+                                            metadata,
+                                            item.size(),
+                                            item.lastModified().getEpochSecond()));
+                        });
+        return map.values();
+    }
+
     @GET
     @Path("/api/v3/targets/{id}/recordings")
     @RolesAllowed("read")
@@ -439,8 +478,7 @@ public String patch(@RestPath long targetId, @RestPath long remoteId, String bod
         ActiveRecording activeRecording = recording.get();
         switch (body.toLowerCase()) {
             case "stop":
-                activeRecording.state = RecordingState.STOPPED;
-                activeRecording.persist();
+                recordingHelper.stopRecording(activeRecording).await().indefinitely();
                 return null;
             case "save":
                 try {
@@ -450,7 +488,7 @@ public String patch(@RestPath long targetId, @RestPath long remoteId, String bod
                     // completes before sending a response - it should be async. Here we should just
                    // return an Accepted response, and if a failure occurs that should be indicated
                     // as a websocket notification.
-                    return recordingHelper.saveRecording(activeRecording);
+                    return recordingHelper.archiveRecording(activeRecording, null, null).name();
                 } catch (IOException ioe) {
                     logger.warn(ioe);
                     return null;
@@ -622,7 +660,7 @@ void stopRecording(long id, boolean archive) {
             recording.state = RecordingState.STOPPED;
             recording.persist();
             if (archive) {
-                recordingHelper.saveRecording(recording);
+                recordingHelper.archiveRecording(recording, null, null);
             }
         } catch (Exception e) {
             logger.error("couldn't update recording", e);
@@ -681,7 +719,7 @@ public void deleteRecording(@RestPath long targetId, @RestPath long remoteId) th
                 .filter(r -> r.remoteId == remoteId)
                 .findFirst()
                 .ifPresentOrElse(
-                        ActiveRecording::delete,
+                        recordingHelper::deleteRecording,
                        () -> {
                             throw new NotFoundException();
                         });
@@ -945,8 +983,10 @@ public Response handleActiveDownload(@RestPath long id) throws Exception {
         String savename = recording.name;
         String filename =
-                recordingHelper.saveRecording(
-                        recording, savename, Instant.now().plus(transientArchivesTtl));
+                recordingHelper
+                        .archiveRecording(
+                                recording, savename, Instant.now().plus(transientArchivesTtl))
+                        .name();
         String encodedKey = recordingHelper.encodedKey(recording.target.jvmId, filename);
 
         if (!savename.endsWith(".jfr")) {
             savename += ".jfr";
diff --git a/src/main/java/io/cryostat/rules/ScheduledArchiveJob.java b/src/main/java/io/cryostat/rules/ScheduledArchiveJob.java
index 742dc099e..136eee5e2 100644
--- a/src/main/java/io/cryostat/rules/ScheduledArchiveJob.java
+++ b/src/main/java/io/cryostat/rules/ScheduledArchiveJob.java
@@ -91,7 +91,7 @@ void initPreviousRecordings(Target target, Rule rule, Queue<String> previousReco
     @Transactional
     void performArchival(ActiveRecording recording, Queue<String> previousRecordings)
             throws Exception {
-        String filename = recordingHelper.saveRecording(recording);
+        String filename = recordingHelper.archiveRecording(recording, null, null).name();
         previousRecordings.add(filename);
     }
 
diff --git a/src/test/java/io/cryostat/JsonRequestFilterTest.java b/src/test/java/io/cryostat/JsonRequestFilterTest.java
index d2668cde0..0540104a0 100644
--- a/src/test/java/io/cryostat/JsonRequestFilterTest.java
+++ b/src/test/java/io/cryostat/JsonRequestFilterTest.java
@@ -25,9 +25,11 @@
 import jakarta.ws.rs.container.ContainerRequestContext;
 import jakarta.ws.rs.core.MediaType;
 import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.core.UriInfo;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;
+import org.mockito.Mockito;
 
 public class JsonRequestFilterTest {
     private JsonRequestFilter filter;
@@ -76,6 +78,9 @@ private void simulateRequest(String jsonPayload) throws Exception {
                 new ByteArrayInputStream(jsonPayload.getBytes(StandardCharsets.UTF_8));
         when(requestContext.getEntityStream()).thenReturn(payloadStream);
         when(requestContext.getMediaType()).thenReturn(MediaType.APPLICATION_JSON_TYPE);
+        UriInfo uriInfo = Mockito.mock(UriInfo.class);
+        when(uriInfo.getPath()).thenReturn("/api/v3/rules");
+        when(requestContext.getUriInfo()).thenReturn(uriInfo);
         filter.filter(requestContext);
     }
 }
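Taken together with the JsonRequestFilter allow-list at the top of this diff, the new filter-driven mutations (createRecording, archiveRecording, stopRecording, deleteRecording, createSnapshot) can be driven through the /api/v3/graphql endpoint. A rough sketch using only the JDK HTTP client; the host, port, the omitted credentials, and the node name used in the DiscoveryNodeFilter argument are placeholders, and the response is printed raw:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class BulkSnapshotSketch {
    public static void main(String[] args) throws Exception {
        // GraphQL document: snapshot every target underneath any discovery node
        // whose name matches; "nodes" is the DiscoveryNodeFilter argument.
        String query =
                "mutation { createSnapshot(nodes: { name: \"JDP\" }) { name state } }";
        String body = String.format("{\"query\": \"%s\"}", query.replace("\"", "\\\""));

        HttpRequest request =
                HttpRequest.newBuilder(URI.create("http://localhost:8181/api/v3/graphql"))
                        .header("Content-Type", "application/json")
                        .POST(HttpRequest.BodyPublishers.ofString(body))
                        .build();

        HttpResponse<String> response =
                HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());
    }
}
```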