diff --git a/compose/cryostat.yml b/compose/cryostat.yml
index a1f4145e6..0ce57a170 100644
--- a/compose/cryostat.yml
+++ b/compose/cryostat.yml
@@ -26,9 +26,11 @@ services:
io.cryostat.jmxPort: "0"
io.cryostat.jmxUrl: "service:jmx:rmi:///jndi/rmi://localhost:0/jmxrmi"
environment:
+      QUARKUS_LOG_LEVEL: TRACE # FIXME(review): debug leftover — drop or lower before merge
QUARKUS_HTTP_HOST: "cryostat"
QUARKUS_HTTP_PORT: ${CRYOSTAT_HTTP_PORT}
QUARKUS_HIBERNATE_ORM_LOG_SQL: "true"
+ CRYOSTAT_DISABLE_JMX_AUTH: "true"
CRYOSTAT_DISCOVERY_JDP_ENABLED: ${CRYOSTAT_DISCOVERY_JDP_ENABLED:-true}
CRYOSTAT_DISCOVERY_PODMAN_ENABLED: ${CRYOSTAT_DISCOVERY_PODMAN_ENABLED:-true}
CRYOSTAT_DISCOVERY_DOCKER_ENABLED: ${CRYOSTAT_DISCOVERY_DOCKER_ENABLED:-true}
diff --git a/compose/sample-apps.yml b/compose/sample-apps.yml
index 938a37f9d..0138b8ca3 100644
--- a/compose/sample-apps.yml
+++ b/compose/sample-apps.yml
@@ -119,6 +119,7 @@ services:
CRYOSTAT_AGENT_HARVESTER_MAX_FILES: 3
CRYOSTAT_AGENT_HARVESTER_EXIT_MAX_AGE_MS: 60000
CRYOSTAT_AGENT_HARVESTER_EXIT_MAX_SIZE_B: 153600 # "$(echo 1024*150 | bc)"
+ CRYOSTAT_AGENT_API_WRITES_ENABLED: "true"
restart: always
healthcheck:
test: curl --fail http://localhost:10010 || exit 1
diff --git a/pom.xml b/pom.xml
index a9e433848..1e5fdcb9d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -22,6 +22,7 @@
1.16.1
2.13.0
+ 4.4
5.2.1
3.13.0
1.7
@@ -164,6 +165,11 @@
commons-io
${org.apache.commons.io.version}
+
+ org.apache.commons
+ commons-collections4
+ ${org.apache.commons.collections.version}
+
org.apache.httpcomponents.client5
httpclient5
diff --git a/schema/openapi.yaml b/schema/openapi.yaml
index a9a86dbf3..87a077fa5 100644
--- a/schema/openapi.yaml
+++ b/schema/openapi.yaml
@@ -2278,6 +2278,7 @@ paths:
nullable: true
type: string
restart:
+ deprecated: true
nullable: true
type: boolean
toDisk:
diff --git a/schema/schema.graphql b/schema/schema.graphql
index 69284ffef..a3b06cc12 100644
--- a/schema/schema.graphql
+++ b/schema/schema.graphql
@@ -274,6 +274,7 @@ input DiscoveryNodeFilterInput {
name: String
names: [String]
nodeTypes: [String]
+ targetIds: [BigInteger]
}
input Entry_String_StringInput {
diff --git a/src/main/docker/include/jmc-agent.jar b/src/main/docker/include/jmc-agent.jar
new file mode 100644
index 000000000..c0ff66d4a
Binary files /dev/null and b/src/main/docker/include/jmc-agent.jar differ
diff --git a/src/main/java/io/cryostat/JsonRequestFilter.java b/src/main/java/io/cryostat/JsonRequestFilter.java
index 5c9109554..c61651b27 100644
--- a/src/main/java/io/cryostat/JsonRequestFilter.java
+++ b/src/main/java/io/cryostat/JsonRequestFilter.java
@@ -26,6 +26,7 @@
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
+import jakarta.inject.Inject;
import jakarta.ws.rs.container.ContainerRequestContext;
import jakarta.ws.rs.container.ContainerRequestFilter;
import jakarta.ws.rs.core.MediaType;
@@ -45,7 +46,7 @@ public class JsonRequestFilter implements ContainerRequestFilter {
"/api/v3/graphql");
private final Map compiledPatterns = new HashMap<>();
- private final ObjectMapper objectMapper = new ObjectMapper();
+ @Inject ObjectMapper objectMapper;
@Override
public void filter(ContainerRequestContext requestContext) throws IOException {
diff --git a/src/main/java/io/cryostat/credentials/CredentialsFinder.java b/src/main/java/io/cryostat/credentials/CredentialsFinder.java
new file mode 100644
index 000000000..3d0eb7225
--- /dev/null
+++ b/src/main/java/io/cryostat/credentials/CredentialsFinder.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright The Cryostat Authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.cryostat.credentials;
+
+import java.net.URI;
+import java.util.Optional;
+
+import io.cryostat.expressions.MatchExpressionEvaluator;
+import io.cryostat.targets.Target;
+import io.cryostat.targets.Target.EventKind;
+import io.cryostat.targets.Target.TargetDiscovery;
+
+import io.quarkus.vertx.ConsumeEvent;
+import io.smallrye.common.annotation.Blocking;
+import jakarta.enterprise.context.ApplicationScoped;
+import jakarta.inject.Inject;
+import org.apache.commons.collections4.BidiMap;
+import org.apache.commons.collections4.bidimap.DualHashBidiMap;
+import org.jboss.logging.Logger;
+import org.projectnessie.cel.tools.ScriptException;
+
+@ApplicationScoped
+public class CredentialsFinder {
+
+ @Inject MatchExpressionEvaluator expressionEvaluator;
+ @Inject Logger logger;
+
+ private final BidiMap cache = new DualHashBidiMap<>();
+
+ @ConsumeEvent(Credential.CREDENTIALS_DELETED)
+ void onCredentialsDeleted(Credential credential) {
+ cache.removeValue(credential);
+ }
+
+ @ConsumeEvent(Target.TARGET_JVM_DISCOVERY)
+ void onMessage(TargetDiscovery event) {
+ if (EventKind.LOST.equals(event.kind())) {
+ cache.remove(event.serviceRef());
+ }
+ }
+
+ @Blocking
+ public Optional getCredentialsForTarget(Target target) {
+ return Optional.ofNullable(
+ cache.computeIfAbsent(
+ target,
+ t ->
+ Credential.listAll().stream()
+ .filter(
+ c -> {
+ try {
+ return expressionEvaluator.applies(
+ c.matchExpression, t);
+ } catch (ScriptException e) {
+ logger.error(e);
+ return false;
+ }
+ })
+ .findFirst()
+ .orElse(null)));
+ }
+
+ @Blocking
+ public Optional getCredentialsForConnectUrl(URI connectUrl) {
+ return Target.find("connectUrl", connectUrl)
+ .singleResultOptional()
+ .flatMap(this::getCredentialsForTarget);
+ }
+}
diff --git a/src/main/java/io/cryostat/discovery/ContainerDiscovery.java b/src/main/java/io/cryostat/discovery/ContainerDiscovery.java
index 1ce795ff2..48dffc944 100644
--- a/src/main/java/io/cryostat/discovery/ContainerDiscovery.java
+++ b/src/main/java/io/cryostat/discovery/ContainerDiscovery.java
@@ -251,16 +251,14 @@ private void doContainerListRequest(Consumer> successHandler
item.body(),
new TypeReference>() {}));
} catch (JsonProcessingException e) {
- logger.error("Json processing error");
+ logger.error("Json processing error", e);
}
},
failure -> {
- logger.error(
- String.format("%s API request failed", getRealm()),
- failure);
+ logger.errorv(failure, "{0} API request failed", getRealm());
});
} catch (JsonProcessingException e) {
- logger.error("Json processing error");
+ logger.error("Json processing error", e);
}
}
@@ -279,13 +277,12 @@ private CompletableFuture doContainerInspectRequest(ContainerS
result.complete(
mapper.readValue(item.body(), ContainerDetails.class));
} catch (JsonProcessingException e) {
- logger.error("Json processing error");
+ logger.error("Json processing error", e);
result.completeExceptionally(e);
}
},
failure -> {
- logger.error(
- String.format("%s API request failed", getRealm()), failure);
+ logger.errorv(failure, "{0} API request failed", getRealm());
result.completeExceptionally(failure);
});
return result;
@@ -322,7 +319,7 @@ public void handleContainerEvent(ContainerSpec desc, EventKind evtKind) {
.Hostname;
} catch (InterruptedException | TimeoutException | ExecutionException e) {
containers.remove(desc);
- logger.warn(String.format("Invalid %s target observed", getRealm()), e);
+ logger.warnv(e, "Invalid {0} target observed", getRealm());
return;
}
}
@@ -331,7 +328,7 @@ public void handleContainerEvent(ContainerSpec desc, EventKind evtKind) {
connectUrl = URI.create(serviceUrl.toString());
} catch (MalformedURLException | URISyntaxException e) {
containers.remove(desc);
- logger.warn(String.format("Invalid %s target observed", getRealm()), e);
+ logger.warnv(e, "Invalid {0} target observed", getRealm());
return;
}
diff --git a/src/main/java/io/cryostat/discovery/CustomDiscovery.java b/src/main/java/io/cryostat/discovery/CustomDiscovery.java
index 0d94ada2a..3d3428426 100644
--- a/src/main/java/io/cryostat/discovery/CustomDiscovery.java
+++ b/src/main/java/io/cryostat/discovery/CustomDiscovery.java
@@ -25,6 +25,7 @@
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import io.cryostat.ConfigProperties;
import io.cryostat.V2Response;
import io.cryostat.credentials.Credential;
import io.cryostat.expressions.MatchExpression;
@@ -49,6 +50,7 @@
import jakarta.ws.rs.core.Response;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
+import org.eclipse.microprofile.config.inject.ConfigProperty;
import org.hibernate.exception.ConstraintViolationException;
import org.jboss.logging.Logger;
import org.jboss.resteasy.reactive.RestForm;
@@ -68,6 +70,9 @@ public class CustomDiscovery {
@Inject EventBus bus;
@Inject TargetConnectionManager connectionManager;
+ @ConfigProperty(name = ConfigProperties.CONNECTIONS_FAILED_TIMEOUT)
+ Duration timeout;
+
@Transactional
void onStart(@Observes StartupEvent evt) {
DiscoveryNode universe = DiscoveryNode.getUniverse();
@@ -140,7 +145,7 @@ Response doV2Create(
credential,
conn -> conn.getJvmIdentifier().getHash())
.await()
- .atMost(Duration.ofSeconds(10));
+ .atMost(timeout);
} catch (Exception e) {
logger.error("Target connection failed", e);
return Response.status(Response.Status.BAD_REQUEST.getStatusCode())
diff --git a/src/main/java/io/cryostat/discovery/Discovery.java b/src/main/java/io/cryostat/discovery/Discovery.java
index ffd4bb369..2b2aa99a6 100644
--- a/src/main/java/io/cryostat/discovery/Discovery.java
+++ b/src/main/java/io/cryostat/discovery/Discovery.java
@@ -397,17 +397,17 @@ public void execute(JobExecutionContext context) throws JobExecutionException {
var cb = PluginCallback.create(plugin);
if (refresh) {
cb.refresh();
- logger.infov(
+ logger.debugv(
"Refreshed discovery plugin: {0} @ {1}", plugin.realm, plugin.callback);
} else {
cb.ping();
- logger.infov(
+ logger.debugv(
"Retained discovery plugin: {0} @ {1}", plugin.realm, plugin.callback);
}
} catch (Exception e) {
if (plugin != null) {
- logger.infov(
- "Pruned discovery plugin: {0} @ {1}", plugin.realm, plugin.callback);
+ logger.debugv(
+ e, "Pruned discovery plugin: {0} @ {1}", plugin.realm, plugin.callback);
plugin.realm.delete();
plugin.delete();
new DiscoveryPlugin.PluginCallback.DiscoveryPluginAuthorizationHeaderFactory(
diff --git a/src/main/java/io/cryostat/discovery/DiscoveryPlugin.java b/src/main/java/io/cryostat/discovery/DiscoveryPlugin.java
index 75eaf20b7..81a03bf07 100644
--- a/src/main/java/io/cryostat/discovery/DiscoveryPlugin.java
+++ b/src/main/java/io/cryostat/discovery/DiscoveryPlugin.java
@@ -89,6 +89,8 @@ public void prePersist(DiscoveryPlugin plugin) {
return;
}
if (plugin.callback == null) {
+ plugin.realm.delete();
+ plugin.delete();
throw new IllegalArgumentException();
}
try {
@@ -97,8 +99,12 @@ public void prePersist(DiscoveryPlugin plugin) {
"Registered discovery plugin: {0} @ {1}",
plugin.realm.name, plugin.callback);
} catch (URISyntaxException e) {
+ plugin.realm.delete();
+ plugin.delete();
throw new IllegalArgumentException(e);
} catch (Exception e) {
+ plugin.realm.delete();
+ plugin.delete();
logger.error("Discovery Plugin ping failed", e);
throw e;
}
diff --git a/src/main/java/io/cryostat/expressions/MatchExpressionEvaluator.java b/src/main/java/io/cryostat/expressions/MatchExpressionEvaluator.java
index ac33b70e3..1f4de6f9e 100644
--- a/src/main/java/io/cryostat/expressions/MatchExpressionEvaluator.java
+++ b/src/main/java/io/cryostat/expressions/MatchExpressionEvaluator.java
@@ -100,19 +100,16 @@ boolean load(String matchExpression, Target target) throws ScriptException {
}
void invalidate(String matchExpression) {
+ var cache = cacheManager.getCache(CACHE_NAME).orElseThrow();
// 0-index is important here. the argument order of the load() method determines the
// composite key order
- cacheManager
- .getCache(CACHE_NAME)
- .ifPresent(
- c ->
- c.invalidateIf(
- k ->
- Objects.equals(
- (String)
- ((CompositeCacheKey) k)
- .getKeyElements()[0],
- matchExpression)));
+ cache.invalidateIf(
+ k ->
+ Objects.equals(
+ (String) ((CompositeCacheKey) k).getKeyElements()[0],
+ matchExpression))
+ .subscribe()
+ .with((v) -> {}, logger::warn);
}
public boolean applies(MatchExpression matchExpression, Target target) throws ScriptException {
diff --git a/src/main/java/io/cryostat/graphql/ActiveRecordings.java b/src/main/java/io/cryostat/graphql/ActiveRecordings.java
index a78c3c490..4f3f35985 100644
--- a/src/main/java/io/cryostat/graphql/ActiveRecordings.java
+++ b/src/main/java/io/cryostat/graphql/ActiveRecordings.java
@@ -119,7 +119,7 @@ public List archiveRecording(
.map(n -> n.target))
.flatMap(
t ->
- t.activeRecordings.stream()
+ recordingHelper.listActiveRecordings(t).stream()
.filter(r -> recordings == null || recordings.test(r)))
.map(
recording -> {
@@ -148,7 +148,7 @@ public List stopRecording(
.map(n -> n.target))
.flatMap(
t ->
- t.activeRecordings.stream()
+ recordingHelper.listActiveRecordings(t).stream()
.filter(r -> recordings == null || recordings.test(r)))
.map(
recording -> {
@@ -172,23 +172,16 @@ public List stopRecording(
+ " the subtrees of the discovery nodes matching the given filter")
public List deleteRecording(
@NonNull DiscoveryNodeFilter nodes, @Nullable ActiveRecordingsFilter recordings) {
- var activeRecordings =
- DiscoveryNode.listAll().stream()
- .filter(nodes)
- .flatMap(
- node ->
- RootNode.recurseChildren(node, n -> n.target != null)
- .stream()
- .map(n -> n.target))
- .flatMap(
- t ->
- t.activeRecordings.stream()
- .filter(
- r ->
- recordings == null
- || recordings.test(r)))
- .toList();
- return activeRecordings.stream()
+ return DiscoveryNode.listAll().stream()
+ .filter(nodes)
+ .flatMap(
+ node ->
+ RootNode.recurseChildren(node, n -> n.target != null).stream()
+ .map(n -> n.target))
+ .flatMap(
+ t ->
+ recordingHelper.listActiveRecordings(t).stream()
+ .filter(r -> recordings == null || recordings.test(r)))
.map(
recording -> {
try {
@@ -261,7 +254,7 @@ public Uni doSnapshot(@Source Target target) {
@Blocking
@Transactional
@Description("Stop the specified Flight Recording")
- public Uni doStop(@Source ActiveRecording recording) {
+ public Uni doStop(@Source ActiveRecording recording) throws Exception {
var ar = ActiveRecording.findById(recording.id);
return recordingHelper.stopRecording(ar);
}
@@ -358,7 +351,7 @@ public void setLabels(Map labels) {
@SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD")
public static class RecordingMetadata {
- public @Nullable Map labels;
+ public @Nullable Map labels = new HashMap<>();
}
@SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD")
diff --git a/src/main/java/io/cryostat/graphql/RootNode.java b/src/main/java/io/cryostat/graphql/RootNode.java
index ba5f5e74e..90b76fcb4 100644
--- a/src/main/java/io/cryostat/graphql/RootNode.java
+++ b/src/main/java/io/cryostat/graphql/RootNode.java
@@ -68,6 +68,7 @@ static Set recurseChildren(
public static class DiscoveryNodeFilter implements Predicate {
public @Nullable Long id;
public @Nullable List ids;
+ public @Nullable List targetIds;
public @Nullable String name;
public @Nullable List names;
public @Nullable List nodeTypes;
@@ -78,6 +79,12 @@ public static class DiscoveryNodeFilter implements Predicate {
public boolean test(DiscoveryNode t) {
Predicate matchesId = n -> id == null || id.equals(n.id);
Predicate matchesIds = n -> ids == null || ids.contains(n.id);
+            Predicate matchesTargetIds =
+                    n ->
+                            // null filter matches everything; otherwise the node must
+                            // wrap a target whose id is listed
+                            targetIds == null
+                                    || (n.target != null && targetIds.contains(n.target.id));
Predicate matchesName = n -> name == null || name.equals(n.name);
Predicate matchesNames = n -> names == null || names.contains(n.name);
Predicate matchesNodeTypes =
@@ -104,6 +111,7 @@ public boolean test(DiscoveryNode t) {
return List.of(
matchesId,
matchesIds,
+ matchesTargetIds,
matchesName,
matchesNames,
matchesNodeTypes,
diff --git a/src/main/java/io/cryostat/graphql/TargetNodes.java b/src/main/java/io/cryostat/graphql/TargetNodes.java
index 321653955..dfe4907a0 100644
--- a/src/main/java/io/cryostat/graphql/TargetNodes.java
+++ b/src/main/java/io/cryostat/graphql/TargetNodes.java
@@ -15,7 +15,9 @@
*/
package io.cryostat.graphql;
+import java.util.ArrayList;
import java.util.List;
+import java.util.Objects;
import io.cryostat.core.net.JFRConnection;
import io.cryostat.core.net.MBeanMetrics;
@@ -36,6 +38,7 @@
import io.smallrye.graphql.api.Nullable;
import io.smallrye.mutiny.Uni;
import jakarta.inject.Inject;
+import org.apache.commons.lang3.StringUtils;
import org.eclipse.microprofile.graphql.Description;
import org.eclipse.microprofile.graphql.GraphQLApi;
import org.eclipse.microprofile.graphql.NonNull;
@@ -61,6 +64,7 @@ public List getTargetNodes(DiscoveryNodeFilter filter) {
// the one we end up selecting for here.
// .filter(distinctWith(t -> t.jvmId))
.map(t -> t.discoveryNode)
+ .filter(Objects::nonNull)
.filter(n -> filter == null ? true : filter.test(n))
.toList();
}
@@ -75,11 +79,13 @@ public ActiveRecordings activeRecordings(
@Source Target target, @Nullable ActiveRecordingsFilter filter) {
var fTarget = Target.findById(target.id);
var recordings = new ActiveRecordings();
- recordings.data =
- fTarget.activeRecordings.stream()
- .filter(r -> filter == null || filter.test(r))
- .toList();
- recordings.aggregate = AggregateInfo.fromActive(recordings.data);
+ if (StringUtils.isNotBlank(fTarget.jvmId)) {
+ recordings.data =
+ recordingHelper.listActiveRecordings(fTarget).stream()
+ .filter(r -> filter == null || filter.test(r))
+ .toList();
+ recordings.aggregate = AggregateInfo.fromActive(recordings.data);
+ }
return recordings;
}
@@ -88,11 +94,13 @@ public ArchivedRecordings archivedRecordings(
@Source Target target, @Nullable ArchivedRecordingsFilter filter) {
var fTarget = Target.findById(target.id);
var recordings = new ArchivedRecordings();
- recordings.data =
- recordingHelper.listArchivedRecordings(fTarget).stream()
- .filter(r -> filter == null || filter.test(r))
- .toList();
- recordings.aggregate = AggregateInfo.fromArchived(recordings.data);
+ if (StringUtils.isNotBlank(fTarget.jvmId)) {
+ recordings.data =
+ recordingHelper.listArchivedRecordings(fTarget).stream()
+ .filter(r -> filter == null || filter.test(r))
+ .toList();
+ recordings.aggregate = AggregateInfo.fromArchived(recordings.data);
+ }
return recordings;
}
@@ -100,15 +108,17 @@ public ArchivedRecordings archivedRecordings(
@Description("Get the active and archived recordings belonging to this target")
public Recordings recordings(@Source Target target, Context context) {
var fTarget = Target.findById(target.id);
+ var recordings = new Recordings();
+ if (StringUtils.isBlank(fTarget.jvmId)) {
+ return recordings;
+ }
var dfe = context.unwrap(DataFetchingEnvironment.class);
var requestedFields =
dfe.getSelectionSet().getFields().stream().map(field -> field.getName()).toList();
- var recordings = new Recordings();
-
if (requestedFields.contains("active")) {
recordings.active = new ActiveRecordings();
- recordings.active.data = fTarget.activeRecordings;
+ recordings.active.data = recordingHelper.listActiveRecordings(fTarget);
recordings.active.aggregate = AggregateInfo.fromActive(recordings.active.data);
}
@@ -130,20 +140,20 @@ public Uni mbeanMetrics(@Source Target target) {
@SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD")
public static class Recordings {
- public @NonNull ActiveRecordings active;
- public @NonNull ArchivedRecordings archived;
+ public @NonNull ActiveRecordings active = new ActiveRecordings();
+ public @NonNull ArchivedRecordings archived = new ArchivedRecordings();
}
@SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD")
public static class ActiveRecordings {
- public @NonNull List data;
- public @NonNull AggregateInfo aggregate;
+ public @NonNull List data = new ArrayList<>();
+ public @NonNull AggregateInfo aggregate = AggregateInfo.fromActive(data);
}
@SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD")
public static class ArchivedRecordings {
- public @NonNull List data;
- public @NonNull AggregateInfo aggregate;
+ public @NonNull List data = new ArrayList<>();
+ public @NonNull AggregateInfo aggregate = AggregateInfo.fromArchived(data);
}
@SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD")
diff --git a/src/main/java/io/cryostat/recordings/ActiveRecording.java b/src/main/java/io/cryostat/recordings/ActiveRecording.java
index 9ff46c66e..e0fa5b750 100644
--- a/src/main/java/io/cryostat/recordings/ActiveRecording.java
+++ b/src/main/java/io/cryostat/recordings/ActiveRecording.java
@@ -30,6 +30,7 @@
import io.cryostat.ws.MessagingServer;
import io.cryostat.ws.Notification;
+import com.fasterxml.jackson.annotation.JsonIgnore;
import io.quarkus.hibernate.orm.panache.PanacheEntity;
import io.vertx.mutiny.core.eventbus.EventBus;
import jakarta.enterprise.context.ApplicationScoped;
@@ -42,11 +43,8 @@
import jakarta.persistence.PostPersist;
import jakarta.persistence.PostRemove;
import jakarta.persistence.PostUpdate;
-import jakarta.persistence.PreRemove;
-import jakarta.persistence.PreUpdate;
import jakarta.persistence.Table;
import jakarta.persistence.UniqueConstraint;
-import jakarta.transaction.Transactional;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.PositiveOrZero;
@@ -59,7 +57,9 @@
@EntityListeners(ActiveRecording.Listener.class)
@Table(
uniqueConstraints = {
- @UniqueConstraint(columnNames = {"target_id", "name"}),
+ // remoteId is the unique ID assigned by the JVM to its own recordings, so these IDs are
+ // unique but only within the scope of each JVM. Since they are just sequential numeric
+ // IDs, they will not be unique across different JVMs.
@UniqueConstraint(columnNames = {"target_id", "remoteId"})
})
public class ActiveRecording extends PanacheEntity {
@@ -80,6 +80,12 @@ public class ActiveRecording extends PanacheEntity {
@PositiveOrZero public long maxSize;
@PositiveOrZero public long maxAge;
+ /**
+ * true if the recording was discovered on the Target and must have been created by some
+ * external process (not Cryostat), false if created by Cryostat.
+ */
+ @JsonIgnore public boolean external;
+
@JdbcTypeCode(SqlTypes.JSON)
@NotNull
public Metadata metadata;
@@ -145,27 +151,6 @@ public void setMetadata(Metadata metadata) {
this.metadata = metadata;
}
- @Transactional
- public static boolean deleteFromTarget(Target target, String recordingName) {
- Optional recording =
- target.activeRecordings.stream()
- .filter(r -> r.name.equals(recordingName))
- .findFirst();
- boolean found = recording.isPresent();
- if (found) {
- Logger.getLogger(ActiveRecording.class)
- .debugv("Found and deleting match: {0} / {1}", target.alias, recording.get());
- recording.get().delete();
- getEntityManager().flush();
- } else {
- Logger.getLogger(ActiveRecording.class)
- .debugv(
- "No match found for recording {0} in target {1}",
- recordingName, target.alias);
- }
- return found;
- }
-
@ApplicationScoped
static class Listener {
@@ -176,6 +161,9 @@ static class Listener {
@PostPersist
public void postPersist(ActiveRecording activeRecording) {
+ if (activeRecording.external) {
+ return;
+ }
bus.publish(
Recordings.RecordingEventCategory.ACTIVE_CREATED.category(), activeRecording);
notify(
@@ -184,45 +172,11 @@ public void postPersist(ActiveRecording activeRecording) {
ActiveRecordingEvent.Payload.of(recordingHelper, activeRecording)));
}
- @PreUpdate
- public void preUpdate(ActiveRecording activeRecording) throws Exception {
- if (RecordingState.STOPPED.equals(activeRecording.state)) {
- try {
- connectionManager.executeConnectedTask(
- activeRecording.target,
- conn -> {
- RecordingHelper.getDescriptorById(conn, activeRecording.remoteId)
- .ifPresent(
- d -> {
- // this connection can fail if we are removing
- // this recording as a cascading operation after
- // the owner target was lost. It isn't too
- // important in that case that we are unable to
- // connect to the target and close the actual
- // recording, because the target probably went
- // offline or we otherwise just can't reach it.
- try {
- if (!d.getState()
- .equals(
- IRecordingDescriptor
- .RecordingState
- .STOPPED)) {
- conn.getService().stop(d);
- }
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- });
- return null;
- });
- } catch (Exception e) {
- logger.error("Failed to stop remote recording", e);
- }
- }
- }
-
@PostUpdate
public void postUpdate(ActiveRecording activeRecording) {
+ if (activeRecording.external) {
+ return;
+ }
if (RecordingState.STOPPED.equals(activeRecording.state)) {
bus.publish(
Recordings.RecordingEventCategory.ACTIVE_STOPPED.category(),
@@ -234,37 +188,11 @@ public void postUpdate(ActiveRecording activeRecording) {
}
}
- @PreRemove
- public void preRemove(ActiveRecording activeRecording) throws Exception {
- try {
- activeRecording.target.activeRecordings.remove(activeRecording);
- connectionManager.executeConnectedTask(
- activeRecording.target,
- conn -> {
- // this connection can fail if we are removing this recording as a
- // cascading operation after the owner target was lost. It isn't too
- // important in that case that we are unable to connect to the target
- // and close the actual recording, because the target probably went
- // offline or we otherwise just can't reach it.
- try {
- RecordingHelper.getDescriptor(conn, activeRecording)
- .ifPresent(
- rec ->
- Recordings.safeCloseRecording(
- conn, rec, logger));
- } catch (Exception e) {
- logger.info(e);
- }
- return null;
- });
- } catch (Exception e) {
- logger.error(e);
- throw e;
- }
- }
-
@PostRemove
public void postRemove(ActiveRecording activeRecording) {
+ if (activeRecording.external) {
+ return;
+ }
bus.publish(
Recordings.RecordingEventCategory.ACTIVE_DELETED.category(), activeRecording);
notify(
diff --git a/src/main/java/io/cryostat/recordings/RecordingHelper.java b/src/main/java/io/cryostat/recordings/RecordingHelper.java
index c5590e19a..1a615128a 100644
--- a/src/main/java/io/cryostat/recordings/RecordingHelper.java
+++ b/src/main/java/io/cryostat/recordings/RecordingHelper.java
@@ -32,12 +32,15 @@
import java.time.ZoneOffset;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
+import java.util.Date;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
@@ -46,9 +49,7 @@
import org.openjdk.jmc.common.unit.IConstrainedMap;
import org.openjdk.jmc.common.unit.QuantityConversionException;
-import org.openjdk.jmc.flightrecorder.configuration.events.EventOptionID;
import org.openjdk.jmc.flightrecorder.configuration.recording.RecordingOptionsBuilder;
-import org.openjdk.jmc.rjmx.services.jfr.IEventTypeInfo;
import org.openjdk.jmc.rjmx.services.jfr.IRecordingDescriptor;
import io.cryostat.ConfigProperties;
@@ -98,6 +99,15 @@
import org.apache.hc.core5.http.HttpStatus;
import org.eclipse.microprofile.config.inject.ConfigProperty;
import org.jboss.logging.Logger;
+import org.quartz.Job;
+import org.quartz.JobBuilder;
+import org.quartz.JobDetail;
+import org.quartz.JobExecutionContext;
+import org.quartz.JobExecutionException;
+import org.quartz.JobKey;
+import org.quartz.Scheduler;
+import org.quartz.Trigger;
+import org.quartz.TriggerBuilder;
import software.amazon.awssdk.core.exception.SdkClientException;
import software.amazon.awssdk.core.sync.RequestBody;
import software.amazon.awssdk.services.s3.S3Client;
@@ -138,7 +148,7 @@ public class RecordingHelper {
@Inject EventOptionsBuilder.Factory eventOptionsBuilderFactory;
@Inject TargetTemplateService.Factory targetTemplateServiceFactory;
@Inject S3TemplateService customTemplateService;
- @Inject RecordingHelper recordingHelper;
+ @Inject Scheduler scheduler;
@Inject
@Named(Producers.BASE64_URL)
@@ -161,6 +171,8 @@ public class RecordingHelper {
CompletableFuture grafanaDatasourceURL = new CompletableFuture<>();
+ private final List jobs = new CopyOnWriteArrayList<>();
+
void onStart(@Observes StartupEvent evt) {
if (grafanaDatasourceURLProperty.isEmpty()) {
grafanaDatasourceURL.completeExceptionally(
@@ -199,7 +211,92 @@ void onStart(@Observes StartupEvent evt) {
}
}
- @Transactional
+ // FIXME hacky. This opens a remote connection on each call and updates our database with the
+ // data we find there. We should have some remote connection callback (JMX listener, WebSocket)
+ // to the target and update our database when remote recording events occur, rather than doing a
+ // full sync when this method is called.
+ public List listActiveRecordings(Target target) {
+ target = Target.find("id", target.id).singleResult();
+ try {
+ var previousRecordings = target.activeRecordings;
+ var previousIds =
+ new HashSet<>(previousRecordings.stream().map(r -> r.remoteId).toList());
+ var previousNames =
+ new HashSet<>(previousRecordings.stream().map(r -> r.name).toList());
+ List descriptors =
+ connectionManager.executeConnectedTask(
+ target, conn -> conn.getService().getAvailableRecordings());
+ boolean updated = false;
+ var it = target.activeRecordings.iterator();
+ while (it.hasNext()) {
+ var r = it.next();
+                if (descriptors.stream().noneMatch(d -> d.getId() == r.remoteId)) {
+ r.delete();
+ it.remove();
+ updated |= true;
+ }
+ }
+ for (var descriptor : descriptors) {
+ if (previousIds.contains(descriptor.getId())) {
+ var recording = target.getRecordingById(descriptor.getId());
+ switch (descriptor.getState()) {
+ case CREATED:
+ recording.state = RecordingState.DELAYED;
+ updated |= true;
+ break;
+ case RUNNING:
+ recording.state = RecordingState.RUNNING;
+ updated |= true;
+ break;
+ case STOPPING:
+ recording.state = RecordingState.RUNNING;
+ updated |= true;
+ break;
+ case STOPPED:
+ recording.state = RecordingState.STOPPED;
+ updated |= true;
+ break;
+ default:
+ recording.state = RecordingState.NEW;
+ updated |= true;
+ break;
+ }
+ if (updated) {
+ recording.persist();
+ }
+ continue;
+ }
+ updated |= true;
+ // TODO is there any metadata to attach here?
+ var recording = ActiveRecording.from(target, descriptor, new Metadata(Map.of()));
+ recording.external = true;
+ // FIXME this is a hack. Older Cryostat versions enforced that recordings' names
+ // were unique within the target JVM, but this could only be enforced when Cryostat
+ // was originating the recording creation. Recordings already have unique IDs, so
+ // enforcing unique names was only for the purpose of providing a tidier UI. We
+ // should remove this assumption/enforcement and allow recordings to have non-unique
+ // names. However, the UI is currently built with this expectation and often uses
+ // recordings' names as unique keys rather than their IDs.
+ while (previousNames.contains(recording.name)) {
+ recording.name = String.format("%s-%d", recording.name, recording.remoteId);
+ }
+ previousNames.add(recording.name);
+ previousIds.add(recording.remoteId);
+ recording.persist();
+ target.activeRecordings.add(recording);
+ }
+ if (updated) {
+ target.persist();
+ }
+ } catch (Exception e) {
+ logger.errorv(
+ e,
+ "Failure to synchronize existing target recording state for {0}",
+ target.connectUrl);
+ }
+ return target.activeRecordings;
+ }
+
public Uni startRecording(
Target target,
RecordingReplace replace,
@@ -207,6 +304,7 @@ public Uni startRecording(
RecordingOptions options,
Map rawLabels)
throws QuantityConversionException {
+ String recordingName = options.name();
return connectionManager.executeConnectedTaskUni(
target,
conn -> {
@@ -238,17 +336,17 @@ public Uni startRecording(
throw new EntityExistsException(
"Recording", options.name());
}
- if (!ActiveRecording.deleteFromTarget(
- target, options.name())) {
- logger.warnf(
- "Could not delete recording %s from target %s",
- options.name(), target.alias);
- }
+ listActiveRecordings(target).stream()
+ .filter(r -> r.name.equals(recordingName))
+ .forEach(this::deleteRecording);
});
IRecordingDescriptor desc =
conn.getService()
- .start(recordingOptions, enableEvents(target, template));
+ .start(
+ recordingOptions,
+ template.getName(),
+ template.getType());
Map labels = new HashMap<>(rawLabels);
labels.put("template.name", template.getName());
@@ -261,6 +359,28 @@ public Uni startRecording(
target.activeRecordings.add(recording);
target.persist();
+ if (!recording.continuous) {
+ JobDetail jobDetail =
+ JobBuilder.newJob(StopRecordingJob.class)
+ .withIdentity(recording.name, target.jvmId)
+ .build();
+ if (!jobs.contains(jobDetail.getKey())) {
+ Map data = jobDetail.getJobDataMap();
+ data.put("recordingId", recording.id);
+ data.put("archive", options.archiveOnStop().orElse(false));
+ Trigger trigger =
+ TriggerBuilder.newTrigger()
+ .withIdentity(recording.name, target.jvmId)
+ .usingJobData(jobDetail.getJobDataMap())
+ .startAt(
+ new Date(
+ System.currentTimeMillis()
+ + recording.duration))
+ .build();
+ scheduler.scheduleJob(jobDetail, trigger);
+ }
+ }
+
logger.tracev(
"Started recording: {0} {1}",
target.connectUrl, target.activeRecordings);
@@ -332,19 +452,6 @@ public Uni createSnapshot(Target target) {
});
}
- public Uni stopRecording(ActiveRecording recording) {
- recording.state = RecordingState.STOPPED;
- recording.persist();
- return Uni.createFrom().item(recording);
- }
-
- @Transactional
- public Uni deleteRecording(ActiveRecording recording) {
- recording.delete();
- recording.persist();
- return Uni.createFrom().item(recording);
- }
-
private boolean snapshotIsReadable(Target target, InputStream snapshot) throws IOException {
if (!connectionManager.markConnectionInUse(target)) {
throw new IOException(
@@ -373,6 +480,55 @@ private boolean shouldRestartRecording(
}
}
+ // Stops a recording on the remote target (if not already stopped there) and
+ // marks the local entity STOPPED; when {@code archive} is true the stopped
+ // recording is immediately archived to storage.
+ // Throws NotFoundException if the remote target no longer has a recording
+ // with this remoteId.
+ // NOTE(review): recording.state is mutated inside the connected task but
+ // persisted only afterwards, and this method carries no @Transactional —
+ // confirm callers provide an active transaction for out.persist().
+ public Uni stopRecording(ActiveRecording recording, boolean archive)
+ throws Exception {
+ var out =
+ connectionManager.executeConnectedTask(
+ recording.target,
+ conn -> {
+ // resolve the live JFR descriptor for this persisted recording
+ var desc = getDescriptorById(conn, recording.remoteId);
+ if (desc.isEmpty()) {
+ throw new NotFoundException();
+ }
+ // only issue a remote stop when the recording is still running;
+ // stopping an already-STOPPED recording would fail
+ if (!desc.get()
+ .getState()
+ .equals(
+ org.openjdk.jmc.rjmx.services.jfr.IRecordingDescriptor
+ .RecordingState.STOPPED)) {
+ conn.getService().stop(desc.get());
+ }
+ recording.state = RecordingState.STOPPED;
+ return recording;
+ });
+ out.persist();
+ if (archive) {
+ // savename/expiry null: use the generated filename, no expiration
+ archiveRecording(out, null, null);
+ }
+ return Uni.createFrom().item(out);
+ }
+
+ // Convenience overload: stop the recording without archiving it.
+ public Uni stopRecording(ActiveRecording recording) throws Exception {
+ return stopRecording(recording, false);
+ }
+
+ // Deletes a recording: closes it on the remote target when it is still
+ // present there, then removes the local database entity and detaches it
+ // from its target.
+ // Fix: previously a missing remote descriptor threw NotFoundException,
+ // which made a stale local row (recording closed externally or target
+ // restarted) permanently undeletable. A recording absent on the remote
+ // side is now simply removed locally.
+ public Uni deleteRecording(ActiveRecording recording) {
+ var closed =
+ connectionManager.executeConnectedTask(
+ recording.target,
+ conn -> {
+ var desc = getDescriptorById(conn, recording.remoteId);
+ if (desc.isPresent()) {
+ // close releases the remote recording's resources;
+ // skip when the target no longer knows this recording
+ conn.getService().close(desc.get());
+ }
+ return recording;
+ });
+ closed.target.activeRecordings.remove(recording);
+ closed.target.persist();
+ closed.delete();
+ return Uni.createFrom().item(closed);
+ }
+
public LinkedRecordingDescriptor toExternalForm(ActiveRecording recording) {
return new LinkedRecordingDescriptor(
recording.id,
@@ -405,44 +561,6 @@ public Pair parseEventSpecifier(String eventSpecifier) {
throw new BadRequestException(eventSpecifier);
}
- private IConstrainedMap enableAllEvents(Target target) throws Exception {
- return connectionManager.executeConnectedTask(
- target,
- connection -> {
- EventOptionsBuilder builder = eventOptionsBuilderFactory.create(connection);
-
- for (IEventTypeInfo eventTypeInfo :
- connection.getService().getAvailableEventTypes()) {
- builder.addEvent(
- eventTypeInfo.getEventTypeID().getFullKey(), "enabled", "true");
- }
-
- return builder.build();
- });
- }
-
- private IConstrainedMap enableEvents(Target target, Template eventTemplate)
- throws Exception {
- if (EventTemplates.ALL_EVENTS_TEMPLATE.equals(eventTemplate)) {
- return enableAllEvents(target);
- }
- switch (eventTemplate.getType()) {
- case TARGET:
- return targetTemplateServiceFactory
- .create(target)
- .getEvents(eventTemplate.getName(), eventTemplate.getType())
- .orElseThrow();
- case CUSTOM:
- return customTemplateService
- .getEvents(eventTemplate.getName(), eventTemplate.getType())
- .orElseThrow();
- default:
- throw new BadRequestException(
- String.format(
- "Invalid/unknown event template %s", eventTemplate.getName()));
- }
- }
-
public Template getPreferredTemplate(
Target target, String templateName, TemplateType templateType) {
Objects.requireNonNull(target);
@@ -490,8 +608,7 @@ public Template getPreferredTemplate(
}
}
- static Optional getDescriptorById(
- JFRConnection connection, long remoteId) {
+ Optional getDescriptorById(JFRConnection connection, long remoteId) {
try {
return connection.getService().getAvailableRecordings().stream()
.filter(r -> remoteId == r.getId())
@@ -501,12 +618,12 @@ static Optional getDescriptorById(
}
}
- static Optional getDescriptor(
+ Optional getDescriptor(
JFRConnection connection, ActiveRecording activeRecording) {
return getDescriptorById(connection, activeRecording.remoteId);
}
- public static Optional getDescriptorByName(
+ public Optional getDescriptorByName(
JFRConnection connection, String recordingName) {
try {
return connection.getService().getAvailableRecordings().stream()
@@ -602,25 +719,25 @@ public List listArchivedRecordings(Target target) {
}
public ArchivedRecording archiveRecording(
- ActiveRecording activeRecording, String savename, Instant expiry) throws Exception {
+ ActiveRecording recording, String savename, Instant expiry) throws Exception {
// AWS object key name guidelines advise characters to avoid (% so we should not pass url
// encoded characters)
String transformedAlias =
- URLDecoder.decode(activeRecording.target.alias, StandardCharsets.UTF_8)
+ URLDecoder.decode(recording.target.alias, StandardCharsets.UTF_8)
.replaceAll("[\\._/]+", "-");
Instant now = clock.now();
String timestamp = now.truncatedTo(ChronoUnit.SECONDS).toString().replaceAll("[-:]+", "");
String filename =
- String.format("%s_%s_%s.jfr", transformedAlias, activeRecording.name, timestamp);
+ String.format("%s_%s_%s.jfr", transformedAlias, recording.name, timestamp);
if (StringUtils.isBlank(savename)) {
savename = filename;
}
int mib = 1024 * 1024;
- String key = archivedRecordingKey(activeRecording.target.jvmId, filename);
+ String key = archivedRecordingKey(recording.target.jvmId, filename);
String multipartId = null;
List> parts = new ArrayList<>();
long accum = 0;
- try (var stream = remoteRecordingStreamFactory.open(activeRecording);
+ try (var stream = getActiveInputStream(recording);
var ch = Channels.newChannel(stream)) {
ByteBuffer buf = ByteBuffer.allocate(20 * mib);
CreateMultipartUploadRequest.Builder builder =
@@ -630,7 +747,7 @@ public ArchivedRecording archiveRecording(
.contentType(JFR_MIME)
.contentDisposition(
String.format("attachment; filename=\"%s\"", savename))
- .tagging(createActiveRecordingTagging(activeRecording, expiry));
+ .tagging(createActiveRecordingTagging(recording, expiry));
if (expiry != null && expiry.isAfter(Instant.now())) {
builder = builder.expires(expiry);
}
@@ -717,18 +834,18 @@ public ArchivedRecording archiveRecording(
var event =
new ActiveRecordingEvent(
Recordings.RecordingEventCategory.ACTIVE_SAVED,
- ActiveRecordingEvent.Payload.of(this, activeRecording));
+ ActiveRecordingEvent.Payload.of(this, recording));
bus.publish(event.category().category(), event.payload().recording());
bus.publish(
MessagingServer.class.getName(),
new Notification(event.category().category(), event.payload()));
}
return new ArchivedRecording(
- activeRecording.target.jvmId,
+ recording.target.jvmId,
filename,
- downloadUrl(activeRecording.target.jvmId, filename),
- reportUrl(activeRecording.target.jvmId, filename),
- activeRecording.metadata,
+ downloadUrl(recording.target.jvmId, filename),
+ reportUrl(recording.target.jvmId, filename),
+ recording.metadata,
accum,
now.getEpochSecond());
}
@@ -841,6 +958,16 @@ private int retryRead(ReadableByteChannel channel, ByteBuffer buffer) throws IOE
return read;
}
+ // Best-effort close of a remote recording: any failure is logged and
+ // swallowed so cleanup paths are never interrupted by a flaky target.
+ void safeCloseRecording(JFRConnection conn, IRecordingDescriptor rec) {
+ try {
+ conn.getService().close(rec);
+ } catch (org.openjdk.jmc.rjmx.services.jfr.FlightRecorderException e) {
+ // fix: message previously said "stop", but this operation is close
+ logger.error("Failed to close remote recording", e);
+ } catch (Exception e) {
+ logger.error("Unexpected exception", e);
+ }
+ }
+
/* Archived Recording Helpers */
public void deleteArchivedRecording(String jvmId, String filename) {
storage.deleteObject(
@@ -950,7 +1077,7 @@ public ActiveRecording updateRecordingMetadata(
notify(
new ActiveRecordingEvent(
Recordings.RecordingEventCategory.METADATA_UPDATED,
- ActiveRecordingEvent.Payload.of(recordingHelper, recording)));
+ ActiveRecordingEvent.Payload.of(this, recording)));
}
return recording;
}
@@ -1130,6 +1257,29 @@ public static RecordingReplace fromString(String replace) {
}
}
+ // Quartz job scheduled (in startRecording) to fire at recording start time
+ // plus duration; stops the recording and, when the "archive" flag was set,
+ // archives it. A recording already deleted by the time the job fires is
+ // silently skipped.
+ // NOTE(review): @Inject on a Quartz Job requires a CDI-aware job factory
+ // (as provided by the Quarkus Quartz extension) — confirm the scheduler is
+ // configured for CDI job instantiation, otherwise these fields stay null.
+ static class StopRecordingJob implements Job {
+
+ @Inject RecordingHelper recordingHelper;
+ @Inject Logger logger;
+
+ @Override
+ @Transactional
+ public void execute(JobExecutionContext ctx) throws JobExecutionException {
+ // job data keys populated at scheduling time:
+ // "recordingId" (Long) and "archive" (Boolean)
+ var jobDataMap = ctx.getJobDetail().getJobDataMap();
+ try {
+ Optional recording =
+ ActiveRecording.find("id", (Long) jobDataMap.get("recordingId"))
+ .singleResultOptional();
+ if (recording.isPresent()) {
+ recordingHelper.stopRecording(
+ recording.get(), (Boolean) jobDataMap.get("archive"));
+ }
+ } catch (Exception e) {
+ // wrap so Quartz records the failure against this execution
+ throw new JobExecutionException(e);
+ }
+ }
+ }
+
static class RecordingNotFoundException extends Exception {
public RecordingNotFoundException(String targetId, String recordingName) {
super(
diff --git a/src/main/java/io/cryostat/recordings/Recordings.java b/src/main/java/io/cryostat/recordings/Recordings.java
index 545a72d88..ba1bf5965 100644
--- a/src/main/java/io/cryostat/recordings/Recordings.java
+++ b/src/main/java/io/cryostat/recordings/Recordings.java
@@ -29,15 +29,12 @@
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.openjdk.jmc.common.unit.IConstrainedMap;
import org.openjdk.jmc.common.unit.IOptionDescriptor;
import org.openjdk.jmc.flightrecorder.configuration.recording.RecordingOptionsBuilder;
-import org.openjdk.jmc.rjmx.services.jfr.FlightRecorderException;
import org.openjdk.jmc.rjmx.services.jfr.IFlightRecorderService;
import org.openjdk.jmc.rjmx.services.jfr.IRecordingDescriptor;
@@ -47,7 +44,6 @@
import io.cryostat.V2Response;
import io.cryostat.core.EventOptionsBuilder;
import io.cryostat.core.RecordingOptionsCustomizer;
-import io.cryostat.core.net.JFRConnection;
import io.cryostat.core.sys.Clock;
import io.cryostat.core.templates.Template;
import io.cryostat.core.templates.TemplateType;
@@ -121,7 +117,6 @@ public class Recordings {
@Inject StorageBuckets storageBuckets;
@Inject S3Presigner presigner;
@Inject RemoteRecordingInputStreamFactory remoteRecordingStreamFactory;
- @Inject ScheduledExecutorService scheduler;
@Inject ObjectMapper mapper;
@Inject RecordingHelper recordingHelper;
@Inject Logger logger;
@@ -452,7 +447,9 @@ public Collection listFsArchives(@RestPath String jv
@RolesAllowed("read")
public List listForTarget(@RestPath long id) throws Exception {
Target target = Target.find("id", id).singleResult();
- return target.activeRecordings.stream().map(recordingHelper::toExternalForm).toList();
+ return recordingHelper.listActiveRecordings(target).stream()
+ .map(recordingHelper::toExternalForm)
+ .toList();
}
@GET
@@ -474,7 +471,7 @@ public String patch(@RestPath long targetId, @RestPath long remoteId, String bod
throws Exception {
Target target = Target.find("id", targetId).singleResult();
Optional recording =
- target.activeRecordings.stream()
+ recordingHelper.listActiveRecordings(target).stream()
.filter(rec -> rec.remoteId == remoteId)
.findFirst();
if (!recording.isPresent()) {
@@ -513,7 +510,7 @@ public Response patchV1(@RestPath URI connectUrl, @RestPath String recordingName
Target target = Target.getTargetByConnectUrl(connectUrl);
Optional recording =
connectionManager.executeConnectedTask(
- target, conn -> RecordingHelper.getDescriptorByName(conn, recordingName));
+ target, conn -> recordingHelper.getDescriptorByName(conn, recordingName));
if (recording.isEmpty()) {
throw new NotFoundException();
}
@@ -596,7 +593,7 @@ public Response createRecording(
@RestForm Optional replace,
// restart param is deprecated, only 'replace' should be used and takes priority if both
// are provided
- @RestForm Optional restart,
+ @Deprecated @RestForm Optional restart,
@RestForm Optional duration,
@RestForm Optional toDisk,
@RestForm Optional maxAge,
@@ -644,35 +641,11 @@ public Response createRecording(
.await()
.atMost(Duration.ofSeconds(10));
- if (recording.duration > 0) {
- scheduler.schedule(
- () -> stopRecording(recording.id, archiveOnStop.orElse(false)),
- recording.duration,
- TimeUnit.MILLISECONDS);
- }
-
return Response.status(Response.Status.CREATED)
.entity(recordingHelper.toExternalForm(recording))
.build();
}
- @Transactional
- void stopRecording(long id, boolean archive) {
- ActiveRecording.findByIdOptional(id)
- .ifPresent(
- recording -> {
- try {
- recording.state = RecordingState.STOPPED;
- recording.persist();
- if (archive) {
- recordingHelper.archiveRecording(recording, null, null);
- }
- } catch (Exception e) {
- logger.error("couldn't update recording", e);
- }
- });
- }
-
@POST
@Transactional
@Blocking
@@ -700,7 +673,7 @@ public Response deleteRecordingV1(@RestPath URI connectUrl, @RestPath String rec
}
Target target = Target.getTargetByConnectUrl(connectUrl);
long remoteId =
- target.activeRecordings.stream()
+ recordingHelper.listActiveRecordings(target).stream()
.filter(r -> Objects.equals(r.name, recordingName))
.findFirst()
.map(r -> r.remoteId)
@@ -720,14 +693,11 @@ public Response deleteRecordingV1(@RestPath URI connectUrl, @RestPath String rec
@RolesAllowed("write")
public void deleteRecording(@RestPath long targetId, @RestPath long remoteId) throws Exception {
Target target = Target.find("id", targetId).singleResult();
- target.activeRecordings.stream()
- .filter(r -> r.remoteId == remoteId)
- .findFirst()
- .ifPresentOrElse(
- recordingHelper::deleteRecording,
- () -> {
- throw new NotFoundException();
- });
+ var recording = target.getRecordingById(remoteId);
+ if (recording == null) {
+ throw new NotFoundException();
+ }
+ recordingHelper.deleteRecording(recording);
}
@DELETE
@@ -791,16 +761,6 @@ public void deleteArchivedRecording(@RestPath String jvmId, @RestPath String fil
}
}
- static void safeCloseRecording(JFRConnection conn, IRecordingDescriptor rec, Logger logger) {
- try {
- conn.getService().close(rec);
- } catch (FlightRecorderException e) {
- logger.error("Failed to stop remote recording", e);
- } catch (Exception e) {
- logger.error("Unexpected exception", e);
- }
- }
-
@POST
@Blocking
@Path("/api/v1/targets/{connectUrl}/recordings/{recordingName}/upload")
@@ -809,7 +769,7 @@ public Response uploadActiveToGrafanaV1(
@RestPath URI connectUrl, @RestPath String recordingName) {
Target target = Target.getTargetByConnectUrl(connectUrl);
long remoteId =
- target.activeRecordings.stream()
+ recordingHelper.listActiveRecordings(target).stream()
.filter(r -> Objects.equals(r.name, recordingName))
.findFirst()
.map(r -> r.remoteId)
diff --git a/src/main/java/io/cryostat/recordings/RemoteRecordingInputStreamFactory.java b/src/main/java/io/cryostat/recordings/RemoteRecordingInputStreamFactory.java
index c95677a1d..4b80e0a9c 100644
--- a/src/main/java/io/cryostat/recordings/RemoteRecordingInputStreamFactory.java
+++ b/src/main/java/io/cryostat/recordings/RemoteRecordingInputStreamFactory.java
@@ -31,13 +31,14 @@
public class RemoteRecordingInputStreamFactory {
@Inject TargetConnectionManager connectionManager;
+ @Inject RecordingHelper recordingHelper;
public ProgressInputStream open(ActiveRecording recording) throws Exception {
return connectionManager.executeConnectedTask(
recording.target,
conn -> {
IRecordingDescriptor desc =
- RecordingHelper.getDescriptor(conn, recording).orElseThrow();
+ recordingHelper.getDescriptor(conn, recording).orElseThrow();
return open(conn, recording.target, desc);
});
}
diff --git a/src/main/java/io/cryostat/reports/Reports.java b/src/main/java/io/cryostat/reports/Reports.java
index 0950fc82b..dfd3677a7 100644
--- a/src/main/java/io/cryostat/reports/Reports.java
+++ b/src/main/java/io/cryostat/reports/Reports.java
@@ -116,7 +116,7 @@ public Uni