From 202029809e808b8e6a495838b938a838dec7b128 Mon Sep 17 00:00:00 2001 From: Andrew Azores Date: Thu, 18 Apr 2024 18:13:27 -0400 Subject: [PATCH] feat(graphql): reimplement GraphQL API (#294) Co-authored-by: Atif Ali Co-authored-by: Atif Ali <56743004+aali309@users.noreply.github.com> --- .github/workflows/pr-ci.yaml | 50 +- compose/cryostat.yml | 6 +- pom.xml | 4 + schema/openapi.yaml | 76 + schema/schema.graphql | 304 +++ schema/update.bash | 2 +- .../java/io/cryostat/JsonRequestFilter.java | 7 +- .../cryostat/ObjectMapperCustomization.java | 83 + .../io/cryostat/graphql/ActiveRecordings.java | 422 ++++ .../cryostat/graphql/ArchivedRecordings.java | 157 ++ .../io/cryostat/graphql/EnvironmentNodes.java | 39 + .../java/io/cryostat/graphql/GraphQL.java | 63 + .../io/cryostat/graphql/RecordingLinks.java | 55 + .../java/io/cryostat/graphql/RootNode.java | 117 + .../io/cryostat/graphql/SchemaExtension.java | 54 + .../java/io/cryostat/graphql/TargetNodes.java | 175 ++ .../graphql/matchers/EqualityMatcher.java | 71 + .../graphql/matchers/LabelMatcher.java | 22 + .../matchers/LabelSelectorMatcher.java | 121 + .../cryostat/graphql/matchers/SetMatcher.java | 82 + .../cryostat/recordings/ActiveRecording.java | 11 +- .../cryostat/recordings/RecordingHelper.java | 396 ++- .../io/cryostat/recordings/Recordings.java | 191 +- .../java/io/cryostat/rules/RuleService.java | 70 +- .../cryostat/rules/ScheduledArchiveJob.java | 2 +- src/main/java/io/cryostat/targets/Target.java | 19 +- .../java/io/cryostat/ws/MessagingServer.java | 2 +- src/main/resources/application.properties | 6 + src/test/java/itest/CustomTargetsTest.java | 12 +- src/test/java/itest/GraphQLTest.java | 2125 +++++++++++++++++ 30 files changed, 4470 insertions(+), 274 deletions(-) create mode 100644 schema/schema.graphql create mode 100644 src/main/java/io/cryostat/ObjectMapperCustomization.java create mode 100644 src/main/java/io/cryostat/graphql/ActiveRecordings.java create mode 100644 src/main/java/io/cryostat/graphql/ArchivedRecordings.java create mode 100644 src/main/java/io/cryostat/graphql/EnvironmentNodes.java create mode 100644 src/main/java/io/cryostat/graphql/GraphQL.java create mode 100644 src/main/java/io/cryostat/graphql/RecordingLinks.java create mode 100644 src/main/java/io/cryostat/graphql/RootNode.java create mode 100644 src/main/java/io/cryostat/graphql/SchemaExtension.java create mode 100644 src/main/java/io/cryostat/graphql/TargetNodes.java create mode 100644 src/main/java/io/cryostat/graphql/matchers/EqualityMatcher.java create mode 100644 src/main/java/io/cryostat/graphql/matchers/LabelMatcher.java create mode 100644 src/main/java/io/cryostat/graphql/matchers/LabelSelectorMatcher.java create mode 100644 src/main/java/io/cryostat/graphql/matchers/SetMatcher.java create mode 100644 src/test/java/itest/GraphQLTest.java diff --git a/.github/workflows/pr-ci.yaml b/.github/workflows/pr-ci.yaml index af5da51f0..a95e1b83b 100644 --- a/.github/workflows/pr-ci.yaml +++ b/.github/workflows/pr-ci.yaml @@ -270,28 +270,28 @@ jobs: body: commentBody }); - # compare-graphql-schema: - # needs: [update-schemas] - # runs-on: ubuntu-latest - # permissions: - # pull-requests: write - # steps: - # - uses: actions/download-artifact@v3 - # with: - # name: graphql-diff - # - name: Comment schema check result - # uses: actions/github-script@v6 - # with: - # script: | - # const diffFmt = s => { - # return "```diff\n" + s + "\n```"; - # }; - # const commentBody = ${{ needs.update-schemas.outputs.GRAPHQL_STATUS }} == '0' - # ? 
`No GraphQL schema changes detected.` - # : `GraphQL schema change detected:\n\n${diffFmt(require('fs').readFileSync('${{ needs.update-schemas.outputs.GRAPHQL_DIFF_FILE }}'))}`; - # github.rest.issues.createComment({ - # issue_number: context.issue.number, - # owner: context.repo.owner, - # repo: context.repo.repo, - # body: commentBody - # }); + compare-graphql-schema: + needs: [update-schemas] + runs-on: ubuntu-latest + permissions: + pull-requests: write + steps: + - uses: actions/download-artifact@v3 + with: + name: graphql-diff + - name: Comment schema check result + uses: actions/github-script@v6 + with: + script: | + const diffFmt = s => { + return "```diff\n" + s + "\n```"; + }; + const commentBody = ${{ needs.update-schemas.outputs.GRAPHQL_STATUS }} == '0' + ? `No GraphQL schema changes detected.` + : `GraphQL schema change detected:\n\n${diffFmt(require('fs').readFileSync('${{ needs.update-schemas.outputs.GRAPHQL_DIFF_FILE }}'))}`; + github.rest.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: commentBody + }); diff --git a/compose/cryostat.yml b/compose/cryostat.yml index f77c3167a..a1f4145e6 100644 --- a/compose/cryostat.yml +++ b/compose/cryostat.yml @@ -29,9 +29,9 @@ services: QUARKUS_HTTP_HOST: "cryostat" QUARKUS_HTTP_PORT: ${CRYOSTAT_HTTP_PORT} QUARKUS_HIBERNATE_ORM_LOG_SQL: "true" - CRYOSTAT_DISCOVERY_JDP_ENABLED: "true" - CRYOSTAT_DISCOVERY_PODMAN_ENABLED: "true" - CRYOSTAT_DISCOVERY_DOCKER_ENABLED: "true" + CRYOSTAT_DISCOVERY_JDP_ENABLED: ${CRYOSTAT_DISCOVERY_JDP_ENABLED:-true} + CRYOSTAT_DISCOVERY_PODMAN_ENABLED: ${CRYOSTAT_DISCOVERY_PODMAN_ENABLED:-true} + CRYOSTAT_DISCOVERY_DOCKER_ENABLED: ${CRYOSTAT_DISCOVERY_DOCKER_ENABLED:-true} JAVA_OPTS_APPEND: "-XX:+FlightRecorder -XX:StartFlightRecording=name=onstart,settings=default,disk=true,maxage=5m -XX:StartFlightRecording=name=startup,settings=profile,disk=true,duration=30s -Dcom.sun.management.jmxremote.autodiscovery=true -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=9091 -Dcom.sun.management.jmxremote.rmi.port=9091 -Djava.rmi.server.hostname=127.0.0.1 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.local.only=false" restart: unless-stopped healthcheck: diff --git a/pom.xml b/pom.xml index 42e6e8d9a..9c38d68ac 100644 --- a/pom.xml +++ b/pom.xml @@ -108,6 +108,10 @@ io.quarkus quarkus-smallrye-openapi + + io.quarkus + quarkus-smallrye-graphql + io.smallrye.reactive smallrye-mutiny-vertx-web-client diff --git a/schema/openapi.yaml b/schema/openapi.yaml index fcda791f2..4ce4608a6 100644 --- a/schema/openapi.yaml +++ b/schema/openapi.yaml @@ -31,6 +31,8 @@ components: type: integer downloadUrl: type: string + jvmId: + type: string metadata: $ref: '#/components/schemas/Metadata' name: @@ -382,6 +384,31 @@ paths: - SecurityScheme: [] tags: - Recordings + /api/beta/fs/recordings/{jvmId}: + get: + parameters: + - in: path + name: jvmId + required: true + schema: + type: string + responses: + "200": + content: + application/json: + schema: + items: + $ref: '#/components/schemas/ArchivedRecordingDirectory' + type: array + description: OK + "401": + description: Not Authorized + "403": + description: Not Allowed + security: + - SecurityScheme: [] + tags: + - Recordings /api/beta/fs/recordings/{jvmId}/{filename}: delete: parameters: @@ -519,6 +546,30 @@ paths: - SecurityScheme: [] tags: - Recordings + /api/beta/recordings/{connectUrl}/{filename}/upload: + post: + 
parameters: + - in: path + name: connectUrl + required: true + schema: + type: string + - in: path + name: filename + required: true + schema: + type: string + responses: + "200": + description: OK + "401": + description: Not Authorized + "403": + description: Not Allowed + security: + - SecurityScheme: [] + tags: + - Recordings /api/beta/recordings/{jvmId}: get: parameters: @@ -1242,6 +1293,31 @@ paths: description: OK tags: - Discovery + /api/v2.2/graphql: + get: + responses: + "200": + description: OK + "401": + description: Not Authorized + "403": + description: Not Allowed + security: + - SecurityScheme: [] + tags: + - Graph QL + post: + responses: + "200": + description: OK + "401": + description: Not Authorized + "403": + description: Not Allowed + security: + - SecurityScheme: [] + tags: + - Graph QL /api/v2/rules: get: responses: diff --git a/schema/schema.graphql b/schema/schema.graphql new file mode 100644 index 000000000..69284ffef --- /dev/null +++ b/schema/schema.graphql @@ -0,0 +1,304 @@ +type ActiveRecording { + continuous: Boolean! + "Archive the specified Flight Recording" + doArchive: ArchivedRecording + "Delete the specified Flight Recording" + doDelete: ActiveRecording + "Updates the metadata labels for an existing Flight Recording." + doPutMetadata(metadataInput: MetadataLabelsInput): ActiveRecording + "Stop the specified Flight Recording" + doStop: ActiveRecording + "URL for GET request to retrieve the JFR binary file content of this recording" + downloadUrl: String + duration: BigInteger! + id: BigInteger + maxAge: BigInteger! + maxSize: BigInteger! + metadata: Metadata! + name: String! + remoteId: BigInteger! + "URL for GET request to retrieve a JSON formatted Automated Analysis Report of this recording" + reportUrl: String + startTime: BigInteger! + state: RecordingState! + target: Target! + toDisk: Boolean! +} + +type ActiveRecordings { + aggregate: AggregateInfo! + data: [ActiveRecording]! +} + +type AggregateInfo { + "The number of elements in this collection" + count: BigInteger! + "The sum of sizes of elements in this collection, or 0 if not applicable" + size: BigInteger! +} + +type Annotations { + cryostat( + "Get entry/entries for a certain key/s" + key: [String] + ): [Entry_String_String] + platform( + "Get entry/entries for a certain key/s" + key: [String] + ): [Entry_String_String] +} + +type ArchivedRecording { + archivedTime: BigInteger! + doDelete: ArchivedRecording! + doPutMetadata(metadataInput: MetadataLabelsInput): ArchivedRecording! + "URL for GET request to retrieve the JFR binary file content of this recording" + downloadUrl: String + jvmId: String + metadata: Metadata + name: String + "URL for GET request to retrieve a JSON formatted Automated Analysis Report of this recording" + reportUrl: String + size: BigInteger! +} + +type ArchivedRecordings { + aggregate: AggregateInfo! + data: [ArchivedRecording]! +} + +type DiscoveryNode { + children: [DiscoveryNode] + "Get target nodes that are descendants of this node. That is, get the set of leaf nodes from anywhere below this node's subtree." + descendantTargets(filter: DiscoveryNodeFilterInput): [DiscoveryNode] + id: BigInteger + labels( + "Get entry/entries for a certain key/s" + key: [String] + ): [Entry_String_String]! + name: String! + nodeType: String! 
+ target: Target +} + +type Entry_String_String { + key: String + value: String +} + +type MBeanMetrics { + jvmId: String + memory: MemoryMetrics + os: OperatingSystemMetrics + runtime: RuntimeMetrics + thread: ThreadMetrics +} + +type MemoryMetrics { + freeHeapMemory: BigInteger! + freeNonHeapMemory: BigInteger! + heapMemoryUsage: MemoryUtilization + heapMemoryUsagePercent: Float! + nonHeapMemoryUsage: MemoryUtilization + objectPendingFinalizationCount: BigInteger! + verbose: Boolean! +} + +type MemoryUtilization { + committed: BigInteger! + init: BigInteger! + max: BigInteger! + used: BigInteger! +} + +type Metadata { + "ISO-8601" + expiry: DateTime + labels( + "Get entry/entries for a certain key/s" + key: [String] + ): [Entry_String_String] +} + +"Mutation root" +type Mutation { + "Archive an existing Flight Recording matching the given filter, on all Targets under the subtrees of the discovery nodes matching the given filter" + archiveRecording(nodes: DiscoveryNodeFilterInput!, recordings: ActiveRecordingsFilterInput): [ArchivedRecording] + "Start a new Flight Recording on all Targets under the subtrees of the discovery nodes matching the given filter" + createRecording(nodes: DiscoveryNodeFilterInput!, recording: RecordingSettingsInput!): [ActiveRecording] + "Create a Flight Recorder Snapshot on all Targets under the subtrees of the discovery nodes matching the given filter" + createSnapshot(nodes: DiscoveryNodeFilterInput!): [ActiveRecording] + "Delete an existing Flight Recording matching the given filter, on all Targets under the subtrees of the discovery nodes matching the given filter" + deleteRecording(nodes: DiscoveryNodeFilterInput!, recordings: ActiveRecordingsFilterInput): [ActiveRecording] + "Stop an existing Flight Recording matching the given filter, on all Targets under the subtrees of the discovery nodes matching the given filter" + stopRecording(nodes: DiscoveryNodeFilterInput!, recordings: ActiveRecordingsFilterInput): [ActiveRecording] +} + +type OperatingSystemMetrics { + arch: String + availableProcessors: Int! + committedVirtualMemorySize: BigInteger! + freePhysicalMemorySize: BigInteger! + freeSwapSpaceSize: BigInteger! + name: String + processCpuLoad: Float! + processCpuTime: BigInteger! + systemCpuLoad: Float! + systemLoadAverage: Float! + totalPhysicalMemorySize: BigInteger! + totalSwapSpaceSize: BigInteger! + version: String +} + +"Query root" +type Query { + archivedRecordings(filter: ArchivedRecordingsFilterInput): ArchivedRecordings + "Get all environment nodes in the discovery tree with optional filtering" + environmentNodes(filter: DiscoveryNodeFilterInput): [DiscoveryNode] + "Get the root target discovery node" + rootNode: DiscoveryNode + "Get the Target discovery nodes, i.e. the leaf nodes of the discovery tree" + targetNodes(filter: DiscoveryNodeFilterInput): [DiscoveryNode] +} + +type Recordings { + active(filter: ActiveRecordingsFilterInput): ActiveRecordings + archived(filter: ArchivedRecordingsFilterInput): ArchivedRecordings +} + +type RuntimeMetrics { + bootClassPath: String + bootClassPathSupported: Boolean! + classPath: String + inputArguments: [String] + libraryPath: String + managementSpecVersion: String + name: String + specName: String + specVendor: String + specVersion: String + startTime: BigInteger! + systemProperties( + "Get entry/entries for a certain key/s" + key: [String] + ): [Entry_String_String] + uptime: BigInteger! 
+ vmName: String + vmVendor: String + vmVersion: String +} + +type Target { + activeRecordings(filter: ActiveRecordingsFilterInput): ActiveRecordings + agent: Boolean! + alias: String! + annotations: Annotations! + archivedRecordings(filter: ArchivedRecordingsFilterInput): ArchivedRecordings + connectUrl: String! + "Create a new Flight Recorder Snapshot on the specified Target" + doSnapshot: ActiveRecording + "Start a new Flight Recording on the specified Target" + doStartRecording(recording: RecordingSettingsInput!): ActiveRecording + id: BigInteger + jvmId: String + labels( + "Get entry/entries for a certain key/s" + key: [String] + ): [Entry_String_String]! + "Get live MBean metrics snapshot from the specified Target" + mbeanMetrics: MBeanMetrics + "Get the active and archived recordings belonging to this target" + recordings: Recordings +} + +type ThreadMetrics { + allThreadIds: [BigInteger] + currentThreadCpuTime: BigInteger! + currentThreadCpuTimeSupported: Boolean! + currentThreadUserTime: BigInteger! + daemonThreadCount: Int! + objectMonitorUsageSupported: Boolean! + peakThreadCount: Int! + synchronizerUsageSupported: Boolean! + threadContentionMonitoringEnabled: Boolean! + threadContentionMonitoringSupported: Boolean! + threadCount: Int! + threadCpuTimeEnabled: Boolean! + threadCpuTimeSupported: Boolean! + totalStartedThreadCount: BigInteger! +} + +"Running state of an active Flight Recording" +enum RecordingState { + "CLOSED" + CLOSED + "DELAYED" + DELAYED + "NEW" + NEW + "RUNNING" + RUNNING + "STOPPED" + STOPPED +} + +input ActiveRecordingsFilterInput { + continuous: Boolean + durationMsGreaterThanEqual: BigInteger + durationMsLessThanEqual: BigInteger + labels: [String] + name: String + names: [String] + startTimeMsAfterEqual: BigInteger + startTimeMsBeforeEqual: BigInteger + state: RecordingState + toDisk: Boolean +} + +input ArchivedRecordingsFilterInput { + archivedTimeAfterEqual: BigInteger + archivedTimeBeforeEqual: BigInteger + labels: [String] + name: String + names: [String] + sizeBytesGreaterThanEqual: BigInteger + sizeBytesLessThanEqual: BigInteger + sourceTarget: String +} + +input DiscoveryNodeFilterInput { + annotations: [String] + id: BigInteger + ids: [BigInteger] + labels: [String] + name: String + names: [String] + nodeTypes: [String] +} + +input Entry_String_StringInput { + key: String + value: String +} + +input MetadataLabelsInput { + labels: [Entry_String_StringInput] +} + +input RecordingMetadataInput { + labels: [Entry_String_StringInput] +} + +input RecordingSettingsInput { + archiveOnStop: Boolean + continuous: Boolean + duration: BigInteger + maxAge: BigInteger + maxSize: BigInteger + metadata: RecordingMetadataInput + name: String! + replace: String + template: String! + templateType: String! 
+ toDisk: Boolean +} diff --git a/schema/update.bash b/schema/update.bash index 07d109017..6ace30cca 100755 --- a/schema/update.bash +++ b/schema/update.bash @@ -24,4 +24,4 @@ while true; do fi done wget http://localhost:8181/api -O - | yq -P 'sort_keys(..)' > "${DIR}/openapi.yaml" -# wget http://localhost:8181/api/v3/graphql/schema.graphql -O "${DIR}/schema.graphql" +wget http://localhost:8181/api/v3/graphql/schema.graphql -O "${DIR}/schema.graphql" diff --git a/src/main/java/io/cryostat/JsonRequestFilter.java b/src/main/java/io/cryostat/JsonRequestFilter.java index 36dec2f84..5c9109554 100644 --- a/src/main/java/io/cryostat/JsonRequestFilter.java +++ b/src/main/java/io/cryostat/JsonRequestFilter.java @@ -37,7 +37,12 @@ public class JsonRequestFilter implements ContainerRequestFilter { static final Set disallowedFields = Set.of("id"); static final Set allowedPathPatterns = - Set.of("/api/v2.2/discovery", "/api/v2/rules/[\\w]+", "/api/beta/matchExpressions"); + Set.of( + "/api/v2.2/discovery", + "/api/v2/rules/[\\w]+", + "/api/beta/matchExpressions", + "/api/v2.2/graphql", + "/api/v3/graphql"); private final Map compiledPatterns = new HashMap<>(); private final ObjectMapper objectMapper = new ObjectMapper(); diff --git a/src/main/java/io/cryostat/ObjectMapperCustomization.java b/src/main/java/io/cryostat/ObjectMapperCustomization.java new file mode 100644 index 000000000..7284b0282 --- /dev/null +++ b/src/main/java/io/cryostat/ObjectMapperCustomization.java @@ -0,0 +1,83 @@ +/* + * Copyright The Cryostat Authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.cryostat; + +import java.io.IOException; +import java.util.Map; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.Version; +import com.fasterxml.jackson.databind.BeanDescription; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationConfig; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.module.SimpleModule; +import com.fasterxml.jackson.databind.ser.BeanSerializerModifier; +import com.fasterxml.jackson.databind.type.MapType; +import io.quarkus.jackson.ObjectMapperCustomizer; +import jakarta.inject.Singleton; + +@Singleton +public class ObjectMapperCustomization implements ObjectMapperCustomizer { + + @Override + public void customize(ObjectMapper objectMapper) { + // FIXME get this version information from the maven build somehow + SimpleModule mapModule = + new SimpleModule( + "MapSerialization", new Version(3, 0, 0, null, "io.cryostat", "cryostat")); + + mapModule.setSerializerModifier(new MapSerializerModifier()); + + objectMapper.registerModule(mapModule); + } + + static class MapSerializerModifier extends BeanSerializerModifier { + @Override + public JsonSerializer modifyMapSerializer( + SerializationConfig config, + MapType valueType, + BeanDescription beanDesc, + JsonSerializer serializer) { + if (valueType.getKeyType().getRawClass().equals(String.class) + && valueType.getContentType().getRawClass().equals(String.class)) { + return new MapSerializer(); + } + return serializer; + } + } + + static class MapSerializer extends JsonSerializer> { + + @Override + public void serialize(Map map, JsonGenerator gen, SerializerProvider serializers) + throws IOException { + gen.writeStartArray(); + + for (var entry : map.entrySet()) { + gen.writeStartObject(); + + gen.writePOJOField("key", entry.getKey()); + gen.writePOJOField("value", entry.getValue()); + + gen.writeEndObject(); + } + + gen.writeEndArray(); + } + } +} diff --git a/src/main/java/io/cryostat/graphql/ActiveRecordings.java b/src/main/java/io/cryostat/graphql/ActiveRecordings.java new file mode 100644 index 000000000..a78c3c490 --- /dev/null +++ b/src/main/java/io/cryostat/graphql/ActiveRecordings.java @@ -0,0 +1,422 @@ +/* + * Copyright The Cryostat Authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.cryostat.graphql; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.function.Predicate; + +import org.openjdk.jmc.common.unit.QuantityConversionException; + +import io.cryostat.core.templates.Template; +import io.cryostat.core.templates.TemplateType; +import io.cryostat.discovery.DiscoveryNode; +import io.cryostat.graphql.RootNode.DiscoveryNodeFilter; +import io.cryostat.graphql.TargetNodes.AggregateInfo; +import io.cryostat.graphql.TargetNodes.Recordings; +import io.cryostat.graphql.matchers.LabelSelectorMatcher; +import io.cryostat.recordings.ActiveRecording; +import io.cryostat.recordings.RecordingHelper; +import io.cryostat.recordings.RecordingHelper.RecordingOptions; +import io.cryostat.recordings.RecordingHelper.RecordingReplace; +import io.cryostat.recordings.Recordings.ArchivedRecording; +import io.cryostat.targets.Target; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.smallrye.common.annotation.Blocking; +import io.smallrye.graphql.api.Nullable; +import io.smallrye.graphql.execution.ExecutionException; +import io.smallrye.mutiny.Uni; +import jakarta.inject.Inject; +import jakarta.transaction.Transactional; +import jdk.jfr.RecordingState; +import org.eclipse.microprofile.graphql.Description; +import org.eclipse.microprofile.graphql.GraphQLApi; +import org.eclipse.microprofile.graphql.Mutation; +import org.eclipse.microprofile.graphql.NonNull; +import org.eclipse.microprofile.graphql.Source; +import org.jboss.logging.Logger; + +@GraphQLApi +public class ActiveRecordings { + + @Inject RecordingHelper recordingHelper; + @Inject Logger logger; + + @Blocking + @Transactional + @Mutation + @Description( + "Start a new Flight Recording on all Targets under the subtrees of the discovery nodes" + + " matching the given filter") + public List createRecording( + @NonNull DiscoveryNodeFilter nodes, @NonNull RecordingSettings recording) { + return DiscoveryNode.listAll().stream() + .filter(nodes) + .flatMap( + node -> + RootNode.recurseChildren(node, n -> n.target != null).stream() + .map(n -> n.target)) + .map( + target -> { + var template = + recordingHelper.getPreferredTemplate( + target, + recording.template, + TemplateType.valueOf(recording.templateType)); + try { + return recordingHelper + .startRecording( + target, + Optional.ofNullable(recording.replace) + .map(RecordingReplace::valueOf) + .orElse(RecordingReplace.STOPPED), + template, + recording.asOptions(), + Optional.ofNullable(recording.metadata) + .map(s -> s.labels) + .orElse(Map.of())) + .await() + .atMost(Duration.ofSeconds(10)); + } catch (QuantityConversionException qce) { + throw new ExecutionException(qce); + } + }) + .toList(); + } + + @Blocking + @Transactional + @Mutation + @Description( + "Archive an existing Flight Recording matching the given filter, on all Targets under" + + " the subtrees of the discovery nodes matching the given filter") + public List archiveRecording( + @NonNull DiscoveryNodeFilter nodes, @Nullable ActiveRecordingsFilter recordings) { + return DiscoveryNode.listAll().stream() + .filter(nodes) + .flatMap( + node -> + RootNode.recurseChildren(node, n -> n.target != null).stream() + .map(n -> n.target)) + .flatMap( + t -> + t.activeRecordings.stream() + .filter(r -> recordings == null || recordings.test(r))) + .map( + recording -> { + try { + return recordingHelper.archiveRecording(recording, null, null); 
+ } catch (Exception e) { + throw new ExecutionException(e); + } + }) + .toList(); + } + + @Blocking + @Transactional + @Mutation + @Description( + "Stop an existing Flight Recording matching the given filter, on all Targets under" + + " the subtrees of the discovery nodes matching the given filter") + public List stopRecording( + @NonNull DiscoveryNodeFilter nodes, @Nullable ActiveRecordingsFilter recordings) { + return DiscoveryNode.listAll().stream() + .filter(nodes) + .flatMap( + node -> + RootNode.recurseChildren(node, n -> n.target != null).stream() + .map(n -> n.target)) + .flatMap( + t -> + t.activeRecordings.stream() + .filter(r -> recordings == null || recordings.test(r))) + .map( + recording -> { + try { + return recordingHelper + .stopRecording(recording) + .await() + .atMost(Duration.ofSeconds(10)); + } catch (Exception e) { + throw new ExecutionException(e); + } + }) + .toList(); + } + + @Blocking + @Transactional + @Mutation + @Description( + "Delete an existing Flight Recording matching the given filter, on all Targets under" + + " the subtrees of the discovery nodes matching the given filter") + public List deleteRecording( + @NonNull DiscoveryNodeFilter nodes, @Nullable ActiveRecordingsFilter recordings) { + var activeRecordings = + DiscoveryNode.listAll().stream() + .filter(nodes) + .flatMap( + node -> + RootNode.recurseChildren(node, n -> n.target != null) + .stream() + .map(n -> n.target)) + .flatMap( + t -> + t.activeRecordings.stream() + .filter( + r -> + recordings == null + || recordings.test(r))) + .toList(); + return activeRecordings.stream() + .map( + recording -> { + try { + return recordingHelper + .deleteRecording(recording) + .await() + .atMost(Duration.ofSeconds(10)); + } catch (Exception e) { + throw new ExecutionException(e); + } + }) + .toList(); + } + + @Blocking + @Transactional + @Mutation + @Description( + "Create a Flight Recorder Snapshot on all Targets under" + + " the subtrees of the discovery nodes matching the given filter") + public List createSnapshot(@NonNull DiscoveryNodeFilter nodes) { + return DiscoveryNode.listAll().stream() + .filter(nodes) + .flatMap( + node -> + RootNode.recurseChildren(node, n -> n.target != null).stream() + .map(n -> n.target)) + .map( + target -> { + try { + return recordingHelper + .createSnapshot(target) + .await() + .atMost(Duration.ofSeconds(10)); + } catch (Exception e) { + throw new ExecutionException(e); + } + }) + .toList(); + } + + @Blocking + @Transactional + @Description("Start a new Flight Recording on the specified Target") + public Uni doStartRecording( + @Source Target target, @NonNull RecordingSettings recording) + throws QuantityConversionException { + var fTarget = Target.findById(target.id); + Template template = + recordingHelper.getPreferredTemplate( + fTarget, recording.template, TemplateType.valueOf(recording.templateType)); + return recordingHelper.startRecording( + fTarget, + Optional.ofNullable(recording.replace) + .map(RecordingReplace::valueOf) + .orElse(RecordingReplace.STOPPED), + template, + recording.asOptions(), + Optional.ofNullable(recording.metadata).map(s -> s.labels).orElse(Map.of())); + } + + @Blocking + @Transactional + @Description("Create a new Flight Recorder Snapshot on the specified Target") + public Uni doSnapshot(@Source Target target) { + var fTarget = Target.findById(target.id); + return recordingHelper.createSnapshot(fTarget); + } + + @Blocking + @Transactional + @Description("Stop the specified Flight Recording") + public Uni doStop(@Source ActiveRecording 
recording) { + var ar = ActiveRecording.findById(recording.id); + return recordingHelper.stopRecording(ar); + } + + @Blocking + @Transactional + @Description("Delete the specified Flight Recording") + public Uni doDelete(@Source ActiveRecording recording) { + var ar = ActiveRecording.findById(recording.id); + return recordingHelper.deleteRecording(ar); + } + + @Blocking + @Transactional + @Description("Archive the specified Flight Recording") + public Uni doArchive(@Source ActiveRecording recording) throws Exception { + var ar = ActiveRecording.findById(recording.id); + return Uni.createFrom().item(recordingHelper.archiveRecording(ar, null, null)); + } + + public TargetNodes.ActiveRecordings active( + @Source Recordings recordings, ActiveRecordingsFilter filter) { + var out = new TargetNodes.ActiveRecordings(); + out.data = new ArrayList<>(); + out.aggregate = AggregateInfo.empty(); + + var in = recordings.active; + if (in != null && in.data != null) { + out.data = + in.data.stream().filter(r -> filter == null ? true : filter.test(r)).toList(); + out.aggregate = AggregateInfo.fromActive(out.data); + } + + return out; + } + + @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD") + public static class RecordingSettings { + public @NonNull String name; + public @NonNull String template; + public @NonNull String templateType; + public @Nullable String replace; + public @Nullable Boolean continuous; + public @Nullable Boolean archiveOnStop; + public @Nullable Boolean toDisk; + public @Nullable Long duration; + public @Nullable Long maxSize; + public @Nullable Long maxAge; + public @Nullable RecordingMetadata metadata; + + public RecordingOptions asOptions() { + return new RecordingOptions( + name, + Optional.ofNullable(toDisk), + Optional.ofNullable(archiveOnStop), + Optional.ofNullable(duration), + Optional.ofNullable(maxSize), + Optional.ofNullable(maxAge)); + } + } + + @Blocking + @Transactional + @Description("Updates the metadata labels for an existing Flight Recording.") + public Uni doPutMetadata( + @Source ActiveRecording recording, MetadataLabels metadataInput) { + return Uni.createFrom() + .item( + () -> { + return recordingHelper.updateRecordingMetadata( + recording.id, metadataInput.getLabels()); + }); + } + + @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD") + public static class MetadataLabels { + + private Map labels; + + public MetadataLabels() {} + + public MetadataLabels(Map labels) { + this.labels = new HashMap<>(labels); + } + + public Map getLabels() { + return new HashMap<>(labels); + } + + public void setLabels(Map labels) { + this.labels = new HashMap<>(labels); + } + } + + @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD") + public static class RecordingMetadata { + public @Nullable Map labels; + } + + @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD") + public static class ActiveRecordingsFilter implements Predicate { + public @Nullable String name; + public @Nullable List names; + public @Nullable List labels; + public @Nullable RecordingState state; + public @Nullable Boolean continuous; + public @Nullable Boolean toDisk; + public @Nullable Long durationMsGreaterThanEqual; + public @Nullable Long durationMsLessThanEqual; + public @Nullable Long startTimeMsAfterEqual; + public @Nullable Long startTimeMsBeforeEqual; + + @Override + public boolean test(ActiveRecording r) { + Predicate matchesName = + n -> name == null || Objects.equals(name, n.name); + Predicate matchesNames = n -> names == null || 
names.contains(n.name); + Predicate matchesLabels = + n -> + labels == null + || labels.stream() + .allMatch( + label -> + LabelSelectorMatcher.parse(label) + .test(n.metadata.labels())); + Predicate matchesState = n -> state == null || n.state.equals(state); + Predicate matchesContinuous = + n -> continuous == null || continuous.equals(n.continuous); + Predicate matchesToDisk = + n -> toDisk == null || toDisk.equals(n.toDisk); + Predicate matchesDurationGte = + n -> + durationMsGreaterThanEqual == null + || durationMsGreaterThanEqual >= n.duration; + Predicate matchesDurationLte = + n -> durationMsLessThanEqual == null || durationMsLessThanEqual <= n.duration; + Predicate matchesStartTimeAfter = + n -> startTimeMsAfterEqual == null || startTimeMsAfterEqual >= n.startTime; + Predicate matchesStartTimeBefore = + n -> startTimeMsBeforeEqual == null || startTimeMsBeforeEqual <= n.startTime; + + return List.of( + matchesName, + matchesNames, + matchesLabels, + matchesState, + matchesContinuous, + matchesToDisk, + matchesDurationGte, + matchesDurationLte, + matchesStartTimeBefore, + matchesStartTimeAfter) + .stream() + .reduce(x -> true, Predicate::and) + .test(r); + } + } +} diff --git a/src/main/java/io/cryostat/graphql/ArchivedRecordings.java b/src/main/java/io/cryostat/graphql/ArchivedRecordings.java new file mode 100644 index 000000000..e62a40a5e --- /dev/null +++ b/src/main/java/io/cryostat/graphql/ArchivedRecordings.java @@ -0,0 +1,157 @@ +/* + * Copyright The Cryostat Authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.cryostat.graphql; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.function.Predicate; + +import io.cryostat.graphql.ActiveRecordings.MetadataLabels; +import io.cryostat.graphql.TargetNodes.AggregateInfo; +import io.cryostat.graphql.TargetNodes.Recordings; +import io.cryostat.graphql.matchers.LabelSelectorMatcher; +import io.cryostat.recordings.RecordingHelper; +import io.cryostat.recordings.Recordings.ArchivedRecording; +import io.cryostat.recordings.Recordings.Metadata; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.smallrye.common.annotation.Blocking; +import io.smallrye.graphql.api.Nullable; +import jakarta.inject.Inject; +import org.eclipse.microprofile.graphql.GraphQLApi; +import org.eclipse.microprofile.graphql.NonNull; +import org.eclipse.microprofile.graphql.Query; +import org.eclipse.microprofile.graphql.Source; + +@GraphQLApi +public class ArchivedRecordings { + + @Inject RecordingHelper recordingHelper; + + @Blocking + @Query("archivedRecordings") + public TargetNodes.ArchivedRecordings listArchivedRecordings(ArchivedRecordingsFilter filter) { + var r = new TargetNodes.ArchivedRecordings(); + r.data = + recordingHelper + .listArchivedRecordings(filter == null ? 
null : filter.sourceTarget) + .stream() + .filter(filter) + .toList(); + r.aggregate = AggregateInfo.fromArchived(r.data); + return r; + } + + public TargetNodes.ArchivedRecordings archived( + @Source Recordings recordings, ArchivedRecordingsFilter filter) { + var out = new TargetNodes.ArchivedRecordings(); + out.data = new ArrayList<>(); + out.aggregate = AggregateInfo.empty(); + + var in = recordings.archived; + if (in != null && in.data != null) { + out.data = + in.data.stream().filter(r -> filter == null ? true : filter.test(r)).toList(); + out.aggregate = AggregateInfo.fromArchived(out.data); + } + + return out; + } + + @NonNull + public ArchivedRecording doDelete(@Source ArchivedRecording recording) { + recordingHelper.deleteArchivedRecording(recording.jvmId(), recording.name()); + return recording; + } + + @NonNull + public ArchivedRecording doPutMetadata( + @Source ArchivedRecording recording, MetadataLabels metadataInput) { + recordingHelper.updateArchivedRecordingMetadata( + recording.jvmId(), recording.name(), metadataInput.getLabels()); + + String downloadUrl = recordingHelper.downloadUrl(recording.jvmId(), recording.name()); + String reportUrl = recordingHelper.reportUrl(recording.jvmId(), recording.name()); + + return new ArchivedRecording( + recording.jvmId(), + recording.name(), + downloadUrl, + reportUrl, + new Metadata(metadataInput.getLabels()), + recording.size(), + recording.archivedTime()); + } + + @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD") + public static class ArchivedRecordingsFilter implements Predicate { + public @Nullable String name; + public @Nullable List names; + public @Nullable String sourceTarget; + public @Nullable List labels; + public @Nullable Long sizeBytesGreaterThanEqual; + public @Nullable Long sizeBytesLessThanEqual; + public @Nullable Long archivedTimeAfterEqual; + public @Nullable Long archivedTimeBeforeEqual; + + @Override + public boolean test(ArchivedRecording r) { + Predicate matchesName = + n -> name == null || Objects.equals(name, n.name()); + Predicate matchesNames = + n -> names == null || names.contains(n.name()); + Predicate matchesSourceTarget = + n -> + sourceTarget == null + || Objects.equals( + r.metadata().labels().get("connectUrl"), sourceTarget); + Predicate matchesLabels = + n -> + labels == null + || labels.stream() + .allMatch( + label -> + LabelSelectorMatcher.parse(label) + .test(n.metadata().labels())); + Predicate matchesSizeGte = + n -> sizeBytesGreaterThanEqual == null || sizeBytesGreaterThanEqual >= n.size(); + Predicate matchesSizeLte = + n -> sizeBytesLessThanEqual == null || sizeBytesLessThanEqual <= n.size(); + Predicate matchesArchivedTimeGte = + n -> + archivedTimeAfterEqual == null + || archivedTimeAfterEqual >= n.archivedTime(); + Predicate matchesArchivedTimeLte = + n -> + archivedTimeBeforeEqual == null + || archivedTimeBeforeEqual <= n.archivedTime(); + + return List.of( + matchesName, + matchesNames, + matchesSourceTarget, + matchesLabels, + matchesSizeGte, + matchesSizeLte, + matchesArchivedTimeGte, + matchesArchivedTimeLte) + .stream() + .reduce(x -> true, Predicate::and) + .test(r); + } + } +} diff --git a/src/main/java/io/cryostat/graphql/EnvironmentNodes.java b/src/main/java/io/cryostat/graphql/EnvironmentNodes.java new file mode 100644 index 000000000..d70859989 --- /dev/null +++ b/src/main/java/io/cryostat/graphql/EnvironmentNodes.java @@ -0,0 +1,39 @@ +/* + * Copyright The Cryostat Authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.cryostat.graphql; + +import java.util.List; + +import io.cryostat.discovery.DiscoveryNode; +import io.cryostat.graphql.RootNode.DiscoveryNodeFilter; + +import io.smallrye.graphql.api.Nullable; +import org.eclipse.microprofile.graphql.Description; +import org.eclipse.microprofile.graphql.GraphQLApi; +import org.eclipse.microprofile.graphql.Query; + +@GraphQLApi +public class EnvironmentNodes { + + @Query("environmentNodes") + @Description("Get all environment nodes in the discovery tree with optional filtering") + public List environmentNodes(@Nullable DiscoveryNodeFilter filter) { + return RootNode.recurseChildren(DiscoveryNode.getUniverse(), node -> node.target == null) + .stream() + .filter(filter) + .toList(); + } +} diff --git a/src/main/java/io/cryostat/graphql/GraphQL.java b/src/main/java/io/cryostat/graphql/GraphQL.java new file mode 100644 index 000000000..3f15edf48 --- /dev/null +++ b/src/main/java/io/cryostat/graphql/GraphQL.java @@ -0,0 +1,63 @@ +/* + * Copyright The Cryostat Authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.cryostat.graphql; + +import java.net.URI; + +import jakarta.annotation.security.RolesAllowed; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.UriInfo; +import org.apache.hc.core5.net.URIBuilder; +import org.jboss.resteasy.reactive.RestResponse; + +@Path("") +public class GraphQL { + + @GET + @Path("/api/v2.2/graphql") + @RolesAllowed("write") + public Response redirectGet(UriInfo info) throws Exception { + var uriBuilder = new URIBuilder(); + info.getQueryParameters() + .entrySet() + .forEach( + entry -> { + if (entry.getValue().size() != 1) { + return; + } + uriBuilder.addParameter(entry.getKey(), entry.getValue().get(0)); + }); + return Response.status(RestResponse.Status.PERMANENT_REDIRECT) + .location( + URI.create( + String.format( + "/api/v3/graphql?%s", + String.join("&", uriBuilder.build().getRawQuery())))) + .build(); + } + + @POST + @Path("/api/v2.2/graphql") + @RolesAllowed("write") + public Response redirectPost() throws Exception { + return Response.status(RestResponse.Status.PERMANENT_REDIRECT) + .location(URI.create("/api/v3/graphql")) + .build(); + } +} diff --git a/src/main/java/io/cryostat/graphql/RecordingLinks.java b/src/main/java/io/cryostat/graphql/RecordingLinks.java new file mode 100644 index 000000000..fbc732d55 --- /dev/null +++ b/src/main/java/io/cryostat/graphql/RecordingLinks.java @@ -0,0 +1,55 @@ +/* + * Copyright The Cryostat Authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.cryostat.graphql; + +import io.cryostat.recordings.ActiveRecording; +import io.cryostat.recordings.RecordingHelper; +import io.cryostat.recordings.Recordings.ArchivedRecording; + +import jakarta.inject.Inject; +import org.eclipse.microprofile.graphql.Description; +import org.eclipse.microprofile.graphql.GraphQLApi; +import org.eclipse.microprofile.graphql.Source; + +@GraphQLApi +public class RecordingLinks { + + @Inject RecordingHelper recordingHelper; + + @Description("URL for GET request to retrieve the JFR binary file content of this recording") + public String downloadUrl(@Source ActiveRecording recording) { + return recordingHelper.downloadUrl(recording); + } + + @Description( + "URL for GET request to retrieve a JSON formatted Automated Analysis Report of this" + + " recording") + public String reportUrl(@Source ActiveRecording recording) { + return recordingHelper.reportUrl(recording); + } + + @Description("URL for GET request to retrieve the JFR binary file content of this recording") + public String downloadUrl(@Source ArchivedRecording recording) { + return recording.downloadUrl(); + } + + @Description( + "URL for GET request to retrieve a JSON formatted Automated Analysis Report of this" + + " recording") + public String reportUrl(@Source ArchivedRecording recording) { + return recording.reportUrl(); + } +} diff --git a/src/main/java/io/cryostat/graphql/RootNode.java b/src/main/java/io/cryostat/graphql/RootNode.java new file mode 100644 index 000000000..ba5f5e74e --- /dev/null +++ b/src/main/java/io/cryostat/graphql/RootNode.java @@ -0,0 +1,117 @@ +/* + * Copyright The Cryostat Authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.cryostat.graphql; + +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.function.Predicate; + +import io.cryostat.discovery.DiscoveryNode; +import io.cryostat.graphql.matchers.LabelSelectorMatcher; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.smallrye.graphql.api.Nullable; +import org.eclipse.microprofile.graphql.Description; +import org.eclipse.microprofile.graphql.GraphQLApi; +import org.eclipse.microprofile.graphql.Query; +import org.eclipse.microprofile.graphql.Source; + +@GraphQLApi +public class RootNode { + + @Query("rootNode") + @Description("Get the root target discovery node") + public DiscoveryNode getRootNode() { + return DiscoveryNode.getUniverse(); + } + + @Description( + "Get target nodes that are descendants of this node. That is, get the set of leaf nodes" + + " from anywhere below this node's subtree.") + public List descendantTargets( + @Source DiscoveryNode discoveryNode, DiscoveryNodeFilter filter) { + // TODO do this filtering at the database query level as much as possible. As is, this will + // load the entire discovery tree out of the database, then perform the filtering at the + // application level. + return recurseChildren(discoveryNode, n -> n.target != null).stream() + .filter(n -> filter == null ? 
true : filter.test(n)) + .toList(); + } + + static Set recurseChildren( + DiscoveryNode node, Predicate predicate) { + Set result = new HashSet<>(); + if (predicate.test(node)) { + result.add(node); + } + if (node.children != null) { + node.children.forEach(c -> result.addAll(recurseChildren(c, predicate))); + } + return result; + } + + @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD") + public static class DiscoveryNodeFilter implements Predicate { + public @Nullable Long id; + public @Nullable List ids; + public @Nullable String name; + public @Nullable List names; + public @Nullable List nodeTypes; + public @Nullable List labels; + public @Nullable List annotations; + + @Override + public boolean test(DiscoveryNode t) { + Predicate matchesId = n -> id == null || id.equals(n.id); + Predicate matchesIds = n -> ids == null || ids.contains(n.id); + Predicate matchesName = n -> name == null || name.equals(n.name); + Predicate matchesNames = n -> names == null || names.contains(n.name); + Predicate matchesNodeTypes = + n -> nodeTypes == null || nodeTypes.contains(n.nodeType); + Predicate matchesLabels = + n -> + labels == null + || labels.stream() + .allMatch( + label -> + LabelSelectorMatcher.parse(label) + .test(n.labels)); + Predicate matchesAnnotations = + n -> + annotations == null + || annotations.stream() + .allMatch( + annotation -> + LabelSelectorMatcher.parse(annotation) + .test( + n.target.annotations + .merged())); + + return List.of( + matchesId, + matchesIds, + matchesName, + matchesNames, + matchesNodeTypes, + matchesLabels, + matchesAnnotations) + .stream() + .reduce(x -> true, Predicate::and) + .test(t); + } + } +} diff --git a/src/main/java/io/cryostat/graphql/SchemaExtension.java b/src/main/java/io/cryostat/graphql/SchemaExtension.java new file mode 100644 index 000000000..c330a057e --- /dev/null +++ b/src/main/java/io/cryostat/graphql/SchemaExtension.java @@ -0,0 +1,54 @@ +/* + * Copyright The Cryostat Authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.cryostat.graphql; + +import java.util.Arrays; + +import graphql.schema.GraphQLEnumType; +import graphql.schema.GraphQLEnumValueDefinition; +import graphql.schema.GraphQLSchema; +import jakarta.enterprise.event.Observes; +import jdk.jfr.RecordingState; +import org.eclipse.microprofile.graphql.GraphQLApi; + +@GraphQLApi +public class SchemaExtension { + + public GraphQLSchema.Builder registerRecordingStateEnum( + @Observes GraphQLSchema.Builder builder) { + return createEnumType( + builder, RecordingState.class, "Running state of an active Flight Recording"); + } + + private static GraphQLSchema.Builder createEnumType( + GraphQLSchema.Builder builder, Class> klazz, String description) { + return builder.additionalType( + GraphQLEnumType.newEnum() + .name(klazz.getSimpleName()) + .description(description) + .values( + Arrays.asList(klazz.getEnumConstants()).stream() + .map( + s -> + new GraphQLEnumValueDefinition.Builder() + .name(s.name()) + .value(s) + .description(s.name()) + .build()) + .toList()) + .build()); + } +} diff --git a/src/main/java/io/cryostat/graphql/TargetNodes.java b/src/main/java/io/cryostat/graphql/TargetNodes.java new file mode 100644 index 000000000..321653955 --- /dev/null +++ b/src/main/java/io/cryostat/graphql/TargetNodes.java @@ -0,0 +1,175 @@ +/* + * Copyright The Cryostat Authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.cryostat.graphql; + +import java.util.List; + +import io.cryostat.core.net.JFRConnection; +import io.cryostat.core.net.MBeanMetrics; +import io.cryostat.discovery.DiscoveryNode; +import io.cryostat.graphql.ActiveRecordings.ActiveRecordingsFilter; +import io.cryostat.graphql.ArchivedRecordings.ArchivedRecordingsFilter; +import io.cryostat.graphql.RootNode.DiscoveryNodeFilter; +import io.cryostat.recordings.ActiveRecording; +import io.cryostat.recordings.RecordingHelper; +import io.cryostat.recordings.Recordings.ArchivedRecording; +import io.cryostat.targets.Target; +import io.cryostat.targets.TargetConnectionManager; + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import graphql.schema.DataFetchingEnvironment; +import io.smallrye.common.annotation.Blocking; +import io.smallrye.graphql.api.Context; +import io.smallrye.graphql.api.Nullable; +import io.smallrye.mutiny.Uni; +import jakarta.inject.Inject; +import org.eclipse.microprofile.graphql.Description; +import org.eclipse.microprofile.graphql.GraphQLApi; +import org.eclipse.microprofile.graphql.NonNull; +import org.eclipse.microprofile.graphql.Query; +import org.eclipse.microprofile.graphql.Source; + +@GraphQLApi +public class TargetNodes { + + @Inject RecordingHelper recordingHelper; + @Inject TargetConnectionManager connectionManager; + + @Blocking + @Query("targetNodes") + @Description("Get the Target discovery nodes, i.e. the leaf nodes of the discovery tree") + public List getTargetNodes(DiscoveryNodeFilter filter) { + // TODO do this filtering at the database query level as much as possible. 
As is, this will + // load the entire discovery tree out of the database, then perform the filtering at the + // application level. + return Target.findAll().stream() + // FIXME filtering by distinct JVM ID breaks clients that expect to be able to use a + // different connection URL (in the node filter or for client-side filtering) than + // the one we end up selecting for here. + // .filter(distinctWith(t -> t.jvmId)) + .map(t -> t.discoveryNode) + .filter(n -> filter == null ? true : filter.test(n)) + .toList(); + } + + // private static Predicate distinctWith(Function fn) { + // Set observed = ConcurrentHashMap.newKeySet(); + // return t -> observed.add(fn.apply(t)); + // } + + @Blocking + public ActiveRecordings activeRecordings( + @Source Target target, @Nullable ActiveRecordingsFilter filter) { + var fTarget = Target.findById(target.id); + var recordings = new ActiveRecordings(); + recordings.data = + fTarget.activeRecordings.stream() + .filter(r -> filter == null || filter.test(r)) + .toList(); + recordings.aggregate = AggregateInfo.fromActive(recordings.data); + return recordings; + } + + @Blocking + public ArchivedRecordings archivedRecordings( + @Source Target target, @Nullable ArchivedRecordingsFilter filter) { + var fTarget = Target.findById(target.id); + var recordings = new ArchivedRecordings(); + recordings.data = + recordingHelper.listArchivedRecordings(fTarget).stream() + .filter(r -> filter == null || filter.test(r)) + .toList(); + recordings.aggregate = AggregateInfo.fromArchived(recordings.data); + return recordings; + } + + @Blocking + @Description("Get the active and archived recordings belonging to this target") + public Recordings recordings(@Source Target target, Context context) { + var fTarget = Target.findById(target.id); + var dfe = context.unwrap(DataFetchingEnvironment.class); + var requestedFields = + dfe.getSelectionSet().getFields().stream().map(field -> field.getName()).toList(); + + var recordings = new Recordings(); + + if (requestedFields.contains("active")) { + recordings.active = new ActiveRecordings(); + recordings.active.data = fTarget.activeRecordings; + recordings.active.aggregate = AggregateInfo.fromActive(recordings.active.data); + } + + if (requestedFields.contains("archived")) { + recordings.archived = new ArchivedRecordings(); + recordings.archived.data = recordingHelper.listArchivedRecordings(fTarget); + recordings.archived.aggregate = AggregateInfo.fromArchived(recordings.archived.data); + } + + return recordings; + } + + @Blocking + @Description("Get live MBean metrics snapshot from the specified Target") + public Uni mbeanMetrics(@Source Target target) { + var fTarget = Target.findById(target.id); + return connectionManager.executeConnectedTaskUni(fTarget, JFRConnection::getMBeanMetrics); + } + + @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD") + public static class Recordings { + public @NonNull ActiveRecordings active; + public @NonNull ArchivedRecordings archived; + } + + @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD") + public static class ActiveRecordings { + public @NonNull List data; + public @NonNull AggregateInfo aggregate; + } + + @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD") + public static class ArchivedRecordings { + public @NonNull List data; + public @NonNull AggregateInfo aggregate; + } + + @SuppressFBWarnings(value = "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD") + public static class AggregateInfo { + public @NonNull @Description("The number of elements in this 
collection") long count; + public @NonNull @Description( + "The sum of sizes of elements in this collection, or 0 if not applicable") long + size; + + private AggregateInfo(long count, long size) { + this.count = count; + this.size = size; + } + + public static AggregateInfo empty() { + return new AggregateInfo(0, 0); + } + + public static AggregateInfo fromActive(List recordings) { + return new AggregateInfo(recordings.size(), 0); + } + + public static AggregateInfo fromArchived(List recordings) { + return new AggregateInfo( + recordings.size(), + recordings.stream().mapToLong(ArchivedRecording::size).sum()); + } + } +} diff --git a/src/main/java/io/cryostat/graphql/matchers/EqualityMatcher.java b/src/main/java/io/cryostat/graphql/matchers/EqualityMatcher.java new file mode 100644 index 000000000..aea772c5c --- /dev/null +++ b/src/main/java/io/cryostat/graphql/matchers/EqualityMatcher.java @@ -0,0 +1,71 @@ +/* + * Copyright The Cryostat Authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.cryostat.graphql.matchers; + +import java.util.Objects; +import java.util.function.Function; +import java.util.function.Predicate; + +public class EqualityMatcher implements LabelMatcher { + + private final String key; + private final EqualityMatcher.Operator operator; + private final String value; + + EqualityMatcher(String key, EqualityMatcher.Operator operator, String value) { + this.key = key; + this.operator = operator; + this.value = value; + } + + @Override + public String getKey() { + return key; + } + + @Override + public boolean test(String s) { + return operator.with(value).test(s); + } + + public enum Operator { + EQUAL("=", arg -> v -> Objects.equals(arg, v)), + DOUBLE_EQUAL("==", arg -> v -> Objects.equals(arg, v)), + NOT_EQUAL("!=", arg -> v -> !Objects.equals(arg, v)), + ; + + private final String token; + private final Function> fn; + + Operator(String token, Function> fn) { + this.token = token; + this.fn = fn; + } + + Predicate with(String value) { + return fn.apply(value); + } + + public static Operator fromString(String str) { + for (Operator op : Operator.values()) { + if (op.token.equals(str)) { + return op; + } + } + return null; + } + } +} diff --git a/src/main/java/io/cryostat/graphql/matchers/LabelMatcher.java b/src/main/java/io/cryostat/graphql/matchers/LabelMatcher.java new file mode 100644 index 000000000..1d8735f81 --- /dev/null +++ b/src/main/java/io/cryostat/graphql/matchers/LabelMatcher.java @@ -0,0 +1,22 @@ +/* + * Copyright The Cryostat Authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.cryostat.graphql.matchers;
+
+import java.util.function.Predicate;
+
+interface LabelMatcher extends Predicate<String> {
+    String getKey();
+}
diff --git a/src/main/java/io/cryostat/graphql/matchers/LabelSelectorMatcher.java b/src/main/java/io/cryostat/graphql/matchers/LabelSelectorMatcher.java
new file mode 100644
index 000000000..9f01582f6
--- /dev/null
+++ b/src/main/java/io/cryostat/graphql/matchers/LabelSelectorMatcher.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright The Cryostat Authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.cryostat.graphql.matchers;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.function.Function;
+import java.util.function.Predicate;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+public class LabelSelectorMatcher implements Predicate<Map<String, String>> {
+
+    // ex. "my.prefix/label = something". Whitespaces around the operator are ignored. Left side
+    // must loosely look like a k8s label (not strictly enforced here), right side must loosely look
+    // like a k8s label value, which may be empty. Allowed operators are "=", "==", "!=".
+    static final Pattern EQUALITY_PATTERN =
+            Pattern.compile("^(?<key>[^!=\\s]+)\\s*(?<op>=|==|!=)\\s*(?<value>[^!=\\s]*)$");
+
+    // ex. "environment in (production, qa)" or "tier NotIn (frontend, backend)". Tests if the given
+    // label has or does not have any of the specified values.
+    static final Pattern SET_MEMBER_PATTERN =
+            Pattern.compile(
+                    "(?<key>\\S+)\\s+(?<op>in|notin)\\s+\\((?<values>.+)\\)",
+                    Pattern.CASE_INSENSITIVE);
+
+    // ex. "mykey" or "!mykey". Tests whether the given key name exists in the test label set as a
+    // key, with or without a value.
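+    //
+    // A rough usage sketch of how the three clause forms above and below behave once handled by
+    // the parse() method further down (the sample keys and values here are illustrative only):
+    //   LabelSelectorMatcher.parse("tier = frontend").test(Map.of("tier", "frontend"))   -> true
+    //   LabelSelectorMatcher.parse("environment in (production, qa)")
+    //           .test(Map.of("environment", "qa"))                                       -> true
+    //   LabelSelectorMatcher.parse("!mykey").test(Map.of("mykey", "anything"))           -> false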
+    static final Pattern SET_EXISTENCE_PATTERN =
+            Pattern.compile("^(?<op>!?)(?<key>\\S+)$", Pattern.MULTILINE);
+
+    private final List<LabelMatcher> matchers = new ArrayList<>();
+
+    private LabelSelectorMatcher() {
+        this(List.of());
+    }
+
+    private LabelSelectorMatcher(Collection<LabelMatcher> matchers) {
+        this.matchers.addAll(matchers);
+    }
+
+    @Override
+    public boolean test(Map<String, String> labels) {
+        return this.matchers.stream().allMatch(m -> m.test(labels.get(m.getKey())));
+    }
+
+    public static LabelSelectorMatcher parse(String clause) throws IllegalArgumentException {
+        Collection<Function<String, LabelMatcher>> parsers =
+                Arrays.asList(
+                        LabelSelectorMatcher::parseEqualities,
+                        LabelSelectorMatcher::parseSetMemberships,
+                        LabelSelectorMatcher::parseSetExistences);
+        for (var parser : parsers) {
+            LabelMatcher matcher = parser.apply(clause);
+            if (matcher != null) {
+                return new LabelSelectorMatcher(List.of(matcher));
+            }
+        }
+        return new LabelSelectorMatcher();
+    }
+
+    private static LabelMatcher parseEqualities(String clause) {
+        Matcher m = EQUALITY_PATTERN.matcher(clause);
+        if (!m.matches()) {
+            return null;
+        }
+        String key = m.group("key");
+        String op = m.group("op");
+        EqualityMatcher.Operator operator = EqualityMatcher.Operator.fromString(op);
+        Objects.requireNonNull(operator, "Unknown equality operator " + op);
+        String value = m.group("value");
+        return new EqualityMatcher(key, operator, value);
+    }
+
+    private static LabelMatcher parseSetMemberships(String clause) {
+        Matcher m = SET_MEMBER_PATTERN.matcher(clause);
+        if (!m.matches()) {
+            return null;
+        }
+        String key = m.group("key");
+        String op = m.group("op");
+        SetMatcher.Operator operator = SetMatcher.Operator.fromString(op);
+        Objects.requireNonNull(operator, "Unknown set operator " + op);
+        String value = m.group("values");
+        List<String> values =
+                Arrays.asList(value.split(",")).stream()
+                        .map(String::trim)
+                        .collect(Collectors.toList());
+        return new SetMatcher(key, operator, values);
+    }
+
+    private static LabelMatcher parseSetExistences(String clause) {
+        Matcher m = SET_EXISTENCE_PATTERN.matcher(clause);
+        if (!m.matches()) {
+            return null;
+        }
+        String key = m.group("key");
+        String op = m.group("op");
+        SetMatcher.Operator operator = SetMatcher.Operator.fromString(op);
+        Objects.requireNonNull(operator, "Unknown set operator " + op);
+        return new SetMatcher(key, operator);
+    }
+}
diff --git a/src/main/java/io/cryostat/graphql/matchers/SetMatcher.java b/src/main/java/io/cryostat/graphql/matchers/SetMatcher.java
new file mode 100644
index 000000000..1e0d69f4d
--- /dev/null
+++ b/src/main/java/io/cryostat/graphql/matchers/SetMatcher.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright The Cryostat Authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.cryostat.graphql.matchers;
+
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.function.Function;
+import java.util.function.Predicate;
+
+public class SetMatcher implements LabelMatcher {
+
+    private final SetMatcher.Operator operator;
+    private final String key;
+    private final Set<String> values;
+
+    SetMatcher(String key, SetMatcher.Operator operator) {
+        this(key, operator, Set.of());
+    }
+
+    SetMatcher(String key, SetMatcher.Operator operator, Collection<String> values) {
+        this.key = key;
+        this.operator = operator;
+        this.values = new HashSet<>(values);
+    }
+
+    @Override
+    public String getKey() {
+        return key;
+    }
+
+    @Override
+    public boolean test(String s) {
+        return operator.with(values).test(s);
+    }
+
+    public enum Operator {
+        IN("In", args -> v -> contains(args, v)),
+        NOT_IN("NotIn", args -> v -> !contains(args, v)),
+        EXISTS("", args -> v -> v != null),
+        DOES_NOT_EXIST("!", args -> v -> v == null),
+        ;
+
+        private final String token;
+        private final Function<Collection<String>, Predicate<String>> fn;
+
+        Operator(String token, Function<Collection<String>, Predicate<String>> fn) {
+            this.token = token;
+            this.fn = fn;
+        }
+
+        Predicate<String> with(Collection<String> values) {
+            return fn.apply(values);
+        }
+
+        public static Operator fromString(String str) {
+            for (Operator op : Operator.values()) {
+                if (op.token.equalsIgnoreCase(str)) {
+                    return op;
+                }
+            }
+            return null;
+        }
+
+        private static boolean contains(Collection<String> args, String v) {
+            return args.stream().anyMatch(s -> s.equals(v));
+        }
+    }
+}
diff --git a/src/main/java/io/cryostat/recordings/ActiveRecording.java b/src/main/java/io/cryostat/recordings/ActiveRecording.java
index d52aaef36..9ff46c66e 100644
--- a/src/main/java/io/cryostat/recordings/ActiveRecording.java
+++ b/src/main/java/io/cryostat/recordings/ActiveRecording.java
@@ -141,6 +141,10 @@ public static ActiveRecording getByName(String name) {
         return find("name", name).singleResult();
     }
+    public void setMetadata(Metadata metadata) {
+        this.metadata = metadata;
+    }
+
     @Transactional
     public static boolean deleteFromTarget(Target target, String recordingName) {
         Optional<ActiveRecording> recording =
@@ -282,16 +286,19 @@ public record ActiveRecordingEvent(
             Objects.requireNonNull(payload);
         }
-        public record Payload(String target, LinkedRecordingDescriptor recording) {
+        public record Payload(
+                String target, LinkedRecordingDescriptor recording, String jvmId) {
             public Payload {
                 Objects.requireNonNull(target);
                 Objects.requireNonNull(recording);
+                Objects.requireNonNull(jvmId);
             }
             public static Payload of(RecordingHelper helper, ActiveRecording recording) {
                 return new Payload(
                         recording.target.connectUrl.toString(),
-                        helper.toExternalForm(recording));
+                        helper.toExternalForm(recording),
+                        recording.target.jvmId);
             }
         }
     }
diff --git a/src/main/java/io/cryostat/recordings/RecordingHelper.java b/src/main/java/io/cryostat/recordings/RecordingHelper.java
index 506febebb..c5590e19a 100644
--- a/src/main/java/io/cryostat/recordings/RecordingHelper.java
+++ b/src/main/java/io/cryostat/recordings/RecordingHelper.java
@@ -39,11 +39,13 @@
 import java.util.Optional;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
 import java.util.function.Supplier;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import org.openjdk.jmc.common.unit.IConstrainedMap;
+import org.openjdk.jmc.common.unit.QuantityConversionException;
 import org.openjdk.jmc.flightrecorder.configuration.events.EventOptionID;
 import
org.openjdk.jmc.flightrecorder.configuration.recording.RecordingOptionsBuilder; import org.openjdk.jmc.rjmx.services.jfr.IEventTypeInfo; @@ -84,7 +86,9 @@ import jakarta.enterprise.event.Observes; import jakarta.inject.Inject; import jakarta.inject.Named; +import jakarta.transaction.Transactional; import jakarta.ws.rs.BadRequestException; +import jakarta.ws.rs.NotFoundException; import jakarta.ws.rs.ServerErrorException; import jdk.jfr.RecordingState; import org.apache.commons.codec.binary.Base64; @@ -105,8 +109,10 @@ import software.amazon.awssdk.services.s3.model.DeleteObjectRequest; import software.amazon.awssdk.services.s3.model.GetObjectRequest; import software.amazon.awssdk.services.s3.model.GetObjectTaggingRequest; +import software.amazon.awssdk.services.s3.model.HeadObjectRequest; import software.amazon.awssdk.services.s3.model.ListObjectsV2Request; import software.amazon.awssdk.services.s3.model.NoSuchKeyException; +import software.amazon.awssdk.services.s3.model.PutObjectTaggingRequest; import software.amazon.awssdk.services.s3.model.S3Object; import software.amazon.awssdk.services.s3.model.Tag; import software.amazon.awssdk.services.s3.model.Tagging; @@ -132,6 +138,7 @@ public class RecordingHelper { @Inject EventOptionsBuilder.Factory eventOptionsBuilderFactory; @Inject TargetTemplateService.Factory targetTemplateServiceFactory; @Inject S3TemplateService customTemplateService; + @Inject RecordingHelper recordingHelper; @Inject @Named(Producers.BASE64_URL) @@ -192,108 +199,150 @@ void onStart(@Observes StartupEvent evt) { } } - public ActiveRecording startRecording( + @Transactional + public Uni startRecording( Target target, - IConstrainedMap recordingOptions, - Template eventTemplate, - Metadata metadata, - boolean archiveOnStop, RecordingReplace replace, - JFRConnection connection) - throws Exception { - String recordingName = (String) recordingOptions.get(RecordingOptionsBuilder.KEY_NAME); - getDescriptorByName(connection, recordingName) - .ifPresent( - previous -> { - RecordingState previousState = mapState(previous); - boolean restart = - shouldRestartRecording(replace, previousState, recordingName); - if (!restart) { - throw new EntityExistsException("Recording", recordingName); - } - if (!ActiveRecording.deleteFromTarget(target, recordingName)) { - logger.warnf( - "Could not delete recording %s from target %s", - recordingName, target.alias); - } - }); - - IRecordingDescriptor desc = - connection - .getService() - .start(recordingOptions, enableEvents(target, eventTemplate)); - - Map labels = metadata.labels(); - labels.put("template.name", eventTemplate.getName()); - labels.put("template.type", eventTemplate.getType().toString()); - Metadata meta = new Metadata(labels); - - ActiveRecording recording = ActiveRecording.from(target, desc, meta); - recording.persist(); - - target.activeRecordings.add(recording); - target.persist(); - - logger.tracev("Started recording: {0} {1}", target.connectUrl, target.activeRecordings); - - return recording; + Template template, + RecordingOptions options, + Map rawLabels) + throws QuantityConversionException { + return connectionManager.executeConnectedTaskUni( + target, + conn -> { + RecordingOptionsBuilder optionsBuilder = + recordingOptionsBuilderFactory.create(target).name(options.name()); + if (options.duration().isPresent()) { + optionsBuilder = + optionsBuilder.duration( + TimeUnit.SECONDS.toMillis(options.duration().get())); + } + if (options.toDisk().isPresent()) { + optionsBuilder = 
optionsBuilder.toDisk(options.toDisk().get()); + } + if (options.maxAge().isPresent()) { + optionsBuilder = optionsBuilder.maxAge(options.maxAge().get()); + } + if (options.maxSize().isPresent()) { + optionsBuilder = optionsBuilder.maxSize(options.maxSize().get()); + } + IConstrainedMap recordingOptions = optionsBuilder.build(); + getDescriptorByName(conn, options.name()) + .ifPresent( + previous -> { + RecordingState previousState = mapState(previous); + boolean restart = + shouldRestartRecording( + replace, previousState, options.name()); + if (!restart) { + throw new EntityExistsException( + "Recording", options.name()); + } + if (!ActiveRecording.deleteFromTarget( + target, options.name())) { + logger.warnf( + "Could not delete recording %s from target %s", + options.name(), target.alias); + } + }); + + IRecordingDescriptor desc = + conn.getService() + .start(recordingOptions, enableEvents(target, template)); + + Map labels = new HashMap<>(rawLabels); + labels.put("template.name", template.getName()); + labels.put("template.type", template.getType().toString()); + Metadata meta = new Metadata(labels); + + ActiveRecording recording = ActiveRecording.from(target, desc, meta); + recording.persist(); + + target.activeRecordings.add(recording); + target.persist(); + + logger.tracev( + "Started recording: {0} {1}", + target.connectUrl, target.activeRecordings); + return recording; + }); } - public ActiveRecording createSnapshot(Target target, JFRConnection connection) - throws Exception { - IRecordingDescriptor desc = connection.getService().getSnapshotRecording(); - - String rename = String.format("%s-%d", desc.getName().toLowerCase(), desc.getId()); + public Uni createSnapshot(Target target) { + return connectionManager.executeConnectedTaskUni( + target, + connection -> { + IRecordingDescriptor desc = connection.getService().getSnapshotRecording(); - RecordingOptionsBuilder recordingOptionsBuilder = - recordingOptionsBuilderFactory.create(target); - recordingOptionsBuilder.name(rename); + String rename = + String.format("%s-%d", desc.getName().toLowerCase(), desc.getId()); - connection.getService().updateRecordingOptions(desc, recordingOptionsBuilder.build()); + RecordingOptionsBuilder recordingOptionsBuilder = + recordingOptionsBuilderFactory.create(target).name(rename); - Optional updatedDescriptor = getDescriptorByName(connection, rename); + connection + .getService() + .updateRecordingOptions(desc, recordingOptionsBuilder.build()); - if (updatedDescriptor.isEmpty()) { - throw new IllegalStateException( - "The most recent snapshot of the recording cannot be" - + " found after renaming."); - } + Optional updatedDescriptor = + getDescriptorByName(connection, rename); - desc = updatedDescriptor.get(); + if (updatedDescriptor.isEmpty()) { + throw new IllegalStateException( + "The most recent snapshot of the recording cannot be" + + " found after renaming."); + } - try (InputStream snapshot = remoteRecordingStreamFactory.open(connection, target, desc)) { - if (!snapshotIsReadable(target, snapshot)) { - connection.getService().close(desc); - throw new SnapshotCreationException( - "Snapshot was not readable - are there any source recordings?"); - } - } + desc = updatedDescriptor.get(); - ActiveRecording recording = - ActiveRecording.from( - target, - desc, - new Metadata( - Map.of( - "jvmId", - target.jvmId, - "connectUrl", - target.connectUrl.toString()))); - recording.persist(); + try (InputStream snapshot = + remoteRecordingStreamFactory.open(connection, target, desc)) { + if 
(!snapshotIsReadable(target, snapshot)) { + connection.getService().close(desc); + throw new SnapshotCreationException( + "Snapshot was not readable - are there any source recordings?"); + } + } - target.activeRecordings.add(recording); - target.persist(); + ActiveRecording recording = + ActiveRecording.from( + target, + desc, + new Metadata( + Map.of( + "jvmId", + target.jvmId, + "connectUrl", + target.connectUrl.toString()))); + recording.persist(); + + target.activeRecordings.add(recording); + target.persist(); + + var event = + new ActiveRecordingEvent( + Recordings.RecordingEventCategory.SNAPSHOT_CREATED, + ActiveRecordingEvent.Payload.of(this, recording)); + bus.publish(event.category().category(), event.payload().recording()); + bus.publish( + MessagingServer.class.getName(), + new Notification(event.category().category(), event.payload())); + + return recording; + }); + } - var event = - new ActiveRecordingEvent( - Recordings.RecordingEventCategory.SNAPSHOT_CREATED, - ActiveRecordingEvent.Payload.of(this, recording)); - bus.publish(event.category().category(), event.payload().recording()); - bus.publish( - MessagingServer.class.getName(), - new Notification(event.category().category(), event.payload())); + public Uni stopRecording(ActiveRecording recording) { + recording.state = RecordingState.STOPPED; + recording.persist(); + return Uni.createFrom().item(recording); + } - return recording; + @Transactional + public Uni deleteRecording(ActiveRecording recording) { + recording.delete(); + recording.persist(); + return Uni.createFrom().item(recording); } private boolean snapshotIsReadable(Target target, InputStream snapshot) throws IOException { @@ -395,7 +444,7 @@ private IConstrainedMap enableEvents(Target target, Template even } public Template getPreferredTemplate( - Target target, String templateName, TemplateType templateType) throws Exception { + Target target, String templateName, TemplateType templateType) { Objects.requireNonNull(target); Objects.requireNonNull(templateName); if (templateName.equals(EventTemplates.ALL_EVENTS_TEMPLATE.getName())) { @@ -488,6 +537,29 @@ public List listArchivedRecordingObjects() { return listArchivedRecordingObjects(null); } + public List listArchivedRecordings() { + return listArchivedRecordingObjects().stream() + .map( + item -> { + String path = item.key().strip(); + String[] parts = path.split("/"); + String jvmId = parts[0]; + String filename = parts[1]; + Metadata metadata = + getArchivedRecordingMetadata(jvmId, filename) + .orElseGet(Metadata::empty); + return new ArchivedRecording( + jvmId, + filename, + downloadUrl(jvmId, filename), + reportUrl(jvmId, filename), + metadata, + item.size(), + item.lastModified().getEpochSecond()); + }) + .toList(); + } + public List listArchivedRecordingObjects(String jvmId) { var builder = ListObjectsV2Request.builder().bucket(archiveBucket); if (StringUtils.isNotBlank(jvmId)) { @@ -503,33 +575,52 @@ public List listArchivedRecordingObjects(String jvmId) { .toList(); } - public String saveRecording(ActiveRecording recording) throws Exception { - return saveRecording(recording, null); + public List listArchivedRecordings(String jvmId) { + return listArchivedRecordingObjects(jvmId).stream() + .map( + item -> { + String path = item.key().strip(); + String[] parts = path.split("/"); + String filename = parts[1]; + Metadata metadata = + getArchivedRecordingMetadata(jvmId, filename) + .orElseGet(Metadata::empty); + return new ArchivedRecording( + jvmId, + filename, + downloadUrl(jvmId, filename), + 
reportUrl(jvmId, filename), + metadata, + item.size(), + item.lastModified().getEpochSecond()); + }) + .toList(); } - public String saveRecording(ActiveRecording recording, Instant expiry) throws Exception { - return saveRecording(recording, null, expiry); + public List listArchivedRecordings(Target target) { + return listArchivedRecordings(target.jvmId); } - public String saveRecording(ActiveRecording recording, String savename, Instant expiry) - throws Exception { + public ArchivedRecording archiveRecording( + ActiveRecording activeRecording, String savename, Instant expiry) throws Exception { // AWS object key name guidelines advise characters to avoid (% so we should not pass url // encoded characters) String transformedAlias = - URLDecoder.decode(recording.target.alias, StandardCharsets.UTF_8) + URLDecoder.decode(activeRecording.target.alias, StandardCharsets.UTF_8) .replaceAll("[\\._/]+", "-"); - String timestamp = - clock.now().truncatedTo(ChronoUnit.SECONDS).toString().replaceAll("[-:]+", ""); + Instant now = clock.now(); + String timestamp = now.truncatedTo(ChronoUnit.SECONDS).toString().replaceAll("[-:]+", ""); String filename = - String.format("%s_%s_%s.jfr", transformedAlias, recording.name, timestamp); + String.format("%s_%s_%s.jfr", transformedAlias, activeRecording.name, timestamp); if (StringUtils.isBlank(savename)) { savename = filename; } int mib = 1024 * 1024; - String key = archivedRecordingKey(recording.target.jvmId, filename); + String key = archivedRecordingKey(activeRecording.target.jvmId, filename); String multipartId = null; List> parts = new ArrayList<>(); - try (var stream = remoteRecordingStreamFactory.open(recording); + long accum = 0; + try (var stream = remoteRecordingStreamFactory.open(activeRecording); var ch = Channels.newChannel(stream)) { ByteBuffer buf = ByteBuffer.allocate(20 * mib); CreateMultipartUploadRequest.Builder builder = @@ -539,14 +630,13 @@ public String saveRecording(ActiveRecording recording, String savename, Instant .contentType(JFR_MIME) .contentDisposition( String.format("attachment; filename=\"%s\"", savename)) - .tagging(createActiveRecordingTagging(recording, expiry)); + .tagging(createActiveRecordingTagging(activeRecording, expiry)); if (expiry != null && expiry.isAfter(Instant.now())) { builder = builder.expires(expiry); } CreateMultipartUploadRequest request = builder.build(); multipartId = storage.createMultipartUpload(request).uploadId(); int read = 0; - long accum = 0; for (int i = 1; i <= 10_000; i++) { read = ch.read(buf); @@ -627,13 +717,20 @@ public String saveRecording(ActiveRecording recording, String savename, Instant var event = new ActiveRecordingEvent( Recordings.RecordingEventCategory.ACTIVE_SAVED, - ActiveRecordingEvent.Payload.of(this, recording)); + ActiveRecordingEvent.Payload.of(this, activeRecording)); bus.publish(event.category().category(), event.payload().recording()); bus.publish( MessagingServer.class.getName(), new Notification(event.category().category(), event.payload())); } - return filename; + return new ArchivedRecording( + activeRecording.target.jvmId, + filename, + downloadUrl(activeRecording.target.jvmId, filename), + reportUrl(activeRecording.target.jvmId, filename), + activeRecording.metadata, + accum, + now.getEpochSecond()); } public Optional getArchivedRecordingMetadata(String jvmId, String filename) { @@ -760,6 +857,7 @@ public void deleteArchivedRecording(String jvmId, String filename) { ArchivedRecordingEvent.Payload.of( target.map(t -> t.connectUrl).orElse(null), new ArchivedRecording( + 
jvmId, filename, downloadUrl(jvmId, filename), reportUrl(jvmId, filename), @@ -836,6 +934,86 @@ private Metadata taggingToMetadata(List tagSet) { return new Metadata(labels, expiry); } + public ActiveRecording updateRecordingMetadata( + long recordingId, Map newLabels) { + ActiveRecording recording = ActiveRecording.findById(recordingId); + + if (recording == null) { + throw new NotFoundException("Recording not found for ID: " + recordingId); + } + + if (!recording.metadata.labels().equals(newLabels)) { + Metadata updatedMetadata = new Metadata(newLabels); + recording.setMetadata(updatedMetadata); + recording.persist(); + + notify( + new ActiveRecordingEvent( + Recordings.RecordingEventCategory.METADATA_UPDATED, + ActiveRecordingEvent.Payload.of(recordingHelper, recording))); + } + return recording; + } + + private void notify(ActiveRecordingEvent event) { + bus.publish( + MessagingServer.class.getName(), + new Notification(event.category().category(), event.payload())); + } + + public ArchivedRecording updateArchivedRecordingMetadata( + String jvmId, String filename, Map updatedLabels) { + String key = archivedRecordingKey(jvmId, filename); + Optional existingMetadataOpt = getArchivedRecordingMetadata(key); + + if (existingMetadataOpt.isEmpty()) { + throw new NotFoundException( + "Could not find metadata for archived recording with key: " + key); + } + + Metadata updatedMetadata = new Metadata(updatedLabels); + + Tagging tagging = createMetadataTagging(updatedMetadata); + storage.putObjectTagging( + PutObjectTaggingRequest.builder() + .bucket(archiveBucket) + .key(key) + .tagging(tagging) + .build()); + + var response = + storage.headObject( + HeadObjectRequest.builder().bucket(archiveBucket).key(key).build()); + long size = response.contentLength(); + Instant lastModified = response.lastModified(); + + ArchivedRecording updatedRecording = + new ArchivedRecording( + jvmId, + filename, + downloadUrl(jvmId, filename), + reportUrl(jvmId, filename), + updatedMetadata, + size, + lastModified.getEpochSecond()); + + notifyArchiveMetadataUpdate(updatedRecording); + return updatedRecording; + } + + private void notifyArchiveMetadataUpdate(ArchivedRecording updatedRecording) { + + var event = + new ArchivedRecordingEvent( + Recordings.RecordingEventCategory.METADATA_UPDATED, + new ArchivedRecordingEvent.Payload( + updatedRecording.downloadUrl(), updatedRecording)); + bus.publish(event.category().category(), event.payload().recording()); + bus.publish( + MessagingServer.class.getName(), + new Notification(event.category().category(), event.payload())); + } + public Uni uploadToJFRDatasource(long targetEntityId, long remoteId) throws Exception { Target target = Target.getTargetById(targetEntityId); Objects.requireNonNull(target, "Target from targetId not found"); @@ -929,6 +1107,14 @@ Optional getRecordingCopyPath( }); } + public record RecordingOptions( + String name, + Optional toDisk, + Optional archiveOnStop, + Optional duration, + Optional maxSize, + Optional maxAge) {} + public enum RecordingReplace { ALWAYS, NEVER, diff --git a/src/main/java/io/cryostat/recordings/Recordings.java b/src/main/java/io/cryostat/recordings/Recordings.java index 8eea2e436..545a72d88 100644 --- a/src/main/java/io/cryostat/recordings/Recordings.java +++ b/src/main/java/io/cryostat/recordings/Recordings.java @@ -52,6 +52,7 @@ import io.cryostat.core.templates.Template; import io.cryostat.core.templates.TemplateType; import io.cryostat.recordings.ActiveRecording.Listener.ArchivedRecordingEvent; +import 
io.cryostat.recordings.RecordingHelper.RecordingOptions; import io.cryostat.recordings.RecordingHelper.RecordingReplace; import io.cryostat.recordings.RecordingHelper.SnapshotCreationException; import io.cryostat.targets.Target; @@ -156,29 +157,7 @@ void onStart(@Observes StartupEvent evt) { @Path("/api/v1/recordings") @RolesAllowed("read") public List listArchivesV1() { - var result = new ArrayList(); - recordingHelper - .listArchivedRecordingObjects() - .forEach( - item -> { - String path = item.key().strip(); - String[] parts = path.split("/"); - String jvmId = parts[0]; - String filename = parts[1]; - Metadata metadata = - recordingHelper - .getArchivedRecordingMetadata(jvmId, filename) - .orElseGet(Metadata::empty); - result.add( - new ArchivedRecording( - filename, - recordingHelper.downloadUrl(jvmId, filename), - recordingHelper.reportUrl(jvmId, filename), - metadata, - item.size(), - item.lastModified().getEpochSecond())); - }); - return result; + return recordingHelper.listArchivedRecordings(); } @POST @@ -192,6 +171,8 @@ public Map upload( if (rawLabels != null) { rawLabels.getMap().forEach((k, v) -> labels.put(k, v.toString())); } + labels.put("jvmId", "uploads"); + labels.put("connectUrl", "uploads"); Metadata metadata = new Metadata(labels); return doUpload(recording, metadata, "uploads"); } @@ -241,6 +222,7 @@ public void agentPush( ArchivedRecordingEvent.Payload.of( target.map(t -> t.connectUrl).orElse(null), new ArchivedRecording( + jvmId, recording.fileName(), recordingHelper.downloadUrl(jvmId, recording.fileName()), recordingHelper.reportUrl(jvmId, recording.fileName()), @@ -294,6 +276,7 @@ public List agentGet(@RestPath String jvmId) { .orElseGet(Metadata::empty); result.add( new ArchivedRecording( + jvmId, filename, recordingHelper.downloadUrl(jvmId, filename), recordingHelper.reportUrl(jvmId, filename), @@ -357,6 +340,7 @@ Map doUpload(FileUpload recording, Metadata metadata, String jvm ArchivedRecordingEvent.Payload.of( target.map(t -> t.connectUrl).orElse(null), new ArchivedRecording( + jvmId, filename, recordingHelper.downloadUrl(jvmId, filename), recordingHelper.reportUrl(jvmId, filename), @@ -414,6 +398,45 @@ public Collection listFsArchives() { connectUrl, id, new ArrayList<>())); dir.recordings.add( new ArchivedRecording( + jvmId, + filename, + recordingHelper.downloadUrl(jvmId, filename), + recordingHelper.reportUrl(jvmId, filename), + metadata, + item.size(), + item.lastModified().getEpochSecond())); + }); + return map.values(); + } + + @GET + @Blocking + @Path("/api/beta/fs/recordings/{jvmId}") + @RolesAllowed("read") + public Collection listFsArchives(@RestPath String jvmId) { + var map = new HashMap(); + recordingHelper + .listArchivedRecordingObjects(jvmId) + .forEach( + item -> { + String filename = item.key().strip().replace(jvmId + "/", ""); + + Metadata metadata = + recordingHelper + .getArchivedRecordingMetadata(jvmId, filename) + .orElseGet(Metadata::empty); + + String connectUrl = + metadata.labels.computeIfAbsent("connectUrl", k -> jvmId); + var dir = + map.computeIfAbsent( + jvmId, + id -> + new ArchivedRecordingDirectory( + connectUrl, id, new ArrayList<>())); + dir.recordings.add( + new ArchivedRecording( + jvmId, filename, recordingHelper.downloadUrl(jvmId, filename), recordingHelper.reportUrl(jvmId, filename), @@ -460,8 +483,7 @@ public String patch(@RestPath long targetId, @RestPath long remoteId, String bod ActiveRecording activeRecording = recording.get(); switch (body.toLowerCase()) { case "stop": - activeRecording.state = 
RecordingState.STOPPED; - activeRecording.persist(); + recordingHelper.stopRecording(activeRecording).await().indefinitely(); return null; case "save": try { @@ -471,7 +493,7 @@ public String patch(@RestPath long targetId, @RestPath long remoteId, String bod // completes before sending a response - it should be async. Here we should just // return an Accepted response, and if a failure occurs that should be indicated // as a websocket notification. - return recordingHelper.saveRecording(activeRecording); + return recordingHelper.archiveRecording(activeRecording, null, null).name(); } catch (IOException ioe) { logger.warn(ioe); return null; @@ -510,9 +532,8 @@ public Response patchV1(@RestPath URI connectUrl, @RestPath String recordingName @RolesAllowed("write") public Uni createSnapshotV1(@RestPath URI connectUrl) throws Exception { Target target = Target.getTargetByConnectUrl(connectUrl); - return connectionManager - .executeConnectedTaskUni( - target, connection -> recordingHelper.createSnapshot(target, connection)) + return recordingHelper + .createSnapshot(target) .onItem() .transform( recording -> @@ -527,9 +548,8 @@ public Uni createSnapshotV1(@RestPath URI connectUrl) throws Exception @RolesAllowed("write") public Uni createSnapshotV2(@RestPath URI connectUrl) throws Exception { Target target = Target.getTargetByConnectUrl(connectUrl); - return connectionManager - .executeConnectedTaskUni( - target, connection -> recordingHelper.createSnapshot(target, connection)) + return recordingHelper + .createSnapshot(target) .onItem() .transform( recording -> @@ -552,9 +572,8 @@ public Uni createSnapshotV2(@RestPath URI connectUrl) throws Exception @RolesAllowed("write") public Uni createSnapshot(@RestPath long id) throws Exception { Target target = Target.find("id", id).singleResult(); - return connectionManager - .executeConnectedTaskUni( - target, connection -> recordingHelper.createSnapshot(target, connection)) + return recordingHelper + .createSnapshot(target) .onItem() .transform( recording -> @@ -598,50 +617,32 @@ public Response createRecording( Template template = recordingHelper.getPreferredTemplate(target, pair.getKey(), pair.getValue()); + Map labels = new HashMap<>(); + if (rawMetadata.isPresent()) { + labels.putAll(mapper.readValue(rawMetadata.get(), Metadata.class).labels); + } + RecordingReplace replacement = RecordingReplace.NEVER; + if (replace.isPresent()) { + replacement = RecordingReplace.fromString(replace.get()); + } else if (restart.isPresent()) { + replacement = restart.get() ? RecordingReplace.ALWAYS : RecordingReplace.NEVER; + } ActiveRecording recording = - connectionManager.executeConnectedTask( - target, - connection -> { - RecordingOptionsBuilder optionsBuilder = - recordingOptionsBuilderFactory - .create(target) - .name(recordingName); - if (duration.isPresent()) { - optionsBuilder.duration(TimeUnit.SECONDS.toMillis(duration.get())); - } - if (toDisk.isPresent()) { - optionsBuilder.toDisk(toDisk.get()); - } - if (maxAge.isPresent()) { - optionsBuilder.maxAge(maxAge.get()); - } - if (maxSize.isPresent()) { - optionsBuilder.maxSize(maxSize.get()); - } - Map labels = new HashMap<>(); - if (rawMetadata.isPresent()) { - labels.putAll( - mapper.readValue(rawMetadata.get(), Metadata.class).labels); - } - RecordingReplace replacement = RecordingReplace.NEVER; - if (replace.isPresent()) { - replacement = RecordingReplace.fromString(replace.get()); - } else if (restart.isPresent()) { - replacement = - restart.get() - ? 
RecordingReplace.ALWAYS - : RecordingReplace.NEVER; - } - IConstrainedMap recordingOptions = optionsBuilder.build(); - return recordingHelper.startRecording( - target, - recordingOptions, - template, - new Metadata(labels), - archiveOnStop.orElse(false), - replacement, - connection); - }); + recordingHelper + .startRecording( + target, + replacement, + template, + new RecordingOptions( + recordingName, + toDisk, + archiveOnStop, + duration, + maxSize, + maxAge), + labels) + .await() + .atMost(Duration.ofSeconds(10)); if (recording.duration > 0) { scheduler.schedule( @@ -664,7 +665,7 @@ void stopRecording(long id, boolean archive) { recording.state = RecordingState.STOPPED; recording.persist(); if (archive) { - recordingHelper.saveRecording(recording); + recordingHelper.archiveRecording(recording, null, null); } } catch (Exception e) { logger.error("couldn't update recording", e); @@ -723,7 +724,7 @@ public void deleteRecording(@RestPath long targetId, @RestPath long remoteId) th .filter(r -> r.remoteId == remoteId) .findFirst() .ifPresentOrElse( - ActiveRecording::delete, + recordingHelper::deleteRecording, () -> { throw new NotFoundException(); }); @@ -772,6 +773,7 @@ public void deleteArchivedRecording(@RestPath String jvmId, @RestPath String fil ArchivedRecordingEvent.Payload.of( URI.create(connectUrl), new ArchivedRecording( + jvmId, filename, recordingHelper.downloadUrl(jvmId, filename), recordingHelper.reportUrl(jvmId, filename), @@ -830,11 +832,26 @@ public Uni uploadActiveToGrafana(@RestPath long targetId, @RestPath long return recordingHelper.uploadToJFRDatasource(targetId, remoteId); } + @POST + @Path("/api/beta/recordings/{connectUrl}/{filename}/upload") + @RolesAllowed("write") + public Response uploadArchivedToGrafanaBeta( + @RestPath String connectUrl, @RestPath String filename) throws Exception { + var jvmId = Target.getTargetByConnectUrl(URI.create(connectUrl)).jvmId; + return Response.status(RestResponse.Status.PERMANENT_REDIRECT) + .location( + URI.create( + String.format( + "/api/v3/grafana/%s", + recordingHelper.encodedKey(jvmId, filename)))) + .build(); + } + @POST @Path("/api/beta/fs/recordings/{jvmId}/{filename}/upload") @RolesAllowed("write") - public Response uploadArchivedToGrafanaBeta(@RestPath String jvmId, @RestPath String filename) - throws Exception { + public Response uploadArchivedToGrafanaFromPath( + @RestPath String jvmId, @RestPath String filename) throws Exception { return Response.status(RestResponse.Status.PERMANENT_REDIRECT) .location( URI.create( @@ -986,8 +1003,10 @@ public Response handleActiveDownload(@RestPath long id) throws Exception { String savename = recording.name; String filename = - recordingHelper.saveRecording( - recording, savename, Instant.now().plus(transientArchivesTtl)); + recordingHelper + .archiveRecording( + recording, savename, Instant.now().plus(transientArchivesTtl)) + .name(); String encodedKey = recordingHelper.encodedKey(recording.target.jvmId, filename); if (!savename.endsWith(".jfr")) { savename += ".jfr"; @@ -1128,6 +1147,7 @@ public record LinkedRecordingDescriptor( // TODO include jvmId and filename public record ArchivedRecording( + String jvmId, String name, String downloadUrl, String reportUrl, @@ -1135,6 +1155,7 @@ public record ArchivedRecording( long size, long archivedTime) { public ArchivedRecording { + Objects.requireNonNull(jvmId); Objects.requireNonNull(name); Objects.requireNonNull(downloadUrl); Objects.requireNonNull(reportUrl); @@ -1184,6 +1205,7 @@ public static Metadata empty() { public static final 
String ACTIVE_RECORDING_DELETED = "ActiveRecordingDeleted"; public static final String ACTIVE_RECORDING_SAVED = "ActiveRecordingSaved"; public static final String SNAPSHOT_RECORDING_CREATED = "SnapshotCreated"; + public static final String RECORDING_METADATA_UPDATED = "RecordingMetadataUpdated"; public enum RecordingEventCategory { ACTIVE_CREATED(ACTIVE_RECORDING_CREATED), @@ -1193,6 +1215,7 @@ public enum RecordingEventCategory { ARCHIVED_CREATED(ARCHIVED_RECORDING_CREATED), ARCHIVED_DELETED(ARCHIVED_RECORDING_DELETED), SNAPSHOT_CREATED(SNAPSHOT_RECORDING_CREATED), + METADATA_UPDATED(RECORDING_METADATA_UPDATED), ; private final String category; diff --git a/src/main/java/io/cryostat/rules/RuleService.java b/src/main/java/io/cryostat/rules/RuleService.java index e1a7165d6..e24915ef1 100644 --- a/src/main/java/io/cryostat/rules/RuleService.java +++ b/src/main/java/io/cryostat/rules/RuleService.java @@ -15,31 +15,25 @@ */ package io.cryostat.rules; -import java.io.IOException; +import java.time.Duration; import java.util.Date; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.openjdk.jmc.common.unit.IConstrainedMap; -import org.openjdk.jmc.common.unit.QuantityConversionException; -import org.openjdk.jmc.flightrecorder.configuration.recording.RecordingOptionsBuilder; -import org.openjdk.jmc.rjmx.ConnectionException; -import org.openjdk.jmc.rjmx.ServiceNotAvailableException; - import io.cryostat.core.templates.Template; import io.cryostat.core.templates.TemplateType; import io.cryostat.expressions.MatchExpressionEvaluator; import io.cryostat.recordings.ActiveRecording; import io.cryostat.recordings.RecordingHelper; +import io.cryostat.recordings.RecordingHelper.RecordingOptions; import io.cryostat.recordings.RecordingHelper.RecordingReplace; import io.cryostat.recordings.RecordingOptionsBuilderFactory; -import io.cryostat.recordings.Recordings.Metadata; import io.cryostat.rules.Rule.RuleEvent; import io.cryostat.targets.Target; import io.cryostat.targets.TargetConnectionManager; @@ -138,30 +132,22 @@ public void handleRuleRecordingCleanup(Rule rule) { @Transactional public void activate(Rule rule, Target target) throws Exception { - ActiveRecording recording = - connectionManager.executeConnectedTask( - target, - connection -> { - var recordingOptions = createRecordingOptions(rule, target); + var options = createRecordingOptions(rule); - Pair pair = - recordingHelper.parseEventSpecifier(rule.eventSpecifier); - Template template = - recordingHelper.getPreferredTemplate( - target, pair.getKey(), pair.getValue()); + Pair pair = recordingHelper.parseEventSpecifier(rule.eventSpecifier); + Template template = + recordingHelper.getPreferredTemplate(target, pair.getKey(), pair.getValue()); - Map labels = new HashMap<>(); - labels.put("rule", rule.name); - Metadata meta = new Metadata(labels); - return recordingHelper.startRecording( - target, - recordingOptions, - template, - meta, - false, - RecordingReplace.ALWAYS, - connection); - }); + ActiveRecording recording = + recordingHelper + .startRecording( + target, + RecordingReplace.STOPPED, + template, + options, + Map.of("rule", rule.name)) + .await() + .atMost(Duration.ofSeconds(10)); Target attachedTarget = entityManager.merge(target); var relatedRecordings = ruleRecordingMap.get(rule.id); @@ -172,20 
+158,14 @@ public void activate(Rule rule, Target target) throws Exception { } } - private IConstrainedMap createRecordingOptions(Rule rule, Target target) - throws ConnectionException, - QuantityConversionException, - IOException, - ServiceNotAvailableException { - RecordingOptionsBuilder optionsBuilder = - recordingOptionsBuilderFactory.create(target).name(rule.getRecordingName()); - if (rule.maxAgeSeconds > 0) { - optionsBuilder.maxAge(rule.maxAgeSeconds); - } - if (rule.maxSizeBytes > 0) { - optionsBuilder.maxSize(rule.maxSizeBytes); - } - return optionsBuilder.build(); + private RecordingOptions createRecordingOptions(Rule rule) { + return new RecordingOptions( + rule.getRecordingName(), + Optional.of(true), + Optional.of(true), + Optional.empty(), + Optional.ofNullable((long) rule.maxSizeBytes), + Optional.ofNullable((long) rule.maxAgeSeconds)); } @Transactional diff --git a/src/main/java/io/cryostat/rules/ScheduledArchiveJob.java b/src/main/java/io/cryostat/rules/ScheduledArchiveJob.java index 047e8ee83..a0e4f1fae 100644 --- a/src/main/java/io/cryostat/rules/ScheduledArchiveJob.java +++ b/src/main/java/io/cryostat/rules/ScheduledArchiveJob.java @@ -92,7 +92,7 @@ void initPreviousRecordings(Target target, Rule rule, Queue previousReco @Transactional void performArchival(ActiveRecording recording, Queue previousRecordings) throws Exception { - String filename = recordingHelper.saveRecording(recording); + String filename = recordingHelper.archiveRecording(recording, null, null).name(); previousRecordings.add(filename); } diff --git a/src/main/java/io/cryostat/targets/Target.java b/src/main/java/io/cryostat/targets/Target.java index 9060cbfad..67b33bd0b 100644 --- a/src/main/java/io/cryostat/targets/Target.java +++ b/src/main/java/io/cryostat/targets/Target.java @@ -147,6 +147,13 @@ public static record Annotations(Map platform, Map(), new HashMap<>()); } + + public Map merged() { + Map merged = new HashMap<>(); + cryostat().entrySet().forEach((e) -> merged.put(e.getKey(), e.getValue())); + merged.putAll(platform()); + return merged; + } } @Override @@ -181,10 +188,11 @@ public enum EventKind { } @SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"}) - public record TargetDiscovery(EventKind kind, Target serviceRef) { + public record TargetDiscovery(EventKind kind, Target serviceRef, String jvmId) { public TargetDiscovery { Objects.requireNonNull(kind); Objects.requireNonNull(serviceRef); + Objects.requireNonNull(jvmId); } } @@ -268,14 +276,19 @@ private void notify(EventKind eventKind, Target target) { MessagingServer.class.getName(), new Notification( TARGET_JVM_DISCOVERY, - new TargetDiscoveryEvent(new TargetDiscovery(eventKind, target)))); - bus.publish(TARGET_JVM_DISCOVERY, new TargetDiscovery(eventKind, target)); + new TargetDiscoveryEvent( + new TargetDiscovery(eventKind, target, target.jvmId)))); + bus.publish(TARGET_JVM_DISCOVERY, new TargetDiscovery(eventKind, target, target.jvmId)); } public record TargetDiscoveryEvent(TargetDiscovery event) { public TargetDiscoveryEvent { Objects.requireNonNull(event); } + + public String jvmId() { + return event.serviceRef().jvmId; + } } } } diff --git a/src/main/java/io/cryostat/ws/MessagingServer.java b/src/main/java/io/cryostat/ws/MessagingServer.java index 8644dfe99..239ebc215 100644 --- a/src/main/java/io/cryostat/ws/MessagingServer.java +++ b/src/main/java/io/cryostat/ws/MessagingServer.java @@ -47,11 +47,11 @@ public class MessagingServer { private static final String CLIENT_ACTIVITY_CATEGORY = "WsClientActivity"; + @Inject 
ObjectMapper mapper; @Inject Logger logger; private final ExecutorService executor = Executors.newSingleThreadExecutor(); private final BlockingQueue msgQ; private final Set sessions = new CopyOnWriteArraySet<>(); - private final ObjectMapper mapper = new ObjectMapper(); MessagingServer(@ConfigProperty(name = "cryostat.messaging.queue.size") int capacity) { this.msgQ = new ArrayBlockingQueue<>(capacity); diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 6091cdbac..d2c9840f3 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -57,6 +57,12 @@ quarkus.smallrye-openapi.info-contact-url=https://cryostat.io quarkus.smallrye-openapi.info-license-name=Apache 2.0 quarkus.smallrye-openapi.info-license-url=https://github.com/cryostatio/cryostat3/blob/main/LICENSE +quarkus.smallrye-graphql.events.enabled=true +quarkus.smallrye-graphql.root-path=/api/v3/graphql +quarkus.smallrye-graphql.http.get.enabled=true +quarkus.smallrye-graphql.print-data-fetcher-exception=true +quarkus.smallrye-graphql.log-payload=queryOnly + quarkus.http.access-log.enabled=true quarkus.log.category."io.quarkus.http.access-log".level=DEBUG quarkus.http.enable-compression=true diff --git a/src/test/java/itest/CustomTargetsTest.java b/src/test/java/itest/CustomTargetsTest.java index ccfea9b51..f5865310d 100644 --- a/src/test/java/itest/CustomTargetsTest.java +++ b/src/test/java/itest/CustomTargetsTest.java @@ -16,6 +16,7 @@ package itest; import java.net.UnknownHostException; +import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.CountDownLatch; @@ -224,16 +225,21 @@ void shouldBeAbleToDefineTarget() MatcherAssert.assertThat(item.getString("jvmId"), Matchers.equalTo(itestJvmId)); MatcherAssert.assertThat(item.getString("alias"), Matchers.equalTo(alias)); MatcherAssert.assertThat(item.getString("connectUrl"), Matchers.equalTo(SELF_JMX_URL)); - MatcherAssert.assertThat(item.getJsonObject("labels"), Matchers.equalTo(new JsonObject())); + MatcherAssert.assertThat(item.getJsonArray("labels"), Matchers.equalTo(new JsonArray())); MatcherAssert.assertThat( item.getJsonObject("annotations"), Matchers.equalTo( new JsonObject( Map.of( "platform", - Map.of(), + List.of(), "cryostat", - Map.of("REALM", "Custom Targets"))))); + List.of( + Map.of( + "key", + "REALM", + "value", + "Custom Targets")))))); } @Test diff --git a/src/test/java/itest/GraphQLTest.java b/src/test/java/itest/GraphQLTest.java new file mode 100644 index 000000000..c8063c4d4 --- /dev/null +++ b/src/test/java/itest/GraphQLTest.java @@ -0,0 +1,2125 @@ +/* + * Copyright The Cryostat Authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package itest; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.is; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.ForkJoinPool; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +import io.cryostat.util.HttpMimeType; + +import io.vertx.core.MultiMap; +import io.vertx.core.buffer.Buffer; +import io.vertx.core.http.HttpHeaders; +import io.vertx.core.json.JsonArray; +import io.vertx.core.json.JsonObject; +import io.vertx.ext.web.client.HttpResponse; +import itest.bases.StandardSelfTest; +import org.hamcrest.MatcherAssert; +import org.hamcrest.Matchers; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.MethodOrderer.OrderAnnotation; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +@TestMethodOrder(OrderAnnotation.class) +@Disabled("TODO not all GraphQL queries are implemented") +class GraphQLTest extends StandardSelfTest { + + private final ExecutorService worker = ForkJoinPool.commonPool(); + + static final String TEST_RECORDING_NAME = "archivedRecording"; + + @Test + @Order(0) + void testEnvironmentNodeListing() throws Exception { + JsonObject query = new JsonObject(); + query.put( + "query", + "query { environmentNodes(filter: { name: \"Custom Targets\" }) { name nodeType" + + " descendantTargets { name nodeType } } }"); + HttpResponse resp = + webClient + .extensions() + .post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(200)).and(Matchers.lessThan(300))); + EnvironmentNodesResponse actual = + mapper.readValue(resp.bodyAsString(), EnvironmentNodesResponse.class); + + EnvironmentNodes expected = new EnvironmentNodes(); + + EnvironmentNode jdp = new EnvironmentNode(); + jdp.name = "JDP"; + jdp.nodeType = "Realm"; + + jdp.descendantTargets = new ArrayList<>(); + Node cryostat = new Node(); + cryostat.name = "service:jmx:rmi:///jndi/rmi://localhost:0/jmxrmi"; + cryostat.nodeType = "JVM"; + jdp.descendantTargets.add(cryostat); + + Node target = new Node(); + target.name = "service:jmx:rmi:///jndi/rmi://localhost:0/jmxrmi"; + target.nodeType = "JVM"; + jdp.descendantTargets.add(target); + + expected.environmentNodes = List.of(jdp); + + MatcherAssert.assertThat(actual.data, Matchers.equalTo(expected)); + } + + @Test + @Order(1) + void testOtherContainersFound() throws Exception { + JsonObject query = new JsonObject(); + query.put( + "query", + "query { targetNodes { name nodeType labels target { alias connectUrl annotations {" + + " cryostat platform } } } }"); + HttpResponse resp = + webClient + .extensions() + .post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(200)).and(Matchers.lessThan(300))); + + TargetNodesQueryResponse actual = + mapper.readValue(resp.bodyAsString(), 
TargetNodesQueryResponse.class); + MatcherAssert.assertThat(actual.data.targetNodes, Matchers.hasSize(1)); + + TargetNode cryostat = new TargetNode(); + Target cryostatTarget = new Target(); + cryostatTarget.alias = "io.cryostat.Cryostat"; + cryostatTarget.connectUrl = "service:jmx:rmi:///jndi/rmi://localhost:0/jmxrmi"; + cryostat.name = cryostatTarget.connectUrl; + cryostat.target = cryostatTarget; + cryostat.nodeType = "JVM"; + Annotations cryostatAnnotations = new Annotations(); + cryostatAnnotations.cryostat = + Map.of( + "REALM", + "JDP", + "JAVA_MAIN", + "io.cryostat.Cryostat", + "HOST", + "localhost", + "PORT", + "0"); + cryostatAnnotations.platform = Map.of(); + cryostatTarget.annotations = cryostatAnnotations; + cryostat.labels = Map.of(); + MatcherAssert.assertThat(actual.data.targetNodes, Matchers.hasItem(cryostat)); + + String uri = "service:jmx:rmi:///jndi/rmi://localhost:0/jmxrmi"; + String mainClass = "es.andrewazor.demo.Main"; + TargetNode ext = new TargetNode(); + Target target = new Target(); + target.alias = mainClass; + target.connectUrl = uri; + ext.name = target.connectUrl; + ext.target = target; + ext.nodeType = "JVM"; + Annotations annotations = new Annotations(); + annotations.cryostat = + Map.of("REALM", "JDP", "JAVA_MAIN", mainClass, "HOST", "localhost", "PORT", "0"); + annotations.platform = Map.of(); + target.annotations = annotations; + ext.labels = Map.of(); + MatcherAssert.assertThat(actual.data.targetNodes, Matchers.hasItem(ext)); + } + + @Test + @Order(2) + void testQueryForSpecificTargetWithSpecificFields() throws Exception { + JsonObject query = new JsonObject(); + query.put( + "query", + "query { targetNodes(filter: { annotations: \"PORT == 0\" }) { name nodeType }" + + " }"); + HttpResponse resp = + webClient + .extensions() + .post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(200)).and(Matchers.lessThan(300))); + + TargetNodesQueryResponse actual = + mapper.readValue(resp.bodyAsString(), TargetNodesQueryResponse.class); + MatcherAssert.assertThat(actual.data.targetNodes, Matchers.hasSize(1)); + + String uri = "service:jmx:rmi:///jndi/rmi://localhost:0/jmxrmi"; + TargetNode ext = new TargetNode(); + ext.name = uri; + ext.nodeType = "JVM"; + MatcherAssert.assertThat(actual.data.targetNodes, Matchers.hasItem(ext)); + } + + @Test + @Order(3) + void testStartRecordingMutationOnSpecificTarget() throws Exception { + CountDownLatch latch = new CountDownLatch(2); + JsonObject query = new JsonObject(); + query.put( + "query", + "query { targetNodes(filter: { annotations: \"PORT == 0\" }) {" + + " doStartRecording(recording: { name: \"graphql-itest\", duration: 30," + + " template: \"Profiling\", templateType: \"TARGET\", archiveOnStop: true," + + " metadata: { labels: [ { key: \"newLabel\", value: \"someValue\"} ] } }) {" + + " name state duration archiveOnStop }} }"); + Map expectedLabels = + Map.of( + "template.name", + "Profiling", + "template.type", + "TARGET", + "newLabel", + "someValue"); + Future f = + worker.submit( + () -> { + try { + return expectNotification( + "ActiveRecordingCreated", 15, TimeUnit.SECONDS) + .get(); + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + latch.countDown(); + } + }); + + Thread.sleep(5000); // Sleep to setup notification listening before query resolves + + HttpResponse resp = + webClient + .extensions() + .post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + 
MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(200)).and(Matchers.lessThan(300))); + + StartRecordingMutationResponse actual = + mapper.readValue(resp.bodyAsString(), StartRecordingMutationResponse.class); + + latch.await(30, TimeUnit.SECONDS); + + // Ensure ActiveRecordingCreated notification emitted matches expected values + JsonObject notification = f.get(5, TimeUnit.SECONDS); + + JsonObject notificationRecording = + notification.getJsonObject("message").getJsonObject("recording"); + MatcherAssert.assertThat( + notificationRecording.getString("name"), Matchers.equalTo("graphql-itest")); + MatcherAssert.assertThat( + notificationRecording.getString("archiveOnStop"), Matchers.equalTo("true")); + MatcherAssert.assertThat( + notification.getJsonObject("message").getString("target"), + Matchers.equalTo( + String.format( + "service:jmx:rmi:///jndi/rmi://%s:%d/jmxrmi", "localhost", 0))); + Map notificationLabels = + notificationRecording.getJsonObject("metadata").getJsonObject("labels").getMap(); + for (var entry : expectedLabels.entrySet()) { + MatcherAssert.assertThat( + notificationLabels, Matchers.hasEntry(entry.getKey(), entry.getValue())); + } + + RecordingNodes nodes = new RecordingNodes(); + + ActiveRecording recording = new ActiveRecording(); + recording.name = "graphql-itest"; + recording.duration = 30_000L; + recording.state = "RUNNING"; + recording.archiveOnStop = true; + recording.metadata = RecordingMetadata.of(expectedLabels); + + StartRecording startRecording = new StartRecording(); + startRecording.doStartRecording = recording; + + nodes.targetNodes = List.of(startRecording); + + MatcherAssert.assertThat(actual.data, Matchers.equalTo(nodes)); + } + + @Test + @Order(4) + void testArchiveMutation() throws Exception { + Thread.sleep(5000); + JsonObject query = new JsonObject(); + query.put( + "query", + "query { targetNodes(filter: { annotations: \"PORT == 0\" }) { recordings {" + + " active { data { name doArchive { name } } } } } }"); + HttpResponse resp = + webClient + .extensions() + .post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(200)).and(Matchers.lessThan(300))); + + ArchiveMutationResponse actual = + mapper.readValue(resp.bodyAsString(), ArchiveMutationResponse.class); + + MatcherAssert.assertThat(actual.data.targetNodes, Matchers.hasSize(1)); + + TargetNode node = actual.data.targetNodes.get(0); + + MatcherAssert.assertThat(node.recordings.active.data, Matchers.hasSize(1)); + + ActiveRecording activeRecording = node.recordings.active.data.get(0); + + MatcherAssert.assertThat(activeRecording.name, Matchers.equalTo("graphql-itest")); + + ArchivedRecording archivedRecording = activeRecording.doArchive; + MatcherAssert.assertThat( + archivedRecording.name, + Matchers.matchesRegex( + "^es-andrewazor-demo-Main_graphql-itest_[0-9]{8}T[0-9]{6}Z\\.jfr$")); + } + + @Test + @Order(5) + void testActiveRecordingMetadataMutation() throws Exception { + JsonObject query = new JsonObject(); + query.put( + "query", + "query { targetNodes(filter: { annotations: \"PORT == 9093\" }) {" + + "recordings { active {" + + " data {" + + " doPutMetadata(metadata: { labels: [" + + " {key:\"template.name\",value:\"Profiling\"}," + + " {key:\"template.type\",value:\"TARGET\"}," + + " {key:\"newLabel\",value:\"newValue\"}] })" + + " { metadata { labels } } } } } } }"); + HttpResponse resp = + webClient + .extensions() + 
.post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(200)).and(Matchers.lessThan(300))); + + ActiveMutationResponse actual = + mapper.readValue(resp.bodyAsString(), ActiveMutationResponse.class); + + MatcherAssert.assertThat(actual.data.targetNodes, Matchers.hasSize(1)); + + TargetNode node = actual.data.targetNodes.get(0); + + MatcherAssert.assertThat(node.recordings.active.data, Matchers.hasSize(1)); + + ActiveRecording activeRecording = node.recordings.active.data.get(0); + + MatcherAssert.assertThat( + activeRecording.metadata, + Matchers.equalTo( + RecordingMetadata.of( + Map.of( + "template.name", + "Profiling", + "template.type", + "TARGET", + "newLabel", + "newValue")))); + } + + @Test + @Order(6) + void testArchivedRecordingMetadataMutation() throws Exception { + JsonObject query = new JsonObject(); + query.put( + "query", + "query { targetNodes(filter: { annotations: \"PORT == 0\" }) {" + + "recordings { archived {" + + " data { name size " + + " doPutMetadata(metadata: { labels: [" + + " {key:\"template.name\",value:\"Profiling\"}," + + " {key:\"template.type\",value:\"TARGET\"}," + + " {key:\"newArchivedLabel\",value:\"newArchivedValue\"}] })" + + " { metadata { labels } } } } } } }"); + HttpResponse resp = + webClient + .extensions() + .post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(200)).and(Matchers.lessThan(300))); + + ArchiveMutationResponse actual = + mapper.readValue(resp.bodyAsString(), ArchiveMutationResponse.class); + + MatcherAssert.assertThat(actual.data.targetNodes, Matchers.hasSize(1)); + + TargetNode node = actual.data.targetNodes.get(0); + + MatcherAssert.assertThat(node.recordings.archived.data, Matchers.hasSize(1)); + + ArchivedRecording archivedRecording = node.recordings.archived.data.get(0); + MatcherAssert.assertThat(archivedRecording.size, Matchers.greaterThan(0L)); + + MatcherAssert.assertThat( + archivedRecording.metadata, + Matchers.equalTo( + RecordingMetadata.of( + Map.of( + "template.name", + "Profiling", + "template.type", + "TARGET", + "newArchivedLabel", + "newArchivedValue")))); + } + + @Test + @Order(7) + void testDeleteMutation() throws Exception { + JsonObject query = new JsonObject(); + query.put( + "query", + "query { targetNodes(filter: { annotations: \"PORT == 0\" }) { recordings {" + + " active { data { name doDelete { name }" + + " } aggregate { count } }" + + " archived { data { name doDelete { name }" + + " } aggregate { count size } }" + + " } } }"); + HttpResponse resp = + webClient + .extensions() + .post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(200)).and(Matchers.lessThan(300))); + + DeleteMutationResponse actual = + mapper.readValue(resp.bodyAsString(), DeleteMutationResponse.class); + + MatcherAssert.assertThat(actual.data.targetNodes, Matchers.hasSize(1)); + + TargetNode node = actual.data.targetNodes.get(0); + + MatcherAssert.assertThat(node.recordings.active.data, Matchers.hasSize(1)); + MatcherAssert.assertThat(node.recordings.archived.data, Matchers.hasSize(1)); + MatcherAssert.assertThat(node.recordings.archived.aggregate.count, Matchers.equalTo(1L)); + MatcherAssert.assertThat(node.recordings.archived.aggregate.size, Matchers.greaterThan(0L)); + + ActiveRecording activeRecording 
= node.recordings.active.data.get(0); + ArchivedRecording archivedRecording = node.recordings.archived.data.get(0); + + MatcherAssert.assertThat(activeRecording.name, Matchers.equalTo("graphql-itest")); + MatcherAssert.assertThat(activeRecording.doDelete.name, Matchers.equalTo("graphql-itest")); + + MatcherAssert.assertThat( + archivedRecording.name, + Matchers.matchesRegex( + "^es-andrewazor-demo-Main_graphql-itest_[0-9]{8}T[0-9]{6}Z\\.jfr$")); + } + + @Test + @Order(8) + void testNodesHaveIds() throws Exception { + JsonObject query = new JsonObject(); + query.put( + "query", + "query { environmentNodes(filter: { name: \"JDP\" }) { id descendantTargets { id }" + + " } }"); + HttpResponse resp = + webClient + .extensions() + .post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(200)).and(Matchers.lessThan(300))); + + // if any of the nodes in the query did not have an ID property then the request + // would fail + EnvironmentNodesResponse actual = + mapper.readValue(resp.bodyAsString(), EnvironmentNodesResponse.class); + Set observedIds = new HashSet<>(); + for (var env : actual.data.environmentNodes) { + // ids should be unique + MatcherAssert.assertThat(observedIds, Matchers.not(Matchers.contains(env.id))); + observedIds.add(env.id); + for (var target : env.descendantTargets) { + MatcherAssert.assertThat(observedIds, Matchers.not(Matchers.contains(target.id))); + observedIds.add(target.id); + } + } + } + + @Test + @Order(9) + void testQueryForSpecificTargetsByNames() throws Exception { + JsonObject query = new JsonObject(); + query.put( + "query", + String.format( + "query { targetNodes(filter: { names:" + + " [\"service:jmx:rmi:///jndi/rmi://localhost:0/jmxrmi\"," + + " \"service:jmx:rmi:///jndi/rmi://localhost:9091/jmxrmi\"] }) {" + + " name nodeType } }")); + HttpResponse resp = + webClient + .extensions() + .post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(200)).and(Matchers.lessThan(300))); + + TargetNodesQueryResponse actual = + mapper.readValue(resp.bodyAsString(), TargetNodesQueryResponse.class); + List targetNodes = actual.data.targetNodes; + + int expectedSize = 2; + + assertThat(targetNodes.size(), is(expectedSize)); + + TargetNode target1 = new TargetNode(); + target1.name = "service:jmx:rmi:///jndi/rmi://localhost:0/jmxrmi"; + target1.nodeType = "JVM"; + TargetNode target2 = new TargetNode(); + target2.name = "service:jmx:rmi:///jndi/rmi://localhost:9091/jmxrmi"; + target2.nodeType = "JVM"; + + assertThat(targetNodes, hasItem(target1)); + assertThat(targetNodes, hasItem(target2)); + } + + @Test + @Order(10) + public void testQueryForFilteredActiveRecordingsByNames() throws Exception { + // Check preconditions + CompletableFuture listRespFuture1 = new CompletableFuture<>(); + webClient + .get( + String.format( + "/api/v1/targets/%s/recordings", + getSelfReferenceConnectUrlEncoded())) + .send( + ar -> { + if (assertRequestStatus(ar, listRespFuture1)) { + listRespFuture1.complete(ar.result().bodyAsJsonArray()); + } + }); + JsonArray listResp = listRespFuture1.get(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + Assertions.assertTrue(listResp.isEmpty()); + + // Create two new recordings + CompletableFuture createRecordingFuture1 = new CompletableFuture<>(); + MultiMap form1 = MultiMap.caseInsensitiveMultiMap(); + form1.add("recordingName", "Recording1"); + 
form1.add("duration", "5"); + form1.add("events", "template=ALL"); + webClient + .post( + String.format( + "/api/v1/targets/%s/recordings", + getSelfReferenceConnectUrlEncoded())) + .sendForm( + form1, + ar -> { + if (assertRequestStatus(ar, createRecordingFuture1)) { + createRecordingFuture1.complete(null); + } + }); + createRecordingFuture1.get(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + + CompletableFuture createRecordingFuture2 = new CompletableFuture<>(); + MultiMap form2 = MultiMap.caseInsensitiveMultiMap(); + form2.add("recordingName", "Recording2"); + form2.add("duration", "5"); + form2.add("events", "template=ALL"); + webClient + .post( + String.format( + "/api/v1/targets/%s/recordings", + getSelfReferenceConnectUrlEncoded())) + .sendForm( + form2, + ar -> { + if (assertRequestStatus(ar, createRecordingFuture2)) { + createRecordingFuture2.complete(null); + } + }); + createRecordingFuture2.get(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + + // GraphQL Query to filter Active recordings by names + JsonObject query = new JsonObject(); + query.put( + "query", + "query { targetNodes (filter: {name:" + + " \"service:jmx:rmi:///jndi/rmi://localhost:0/jmxrmi\"}){ recordings" + + " {active(filter: { names: [\"Recording1\", \"Recording2\",\"Recording3\"] })" + + " {data {name}}}}}"); + HttpResponse resp = + webClient + .extensions() + .post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(200)).and(Matchers.lessThan(300))); + + TargetNodesQueryResponse graphqlResp = + mapper.readValue(resp.bodyAsString(), TargetNodesQueryResponse.class); + + List filterNames = Arrays.asList("Recording1", "Recording2"); + + List filteredRecordings = + graphqlResp.data.targetNodes.stream() + .flatMap(targetNode -> targetNode.recordings.active.data.stream()) + .filter(recording -> filterNames.contains(recording.name)) + .collect(Collectors.toList()); + + MatcherAssert.assertThat(filteredRecordings.size(), Matchers.equalTo(2)); + ActiveRecording r1 = new ActiveRecording(); + r1.name = "Recording1"; + ActiveRecording r2 = new ActiveRecording(); + r2.name = "Recording2"; + + assertThat(filteredRecordings, hasItem(r1)); + assertThat(filteredRecordings, hasItem(r2)); + + // Delete recordings + for (ActiveRecording recording : filteredRecordings) { + String recordingName = recording.name; + CompletableFuture deleteRecordingFuture = new CompletableFuture<>(); + webClient + .delete( + String.format( + "/api/v1/targets/%s/recordings/%s", + getSelfReferenceConnectUrlEncoded(), recordingName)) + .send( + ar -> { + if (assertRequestStatus(ar, deleteRecordingFuture)) { + deleteRecordingFuture.complete(null); + } + }); + deleteRecordingFuture.get(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + // Verify no recordings available + CompletableFuture listRespFuture4 = new CompletableFuture<>(); + webClient + .get( + String.format( + "/api/v1/targets/%s/recordings", + getSelfReferenceConnectUrlEncoded())) + .send( + ar -> { + if (assertRequestStatus(ar, listRespFuture4)) { + listRespFuture4.complete(ar.result().bodyAsJsonArray()); + } + }); + listResp = listRespFuture4.get(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + + MatcherAssert.assertThat( + "list should have size 0 after deleting recordings", + listResp.size(), + Matchers.equalTo(0)); + } + + @Test + @Order(11) + public void shouldReturnArchivedRecordingsFilteredByNames() throws Exception { + // Check preconditions + CompletableFuture listRespFuture1 = new 
CompletableFuture<>(); + webClient + .get( + String.format( + "/api/v1/targets/%s/recordings", + getSelfReferenceConnectUrlEncoded())) + .send( + ar -> { + if (assertRequestStatus(ar, listRespFuture1)) { + listRespFuture1.complete(ar.result().bodyAsJsonArray()); + } + }); + JsonArray listResp = listRespFuture1.get(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + Assertions.assertTrue(listResp.isEmpty()); + + // Create a new recording + CompletableFuture createRecordingFuture = new CompletableFuture<>(); + MultiMap form = MultiMap.caseInsensitiveMultiMap(); + form.add("recordingName", TEST_RECORDING_NAME); + form.add("duration", "5"); + form.add("events", "template=ALL"); + webClient + .post( + String.format( + "/api/v1/targets/%s/recordings", + getSelfReferenceConnectUrlEncoded())) + .sendForm( + form, + ar -> { + if (assertRequestStatus(ar, createRecordingFuture)) { + createRecordingFuture.complete(null); + } + }); + createRecordingFuture.get(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + + // Archive the recording + CompletableFuture archiveRecordingFuture = new CompletableFuture<>(); + webClient + .patch( + String.format( + "/api/v1/targets/%s/recordings/%s", + getSelfReferenceConnectUrlEncoded(), TEST_RECORDING_NAME)) + .putHeader(HttpHeaders.CONTENT_TYPE.toString(), HttpMimeType.PLAINTEXT.mime()) + .sendBuffer( + Buffer.buffer("SAVE"), + ar -> { + if (assertRequestStatus(ar, archiveRecordingFuture)) { + archiveRecordingFuture.complete(null); + } else { + + archiveRecordingFuture.completeExceptionally( + new RuntimeException("Archive request failed")); + } + }); + + archiveRecordingFuture.get(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + + // retrieve to match the exact name + CompletableFuture archivedRecordingsFuture2 = new CompletableFuture<>(); + webClient + .get(String.format("/api/v1/recordings")) + .send( + ar -> { + if (assertRequestStatus(ar, archivedRecordingsFuture2)) { + archivedRecordingsFuture2.complete(ar.result().bodyAsJsonArray()); + } + }); + JsonArray retrivedArchivedRecordings = + archivedRecordingsFuture2.get(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + JsonObject retrievedArchivedrecordings = retrivedArchivedRecordings.getJsonObject(0); + String retrievedArchivedRecordingsName = retrievedArchivedrecordings.getString("name"); + + // GraphQL Query to filter Archived recordings by names + CompletableFuture resp2 = new CompletableFuture<>(); + + JsonObject query = new JsonObject(); + query.put( + "query", + "query { targetNodes {" + + "recordings {" + + "archived(filter: { names: [\"" + + retrievedArchivedRecordingsName + + "\",\"someOtherName\"] }) {" + + "data {" + + "name" + + "}" + + "}" + + "}" + + "}" + + "}"); + HttpResponse resp = + webClient + .extensions() + .post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(200)).and(Matchers.lessThan(300))); + + TargetNodesQueryResponse graphqlResp = + mapper.readValue(resp.bodyAsString(), TargetNodesQueryResponse.class); + + List archivedRecordings2 = + graphqlResp.data.targetNodes.stream() + .flatMap(targetNode -> targetNode.recordings.archived.data.stream()) + .collect(Collectors.toList()); + + int filteredRecordingsCount = archivedRecordings2.size(); + Assertions.assertEquals( + 1, filteredRecordingsCount, "Number of filtered recordings should be 1"); + + ArchivedRecording archivedRecording = archivedRecordings2.get(0); + String filteredName = archivedRecording.name; + Assertions.assertEquals( + filteredName, 
+ retrievedArchivedRecordingsName, + "Filtered name should match the archived recording name"); + + // Delete archived recording by name + for (ArchivedRecording archrecording : archivedRecordings2) { + String nameMatch = archrecording.name; + + CompletableFuture deleteFuture = new CompletableFuture<>(); + webClient + .delete( + String.format( + "/api/beta/recordings/%s/%s", + getSelfReferenceConnectUrlEncoded(), nameMatch)) + .send( + ar -> { + if (assertRequestStatus(ar, deleteFuture)) { + deleteFuture.complete(null); + } else { + deleteFuture.completeExceptionally( + new RuntimeException("Delete request failed")); + } + }); + + deleteFuture.get(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + // Retrieve the list of updated archived recordings to verify that the targeted + // recordings + // have been deleted + CompletableFuture updatedArchivedRecordingsFuture = new CompletableFuture<>(); + webClient + .get("/api/v1/recordings") + .send( + ar -> { + if (assertRequestStatus(ar, updatedArchivedRecordingsFuture)) { + updatedArchivedRecordingsFuture.complete( + ar.result().bodyAsJsonArray()); + } + }); + + JsonArray updatedArchivedRecordings = + updatedArchivedRecordingsFuture.get(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + + // Assert that the targeted recordings have been deleted + boolean recordingsDeleted = + updatedArchivedRecordings.stream() + .noneMatch( + json -> { + JsonObject recording = (JsonObject) json; + return recording.getString("name").equals(TEST_RECORDING_NAME); + }); + + Assertions.assertTrue( + recordingsDeleted, "The targeted archived recordings should be deleted"); + + // Clean up what we created + CompletableFuture deleteRespFuture1 = new CompletableFuture<>(); + webClient + .delete( + String.format( + "/api/v1/targets/%s/recordings/%s", + getSelfReferenceConnectUrlEncoded(), TEST_RECORDING_NAME)) + .send( + ar -> { + if (assertRequestStatus(ar, deleteRespFuture1)) { + deleteRespFuture1.complete(null); + } + }); + + deleteRespFuture1.get(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + + CompletableFuture savedRecordingsFuture = new CompletableFuture<>(); + webClient + .get("/api/v1/recordings") + .send( + ar -> { + if (assertRequestStatus(ar, savedRecordingsFuture)) { + savedRecordingsFuture.complete(ar.result().bodyAsJsonArray()); + } + }); + + JsonArray savedRecordings = + savedRecordingsFuture.get(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + + for (Object savedRecording : savedRecordings) { + String recordingName = ((JsonObject) savedRecording).getString("name"); + if (recordingName.matches("archivedRecordings")) { + CompletableFuture deleteRespFuture2 = new CompletableFuture<>(); + webClient + .delete( + String.format( + "/api/beta/recordings/%s/%s", + getSelfReferenceConnectUrlEncoded(), recordingName)) + .send( + ar -> { + if (assertRequestStatus(ar, deleteRespFuture2)) { + deleteRespFuture2.complete(null); + } + }); + + deleteRespFuture2.get(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + } + } + + @Test + @Order(12) + public void testQueryforFilteredEnvironmentNodesByNames() throws Exception { + JsonObject query = new JsonObject(); + query.put( + "query", + "query { environmentNodes(filter: { names: [\"anotherName1\"," + + " \"JDP\",\"anotherName2\"] }) { name nodeType } }"); + HttpResponse resp = + webClient + .extensions() + .post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(200)).and(Matchers.lessThan(300))); + + EnvironmentNodesResponse 
actual = + mapper.readValue(resp.bodyAsString(), EnvironmentNodesResponse.class); + List environmentNodes = actual.data.environmentNodes; + + Assertions.assertEquals(1, environmentNodes.size(), "The list filtered should be 1"); + + boolean nameExists = false; + for (EnvironmentNode environmentNode : environmentNodes) { + if (environmentNode.name.matches("JDP")) { + nameExists = true; + break; + } + } + Assertions.assertTrue(nameExists, "Name not found"); + } + + @Test + @Order(13) + void testReplaceAlwaysOnStoppedRecording() throws Exception { + try { + // Start a Recording + JsonObject notificationRecording = startRecording(); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("RUNNING", notificationRecording.getString("state")); + + // Stop the Recording + notificationRecording = stopRecording(); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("STOPPED", notificationRecording.getString("state")); + + // Restart the recording with replace:ALWAYS + notificationRecording = restartRecording("ALWAYS"); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("RUNNING", notificationRecording.getString("state")); + } finally { + // Delete the Recording + deleteRecording(); + } + } + + @Test + @Order(14) + void testReplaceNeverOnStoppedRecording() throws Exception { + try { + // Start a Recording + JsonObject notificationRecording = startRecording(); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("RUNNING", notificationRecording.getString("state")); + + // Stop the Recording + notificationRecording = stopRecording(); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("STOPPED", notificationRecording.getString("state")); + + // Restart the recording with replace:NEVER + JsonObject error = restartRecordingWithError("NEVER"); + Assertions.assertTrue( + error.getString("message") + .contains("Recording with name \"test\" already exists"), + "Expected error message to contain 'Recording with name \"test\" already" + + " exists'"); + } finally { + // Delete the Recording + deleteRecording(); + } + } + + @Test + @Order(15) + void testReplaceStoppedOnStoppedRecording() throws Exception { + try { + // Start a Recording + JsonObject notificationRecording = startRecording(); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("RUNNING", notificationRecording.getString("state")); + + // Stop the Recording + notificationRecording = stopRecording(); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("STOPPED", notificationRecording.getString("state")); + + // Restart the recording with replace:STOPPED + notificationRecording = restartRecording("STOPPED"); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("RUNNING", notificationRecording.getString("state")); + } finally { + // Delete the Recording + deleteRecording(); + } + } + + @ParameterizedTest + @ValueSource(strings = {"STOPPED", "NEVER"}) + @Order(16) + void testReplaceStoppedOrNeverOnRunningRecording(String replace) throws Exception { + try { + // Start a Recording + JsonObject notificationRecording = startRecording(); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("RUNNING", 
notificationRecording.getString("state")); + + // Restart the recording with replace:NEVER + JsonObject error = restartRecordingWithError("STOPPED"); + Assertions.assertTrue( + error.getString("message") + .contains("Recording with name \"test\" already exists"), + "Expected error message to contain 'Recording with name \"test\" already" + + " exists'"); + } finally { + // Delete the Recording + deleteRecording(); + } + } + + @Test + @Order(17) + void testReplaceAlwaysOnRunningRecording() throws Exception { + try { + // Start a Recording + JsonObject notificationRecording = startRecording(); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("RUNNING", notificationRecording.getString("state")); + + // Restart the recording with replace:ALWAYS + notificationRecording = restartRecording("ALWAYS"); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("RUNNING", notificationRecording.getString("state")); + } finally { + // Delete the Recording + deleteRecording(); + } + } + + @Test + @Order(18) + void testRestartTrueOnRunningRecording() throws Exception { + try { + // Start a Recording + JsonObject notificationRecording = startRecording(); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("RUNNING", notificationRecording.getString("state")); + + // Restart the recording with replace:ALWAYS + notificationRecording = restartRecording(true); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("RUNNING", notificationRecording.getString("state")); + } finally { + // Delete the Recording + deleteRecording(); + } + } + + @Test + @Order(19) + void testRestartFalseOnRunningRecording() throws Exception { + try { + // Start a Recording + JsonObject notificationRecording = startRecording(); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("RUNNING", notificationRecording.getString("state")); + + // Restart the recording with replace:NEVER + JsonObject error = restartRecordingWithError(false); + Assertions.assertTrue( + error.getString("message") + .contains("Recording with name \"test\" already exists"), + "Expected error message to contain 'Recording with name \"test\" already" + + " exists'"); + } finally { + // Delete the Recording + deleteRecording(); + } + } + + @Test + @Order(20) + void testRestartTrueOnStoppedRecording() throws Exception { + try { + // Start a Recording + JsonObject notificationRecording = startRecording(); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("RUNNING", notificationRecording.getString("state")); + + // Stop the Recording + notificationRecording = stopRecording(); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("STOPPED", notificationRecording.getString("state")); + + // Restart the recording with replace:ALWAYS + notificationRecording = restartRecording(true); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("RUNNING", notificationRecording.getString("state")); + } finally { + // Delete the Recording + deleteRecording(); + } + } + + @Test + @Order(21) + void testRestartFalseOnStoppedRecording() throws Exception { + try { + // Start a Recording + JsonObject notificationRecording = startRecording(); + Assertions.assertEquals("test", 
notificationRecording.getString("name")); + Assertions.assertEquals("RUNNING", notificationRecording.getString("state")); + + // Stop the Recording + notificationRecording = stopRecording(); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("STOPPED", notificationRecording.getString("state")); + + // Restart the recording with replace:NEVER + JsonObject error = restartRecordingWithError(false); + Assertions.assertTrue( + error.getString("message") + .contains("Recording with name \"test\" already exists"), + "Expected error message to contain 'Recording with name \"test\" already" + + " exists'"); + } finally { + // Delete the Recording + deleteRecording(); + } + } + + @ParameterizedTest + @ValueSource(strings = {"ALWAYS", "STOPPED", "NEVER"}) + @Order(22) + void testStartRecordingwithReplaceNever(String replace) throws Exception { + try { + JsonObject notificationRecording = restartRecording(replace); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("RUNNING", notificationRecording.getString("state")); + } finally { + // Delete the Recording + deleteRecording(); + } + } + + @ParameterizedTest + @ValueSource(booleans = {true, false}) + @Order(23) + void testRestartRecordingWithReplaceTrue(boolean restart) throws Exception { + try { + JsonObject notificationRecording = restartRecording(restart); + Assertions.assertEquals("test", notificationRecording.getString("name")); + Assertions.assertEquals("RUNNING", notificationRecording.getString("state")); + } finally { + // Delete the recording + deleteRecording(); + } + } + + static class Target { + String alias; + String connectUrl; + Annotations annotations; + + @Override + public int hashCode() { + return Objects.hash(alias, connectUrl, annotations); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Target other = (Target) obj; + return Objects.equals(alias, other.alias) + && Objects.equals(connectUrl, other.connectUrl) + && Objects.equals(annotations, other.annotations); + } + } + + static class Annotations { + Map platform; + Map cryostat; + + @Override + public int hashCode() { + return Objects.hash(cryostat, platform); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Annotations other = (Annotations) obj; + return Objects.equals(cryostat, other.cryostat) + && Objects.equals(platform, other.platform); + } + } + + static class ArchivedRecording { + String name; + String reportUrl; + String downloadUrl; + RecordingMetadata metadata; + long size; + long archivedTime; + + ArchivedRecording doDelete; + + @Override + public String toString() { + return "ArchivedRecording [doDelete=" + + doDelete + + ", downloadUrl=" + + downloadUrl + + ", metadata=" + + metadata + + ", name=" + + name + + ", reportUrl=" + + reportUrl + + ", size=" + + size + + ", archivedTime=" + + archivedTime + + "]"; + } + + @Override + public int hashCode() { + return Objects.hash(doDelete, downloadUrl, metadata, name, reportUrl, size); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + ArchivedRecording other = (ArchivedRecording) obj; + 
return Objects.equals(doDelete, other.doDelete) + && Objects.equals(downloadUrl, other.downloadUrl) + && Objects.equals(metadata, other.metadata) + && Objects.equals(name, other.name) + && Objects.equals(reportUrl, other.reportUrl) + && Objects.equals(size, other.size); + } + } + + static class AggregateInfo { + long count; + long size; + + @Override + public String toString() { + return "AggregateInfo [count=" + count + ", size=" + size + "]"; + } + + @Override + public int hashCode() { + return Objects.hash(count, size); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + AggregateInfo other = (AggregateInfo) obj; + if (count != other.count) return false; + if (size != other.size) return false; + return true; + } + } + + static class Archived { + List data; + AggregateInfo aggregate; + + @Override + public String toString() { + return "Archived [data=" + data + ", aggregate=" + aggregate + "]"; + } + + @Override + public int hashCode() { + return Objects.hash(data, aggregate); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Archived other = (Archived) obj; + return Objects.equals(data, other.data) && Objects.equals(aggregate, other.aggregate); + } + } + + static class Recordings { + Active active; + Archived archived; + + @Override + public String toString() { + return "Recordings [active=" + active + ", archived=" + archived + "]"; + } + + @Override + public int hashCode() { + return Objects.hash(active, archived); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Recordings other = (Recordings) obj; + return Objects.equals(active, other.active) && Objects.equals(archived, other.archived); + } + } + + static class TargetNode { + String name; + String nodeType; + Map labels; + Target target; + Recordings recordings; + ActiveRecording doStartRecording; + + @Override + public String toString() { + return "TargetNode [doStartRecording=" + + doStartRecording + + ", labels=" + + labels + + ", name=" + + name + + ", nodeType=" + + nodeType + + ", recordings=" + + recordings + + ", target=" + + target + + "]"; + } + + @Override + public int hashCode() { + return Objects.hash(doStartRecording, labels, name, nodeType, recordings, target); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + TargetNode other = (TargetNode) obj; + return Objects.equals(doStartRecording, other.doStartRecording) + && Objects.equals(labels, other.labels) + && Objects.equals(name, other.name) + && Objects.equals(nodeType, other.nodeType) + && Objects.equals(recordings, other.recordings) + && Objects.equals(target, other.target); + } + } + + static class TargetNodes { + + List targetNodes; + + @Override + public int hashCode() { + return Objects.hash(targetNodes); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + TargetNodes other = (TargetNodes) obj; + return Objects.equals(targetNodes, other.targetNodes); + } + + @Override 
+ public String toString() { + return "TargetNodes [targetNodes=" + targetNodes + "]"; + } + } + + static class TargetNodesQueryResponse { + TargetNodes data; + + @Override + public int hashCode() { + return Objects.hash(data); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + TargetNodesQueryResponse other = (TargetNodesQueryResponse) obj; + return Objects.equals(data, other.data); + } + } + + static class Active { + List data; + AggregateInfo aggregate; + + @Override + public String toString() { + return "Active [data=" + data + ", aggregate=" + aggregate + "]"; + } + + @Override + public int hashCode() { + return Objects.hash(data, aggregate); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Active other = (Active) obj; + return Objects.equals(data, other.data) && Objects.equals(aggregate, other.aggregate); + } + } + + static class ActiveRecording { + String name; + String reportUrl; + String downloadUrl; + RecordingMetadata metadata; + String state; + long startTime; + long duration; + boolean continuous; + boolean toDisk; + long maxSize; + long maxAge; + boolean archiveOnStop; + + ArchivedRecording doArchive; + ActiveRecording doDelete; + + @Override + public int hashCode() { + return Objects.hash( + continuous, + doArchive, + doDelete, + downloadUrl, + duration, + maxAge, + maxSize, + archiveOnStop, + metadata, + name, + reportUrl, + startTime, + state, + toDisk); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + ActiveRecording other = (ActiveRecording) obj; + return continuous == other.continuous + && Objects.equals(doArchive, other.doArchive) + && Objects.equals(doDelete, other.doDelete) + && Objects.equals(downloadUrl, other.downloadUrl) + && duration == other.duration + && maxAge == other.maxAge + && maxSize == other.maxSize + && archiveOnStop == other.archiveOnStop + && Objects.equals(metadata, other.metadata) + && Objects.equals(name, other.name) + && Objects.equals(reportUrl, other.reportUrl) + && startTime == other.startTime + && Objects.equals(state, other.state) + && toDisk == other.toDisk; + } + + @Override + public String toString() { + return "ActiveRecording [continuous=" + + continuous + + ", doArchive=" + + doArchive + + ", doDelete=" + + doDelete + + ", downloadUrl=" + + downloadUrl + + ", duration=" + + duration + + ", maxAge=" + + maxAge + + ", maxSize=" + + maxSize + + ", archiveOnStop=" + + archiveOnStop + + ", metadata=" + + metadata + + ", name=" + + name + + ", reportUrl=" + + reportUrl + + ", startTime=" + + startTime + + ", state=" + + state + + ", toDisk=" + + toDisk + + "]"; + } + } + + static class RecordingMetadata { + Map labels; + + public static RecordingMetadata of(Map of) { + return null; + } + + @Override + public int hashCode() { + return Objects.hash(labels); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + RecordingMetadata other = (RecordingMetadata) obj; + return Objects.equals(labels, other.labels); + } + + @Override + public String toString() { + return 
"RecordingMetadata [labels=" + labels + "]"; + } + } + + static class StartRecording { + ActiveRecording doStartRecording; + ArchivedRecording doArchive; + ActiveRecording doPutMetadata; + + @Override + public int hashCode() { + return Objects.hash(doArchive, doStartRecording, doPutMetadata); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + StartRecording other = (StartRecording) obj; + return Objects.equals(doArchive, other.doArchive) + && Objects.equals(doStartRecording, other.doStartRecording) + && Objects.equals(doPutMetadata, other.doPutMetadata); + } + + @Override + public String toString() { + return "StartRecording [doArchive=" + + doArchive + + ", doStartRecording=" + + doStartRecording + + ", doPutMetadata=" + + doPutMetadata + + "]"; + } + } + + static class RecordingNodes { + List targetNodes; + + @Override + public int hashCode() { + return Objects.hash(targetNodes); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + RecordingNodes other = (RecordingNodes) obj; + return Objects.equals(targetNodes, other.targetNodes); + } + + @Override + public String toString() { + return "RecordingNodes [targetNodes=" + targetNodes + "]"; + } + } + + static class StartRecordingMutationResponse { + RecordingNodes data; + + @Override + public int hashCode() { + return Objects.hash(data); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + StartRecordingMutationResponse other = (StartRecordingMutationResponse) obj; + return Objects.equals(data, other.data); + } + + @Override + public String toString() { + return "StartRecordingMutationResponse [data=" + data + "]"; + } + } + + static class Node { + int id; + String name; + String nodeType; + + @Override + public String toString() { + return "Node [id=" + id + ", name=" + name + ", nodeType=" + nodeType + "]"; + } + + @Override + public int hashCode() { + return Objects.hash(id, name, nodeType); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Node other = (Node) obj; + return id == other.id + && Objects.equals(name, other.name) + && Objects.equals(nodeType, other.nodeType); + } + } + + static class EnvironmentNode extends Node { + List descendantTargets; + + @Override + public String toString() { + return "EnvironmentNode [descendantTargets=" + + descendantTargets + + ", name=" + + name + + ", nodeType=" + + nodeType + + "]"; + } + + @Override + public int hashCode() { + return Objects.hash(descendantTargets); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + EnvironmentNode other = (EnvironmentNode) obj; + return Objects.equals(descendantTargets, other.descendantTargets); + } + } + + static class EnvironmentNodes { + List environmentNodes; + + @Override + public String toString() { + return "EnvironmentNodes [environmentNodes=" + environmentNodes + "]"; + } + + @Override + public int hashCode() { + return 
Objects.hash(environmentNodes); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + EnvironmentNodes other = (EnvironmentNodes) obj; + return Objects.equals(environmentNodes, other.environmentNodes); + } + } + + static class EnvironmentNodesResponse { + EnvironmentNodes data; + + @Override + public String toString() { + return "EnvironmentNodesResponse [data=" + data + "]"; + } + + public EnvironmentNodes getData() { + return data; + } + + public void setData(EnvironmentNodes data) { + this.data = data; + } + } + + static class ArchiveMutationResponse { + TargetNodes data; + + @Override + public String toString() { + return "ArchiveMutationResponse [data=" + data + "]"; + } + + @Override + public int hashCode() { + return Objects.hash(data); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + ArchiveMutationResponse other = (ArchiveMutationResponse) obj; + return Objects.equals(data, other.data); + } + } + + static class ActiveMutationResponse extends ArchiveMutationResponse { + @Override + public String toString() { + return "ActiveMutationResponse [data=" + data + "]"; + } + } + + static class DeleteMutationResponse { + TargetNodes data; + + @Override + public String toString() { + return "DeleteMutationResponse [data=" + data + "]"; + } + + @Override + public int hashCode() { + return Objects.hash(data); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + DeleteMutationResponse other = (DeleteMutationResponse) obj; + return Objects.equals(data, other.data); + } + } + + // start recording + private JsonObject startRecording() throws Exception { + CountDownLatch latch = new CountDownLatch(1); + + JsonObject query = new JsonObject(); + query.put( + "query", + "query { targetNodes(filter: {" + + " name:\"service:jmx:rmi:///jndi/rmi://localhost:0/jmxrmi\" }) {" + + " doStartRecording(recording: { name: \"test\", template:\"Profiling\"," + + " templateType: \"TARGET\"}) { name state}} }"); + Future f = + worker.submit( + () -> { + try { + return expectNotification( + "ActiveRecordingCreated", 15, TimeUnit.SECONDS) + .get(); + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + latch.countDown(); + } + }); + + Thread.sleep(5000); + + HttpResponse resp = + webClient + .extensions() + .post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(200)).and(Matchers.lessThan(300))); + + latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + + JsonObject notification = f.get(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + return notification.getJsonObject("message").getJsonObject("recording"); + } + + // Stop the Recording + private JsonObject stopRecording() throws Exception { + CountDownLatch latch = new CountDownLatch(1); + + JsonObject query = new JsonObject(); + query.put( + "query", + "query { targetNodes(filter: { name:" + + " \"service:jmx:rmi:///jndi/rmi://localhost:0/jmxrmi\" }) {" + + " recordings { active { data { doStop { name state } } } } } }"); + + Future f2 = + worker.submit( + () -> { + try { + return expectNotification( + 
"ActiveRecordingStopped", 15, TimeUnit.SECONDS) + .get(); + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + latch.countDown(); + } + }); + + Thread.sleep(5000); + + HttpResponse resp = + webClient + .extensions() + .post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(200)).and(Matchers.lessThan(300))); + + latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + + JsonObject notification = f2.get(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + return notification.getJsonObject("message").getJsonObject("recording"); + } + + // Delete the Recording + private void deleteRecording() throws Exception { + JsonObject query = new JsonObject(); + query.put( + "query", + "query { targetNodes(filter: { name:" + + " \"service:jmx:rmi:///jndi/rmi://localhost:0/jmxrmi\" }) {" + + " recordings { active { data { doDelete { name state } } } } } }"); + + Thread.sleep(5000); + + HttpResponse resp = + webClient + .extensions() + .post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(200)).and(Matchers.lessThan(300))); + } + + // Restart the recording with given replacement policy + private JsonObject restartRecording(String replace) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + + JsonObject query = new JsonObject(); + query.put( + "query", + String.format( + "query { targetNodes(filter: { name:" + + " \"service:jmx:rmi:///jndi/rmi://localhost:0/jmxrmi\" }) {" + + " doStartRecording(recording: { name: \"test\"," + + " template:\"Profiling\", templateType: \"TARGET\", replace: %s}) {" + + " name state }} }", + replace)); + Future f = + worker.submit( + () -> { + try { + return expectNotification( + "ActiveRecordingCreated", 15, TimeUnit.SECONDS) + .get(); + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + latch.countDown(); + } + }); + + Thread.sleep(5000); + + HttpResponse resp = + webClient + .extensions() + .post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(200)).and(Matchers.lessThan(300))); + + latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + + JsonObject notification = f.get(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + return notification.getJsonObject("message").getJsonObject("recording"); + } + + private JsonObject restartRecording(boolean restart) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + + JsonObject query = new JsonObject(); + query.put( + "query", + String.format( + "query { targetNodes(filter: { name:" + + " \"service:jmx:rmi:///jndi/rmi://localhost:0/jmxrmi\" }) {" + + " doStartRecording(recording: { name: \"test\"," + + " template:\"Profiling\", templateType: \"TARGET\", restart: %b}) {" + + " name state }} }", + restart)); + Future f = + worker.submit( + () -> { + try { + return expectNotification( + "ActiveRecordingCreated", 15, TimeUnit.SECONDS) + .get(); + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + latch.countDown(); + } + }); + + Thread.sleep(5000); + + HttpResponse resp = + webClient + .extensions() + .post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(200)).and(Matchers.lessThan(300))); + + 
latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + + JsonObject notification = f.get(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS); + return notification.getJsonObject("message").getJsonObject("recording"); + } + + private JsonObject restartRecordingWithError(String replace) throws Exception { + JsonObject query = new JsonObject(); + + query.put( + "query", + String.format( + "query { targetNodes(filter: { name:" + + " \"service:jmx:rmi:///jndi/rmi://localhost:0/jmxrmi\"" + + " }) { doStartRecording(recording: { name: \"test\"," + + " template:\"Profiling\", templateType: \"TARGET\", replace: %s})" + + " { name state }} }", + replace)); + Thread.sleep(5000); + HttpResponse resp = + webClient + .extensions() + .post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(400)).and(Matchers.lessThan(600))); + + JsonObject response = resp.bodyAsJsonObject(); + JsonArray errors = response.getJsonArray("errors"); + return errors.getJsonObject(0); + } + + private JsonObject restartRecordingWithError(boolean restart) throws Exception { + JsonObject query = new JsonObject(); + + query.put( + "query", + String.format( + "query { targetNodes(filter: { name:" + + " \"service:jmx:rmi:///jndi/rmi://localhost:0/jmxrmi\"" + + " }) { doStartRecording(recording: { name: \"test\"," + + " template:\"Profiling\", templateType: \"TARGET\", restart: %b})" + + " { name state }} }", + restart)); + Thread.sleep(5000); + HttpResponse resp = + webClient + .extensions() + .post("/api/v2.2/graphql", query.toBuffer(), REQUEST_TIMEOUT_SECONDS); + MatcherAssert.assertThat( + resp.statusCode(), + Matchers.both(Matchers.greaterThanOrEqualTo(400)).and(Matchers.lessThan(600))); + + JsonObject response = resp.bodyAsJsonObject(); + JsonArray errors = response.getJsonArray("errors"); + return errors.getJsonObject(0); + } +}
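For reference, a minimal sketch of how an external client might exercise the /api/v2.2/graphql endpoint that these integration tests target. The endpoint path, the {"query": ...} request body shape, and the annotations label-selector filter come from the tests in this patch; the hostname, port, absence of authentication, and the GraphQLClientSketch class name are illustrative assumptions only, not part of the change.

// Sketch only: posts a targetNodes query to the GraphQL endpoint added by this patch.
// Host, port, and lack of auth are assumptions for illustration.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class GraphQLClientSketch {
    public static void main(String[] args) throws Exception {
        // Same filter syntax the integration tests use: match targets whose
        // cryostat annotations satisfy the label-selector expression.
        String query = "query { targetNodes(filter: { annotations: \"PORT == 0\" })"
                + " { name nodeType } }";
        // Wrap the query in the {"query": "..."} JSON body shape used by the tests above.
        String body = "{\"query\": \"" + query.replace("\"", "\\\"") + "\"}";

        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8181/api/v2.2/graphql")) // host/port assumed
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(body))
                .build();

        HttpResponse<String> response =
                client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode());
        System.out.println(response.body()); // e.g. {"data":{"targetNodes":[...]}}
    }
}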