Merge pull request #134 from ruromero/telemetry
feat: add telemetry
ruromero authored Aug 31, 2023
2 parents 17167e1 + f0e7a34 commit 9be7eb6
Showing 19 changed files with 597 additions and 15 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/inform-cli.yml
@@ -19,7 +19,7 @@ jobs:
with:
github-token: ${{ secrets.CRDA_CLI_REPO_PAT }}
script: |
['crda-java-api', 'crda-javascript-api'].forEach(async repo => {
['exhort-java-api', 'exhort-javascript-api'].forEach(async repo => {
await github.rest.repos.createDispatchEvent({
owner: "RHEcosystemAppEng",
repo: repo,
17 changes: 15 additions & 2 deletions README.md
@@ -205,12 +205,25 @@ http -v :8080/api/v3/token ex-snyk-token==example-token
The possible responses are:

- 200 - Token validated successfully
- 400 - Missing authentication header
- 401 - Invalid auth token provided
- 400 - Missing provider authentication headers
- 401 - Invalid auth token provided or missing required authentication header (rhda-token)
- 403 - The token is not authorized
- 429 - Rate limit exceeded
- 500 - Server error

## Telemetry

API clients are expected to send a `rhda-token` HTTP header that is used to correlate
different events from the same user.
If the header is not provided, an anonymous event with a generated UUID is sent instead.
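
For illustration, a request carrying the header might look like this, following the `http` examples earlier in this README (the token value is a placeholder UUID):

http -v :8080/api/v3/token rhda-token:3fa85f64-5717-4562-b3fc-2c963f66afa6 ex-snyk-token==example-token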

Telemetry sends events to [Segment](https://segment.com/).
The connection can be configured with the following properties (see the sketch after this list):

- `telemetry.disabled`: To completely disable telemetry
- `telemetry.write-key`: Authentication key to connect to Segment
- `quarkus.rest-client.segment-api.url`: Segment API endpoint
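
A minimal sketch of how these could be set in `src/main/resources/application.properties`; the write key is a placeholder and the Segment endpoint value is an assumption, not taken from this commit:

# Hypothetical values for illustration only
telemetry.disabled=false
telemetry.write-key=<your-segment-write-key>
quarkus.rest-client.segment-api.url=https://api.segment.io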

## Deploy on OpenShift

The required parameters can be injected as environment variables through a secret. Create the `exhort-secret` Secret before deploying the application.
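
For example, a secret containing the keys referenced by the OpenShift template below (`api-snyk-token` and `telemetry-write-key`) could be created like this; the values are placeholders:

# Placeholder values for illustration only
oc create secret generic exhort-secret \
  --from-literal=api-snyk-token=<snyk-token> \
  --from-literal=telemetry-write-key=<segment-write-key>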
6 changes: 6 additions & 0 deletions api-spec/v3/openapi.yaml
@@ -21,6 +21,7 @@ paths:
operationId: analysis
summary: Takes a client-resolved dependency graph to perform a full stack analysis from all the available Vulnerability sources
security:
- RhdaTokenAuth: []
- SnykTokenAuth: []
- OssIndexUserAuth: []
OssIndexTokenAuth: []
@@ -75,6 +76,7 @@ paths:
operationId: validateToken
summary: Validates a vulnerability provider token
security:
- RhdaTokenAuth: []
- SnykTokenAuth: []
- OssIndexUserAuth: []
OssIndexTokenAuth: []
@@ -117,6 +119,10 @@ paths:

components:
securitySchemes:
RhdaTokenAuth:
type: apiKey
in: header
name: rhda-token
SnykTokenAuth:
type: apiKey
in: header
5 changes: 5 additions & 0 deletions deploy/openshift/template.yaml
@@ -86,6 +86,11 @@ objects:
secretKeyRef:
name: exhort-secret
key: api-snyk-token
- name: TELEMETRY_WRITE_KEY
valueFrom:
secretKeyRef:
name: exhort-secret
key: telemetry-write-key
securityContext:
runAsNonRoot: true
resources:
23 changes: 20 additions & 3 deletions pom.xml
@@ -6,6 +6,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>com.redhat.ecosystemappeng</groupId>
<artifactId>exhort</artifactId>
<name>RHDA - Exhort</name>
<version>0.0.1-SNAPSHOT</version>

<licenses>
@@ -17,6 +17,8 @@
</licenses>

<properties>
<timestamp>${maven.build.timestamp}</timestamp>
<maven.build.timestamp.format>yyyy-MM-dd'T'HH:mm:ss.SSS'Z'</maven.build.timestamp.format>
<maven.compiler.release>17</maven.compiler.release>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
@@ -96,6 +99,14 @@
<groupId>io.quarkus</groupId>
<artifactId>quarkus-micrometer-registry-prometheus</artifactId>
</dependency>
<dependency>
<groupId>io.quarkus</groupId>
<artifactId>quarkus-rest-client-reactive-jackson</artifactId>
</dependency>
<dependency>
<groupId>io.quarkus</groupId>
<artifactId>quarkus-smallrye-openapi</artifactId>
</dependency>
<dependency>
<groupId>org.apache.camel.quarkus</groupId>
<artifactId>camel-quarkus-direct</artifactId>
@@ -118,11 +129,11 @@
</dependency>
<dependency>
<groupId>org.apache.camel.quarkus</groupId>
<artifactId>camel-quarkus-log</artifactId>
<artifactId>camel-quarkus-seda</artifactId>
</dependency>
<dependency>
<groupId>io.quarkus</groupId>
<artifactId>quarkus-smallrye-openapi</artifactId>
<groupId>org.apache.camel.quarkus</groupId>
<artifactId>camel-quarkus-log</artifactId>
</dependency>
<dependency>
<groupId>org.cyclonedx</groupId>
@@ -164,6 +175,12 @@
</dependency>
</dependencies>
<build>
<resources>
<resource>
<directory>src/main/resources</directory>
<filtering>true</filtering>
</resource>
</resources>
<plugins>

<plugin>
210 changes: 210 additions & 0 deletions src/main/java/com/redhat/exhort/analytics/AnalyticsService.java
@@ -0,0 +1,210 @@
/*
* Copyright 2023 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.redhat.exhort.analytics;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.camel.Exchange;
import org.eclipse.microprofile.config.inject.ConfigProperty;
import org.eclipse.microprofile.rest.client.inject.RestClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.redhat.exhort.analytics.segment.Context;
import com.redhat.exhort.analytics.segment.IdentifyEvent;
import com.redhat.exhort.analytics.segment.Library;
import com.redhat.exhort.analytics.segment.SegmentService;
import com.redhat.exhort.analytics.segment.TrackEvent;
import com.redhat.exhort.api.AnalysisReport;
import com.redhat.exhort.api.DependencyReport;
import com.redhat.exhort.integration.Constants;

import io.quarkus.runtime.annotations.RegisterForReflection;

import jakarta.enterprise.context.ApplicationScoped;
import jakarta.ws.rs.core.Response;

@ApplicationScoped
@RegisterForReflection
public class AnalyticsService {

private static final Logger LOGGER = LoggerFactory.getLogger(AnalyticsService.class);

private static final String RHDA_TOKEN = "rhda-token";
private static final String ANONYMOUS_ID = "telemetry-anonymous-id";
private static final String ANALYSIS_EVENT = "rhda.exhort.analysis";
private static final String TOKEN_EVENT = "rhda.exhort.token";

@ConfigProperty(name = "telemetry.disabled", defaultValue = "false")
Boolean disabled;

@ConfigProperty(name = "project.id")
String projectId;

@ConfigProperty(name = "project.name")
String projectName;

@ConfigProperty(name = "project.version")
String projectVersion;

@ConfigProperty(name = "project.build")
String projectBuild;

@RestClient SegmentService segmentService;

public void identify(Exchange exchange) {
if (disabled) {
return;
}

String userId = exchange.getIn().getHeader(RHDA_TOKEN, String.class);
if (userId == null) {
String anonymousId = UUID.randomUUID().toString();
Map<String, String> traits = new HashMap<>();
traits.put("serverName", projectName);
traits.put("serverVersion", projectVersion);
traits.put("serverBuild", projectBuild);
IdentifyEvent event =
new IdentifyEvent.Builder()
.context(new Context(new Library(projectId, projectVersion)))
.anonymousId(anonymousId)
.traits(traits)
.build();
try {
Response response = segmentService.identify(event);
if (response.getStatus() >= 400) {
LOGGER.warn(
String.format(
"Unable to send event to segment: %d - %s",
response.getStatus(), response.getStatusInfo()));
}
} catch (Exception e) {
LOGGER.warn("Unable to send event to segment", e);
}
exchange.setProperty(ANONYMOUS_ID, anonymousId);
} else {
// no need to IDENTIFY as we expect the caller to have done that already
exchange.setProperty(RHDA_TOKEN, userId);
exchange.getIn().removeHeader(RHDA_TOKEN);
}
}

public void trackAnalysis(Exchange exchange) {
if (disabled) {
return;
}
TrackEvent.Builder builder = prepareTrackEvent(exchange, ANALYSIS_EVENT);
AnalysisReport report = exchange.getProperty(Constants.REPORT_PROPERTY, AnalysisReport.class);
Map<String, Object> properties = new HashMap<>();
if (report != null) {
Map<String, Object> providers = new HashMap<>();
Map<String, Object> reportProps = new HashMap<>();
// TODO: Adapt after multi-source is implemented
reportProps.put("dependencies", report.getSummary().getDependencies());
reportProps.put("vulnerabilities", report.getSummary().getVulnerabilities());
providers.put("report", reportProps);
providers.put("provider", Constants.SNYK_PROVIDER);
providers.put("recommendations", countRecommendations(report));
providers.put("remediations", countRemediations(report));
properties.put(
"requestType", exchange.getProperty(Constants.REQUEST_CONTENT_PROPERTY, String.class));
properties.put("providers", providers);
properties.put("sbom", exchange.getProperty(Constants.SBOM_TYPE_PARAM, String.class));
}
try {
Response response = segmentService.track(builder.properties(properties).build());
if (response.getStatus() >= 400) {
LOGGER.warn(
String.format(
"Unable to send event to segment: %d - %s",
response.getStatus(), response.getStatusInfo()));
}
} catch (Exception e) {
LOGGER.warn("Unable to send event to segment", e);
}
}

public void trackToken(Exchange exchange) {
if (disabled) {
return;
}
TrackEvent.Builder builder = prepareTrackEvent(exchange, TOKEN_EVENT);
Map<String, Object> properties = new HashMap<>();
properties.put("providers", exchange.getProperty(Constants.PROVIDERS_PARAM, List.class));
properties.put(
"statusCode", exchange.getIn().getHeader(Exchange.HTTP_RESPONSE_CODE, String.class));
try {
Response response = segmentService.track(builder.properties(properties).build());
if (response.getStatus() >= 400) {
LOGGER.warn(
String.format(
"Unable to send event to segment: %d - %s",
response.getStatus(), response.getStatusInfo()));
}
} catch (Exception e) {
LOGGER.warn("Unable to enqueue event to segment", e);
}
}

private TrackEvent.Builder prepareTrackEvent(Exchange exchange, String eventName) {
TrackEvent.Builder builder = new TrackEvent.Builder(eventName);
String userId = exchange.getProperty(RHDA_TOKEN, String.class);
if (userId != null) {
builder.userId(userId);
} else {
String anonymousId = exchange.getProperty(ANONYMOUS_ID, String.class);
builder.anonymousId(anonymousId);
}
return builder.context(new Context(new Library(projectId, projectVersion)));
}

private long countRemediations(AnalysisReport report) {
AtomicLong counter = new AtomicLong();
report
.getDependencies()
.forEach(
d -> {
if (d.getRemediations() != null) {
counter.addAndGet(d.getRemediations().size());
}
if (d.getTransitive() != null) {
d.getTransitive()
.forEach(
t -> {
if (t.getRemediations() != null) {
counter.addAndGet(t.getRemediations().size());
}
});
}
});
return counter.get();
}

private long countRecommendations(AnalysisReport report) {
return report.getDependencies().stream()
.map(DependencyReport::getRecommendation)
.filter(Objects::nonNull)
.count();
}
}