NOTIFICATION_SERDE = new KafkaProtobufSerde<>(Notification.parser());
@@ -58,6 +59,7 @@ public final class KafkaTopics {
REPO_META_ANALYSIS_RESULT = new Topic<>("dtrack.repo-meta-analysis.result", Serdes.String(), new KafkaProtobufSerde<>(AnalysisResult.parser()));
VULN_ANALYSIS_COMMAND = new Topic<>("dtrack.vuln-analysis.component", new KafkaProtobufSerde<>(ScanKey.parser()), new KafkaProtobufSerde<>(ScanCommand.parser()));
VULN_ANALYSIS_RESULT = new Topic<>("dtrack.vuln-analysis.result", new KafkaProtobufSerde<>(ScanKey.parser()), new KafkaProtobufSerde<>(ScanResult.parser()));
+ VULN_ANALYSIS_RESULT_PROCESSED = new Topic<>("dtrack.vuln-analysis.result.processed", Serdes.String(), new KafkaProtobufSerde<>(ScanResult.parser()));
NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE = new Topic<>("dtrack.notification.project-vuln-analysis-complete", Serdes.String(), NOTIFICATION_SERDE);
}
diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/DelayedBomProcessedNotificationProcessor.java b/src/main/java/org/dependencytrack/event/kafka/processor/DelayedBomProcessedNotificationProcessor.java
index cddc5b692..ba1c9ab1a 100644
--- a/src/main/java/org/dependencytrack/event/kafka/processor/DelayedBomProcessedNotificationProcessor.java
+++ b/src/main/java/org/dependencytrack/event/kafka/processor/DelayedBomProcessedNotificationProcessor.java
@@ -1,87 +1,128 @@
package org.dependencytrack.event.kafka.processor;
+import alpine.Config;
import alpine.common.logging.Logger;
-import alpine.notification.NotificationLevel;
-import org.apache.kafka.streams.processor.api.ContextualProcessor;
-import org.apache.kafka.streams.processor.api.Processor;
-import org.apache.kafka.streams.processor.api.Record;
-import org.dependencytrack.model.Bom;
-import org.dependencytrack.model.Project;
+import com.google.protobuf.Any;
+import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.Timestamp;
+import com.google.protobuf.util.Timestamps;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.dependencytrack.common.ConfigKey;
+import org.dependencytrack.event.kafka.KafkaEvent;
+import org.dependencytrack.event.kafka.KafkaEventDispatcher;
+import org.dependencytrack.event.kafka.KafkaTopics;
+import org.dependencytrack.event.kafka.processor.api.BatchProcessor;
+import org.dependencytrack.event.kafka.processor.exception.ProcessingException;
import org.dependencytrack.model.VulnerabilityScan;
-import org.dependencytrack.model.WorkflowStatus;
-import org.dependencytrack.model.WorkflowStep;
import org.dependencytrack.notification.NotificationConstants;
import org.dependencytrack.notification.NotificationGroup;
-import org.dependencytrack.notification.NotificationScope;
-import org.dependencytrack.notification.vo.BomConsumedOrProcessed;
import org.dependencytrack.persistence.QueryManager;
+import org.dependencytrack.persistence.jdbi.NotificationSubjectDao;
+import org.dependencytrack.proto.notification.v1.BomConsumedOrProcessedSubject;
import org.dependencytrack.proto.notification.v1.Notification;
+import org.dependencytrack.proto.notification.v1.ProjectVulnAnalysisCompleteSubject;
-import javax.jdo.Query;
-import java.util.UUID;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
-import static org.dependencytrack.parser.dependencytrack.NotificationModelConverter.convert;
+import static org.dependencytrack.persistence.jdbi.JdbiFactory.jdbi;
+import static org.dependencytrack.proto.notification.v1.Group.GROUP_BOM_PROCESSED;
+import static org.dependencytrack.proto.notification.v1.Level.LEVEL_INFORMATIONAL;
+import static org.dependencytrack.proto.notification.v1.Scope.SCOPE_PORTFOLIO;
/**
- * A {@link Processor} responsible for dispatching {@link NotificationGroup#BOM_PROCESSED} notifications
+ * A {@link BatchProcessor} responsible for dispatching {@link NotificationGroup#BOM_PROCESSED} notifications
* upon detection of a completed {@link VulnerabilityScan}.
+ *
+ * The completion detection is based on {@link NotificationGroup#PROJECT_VULN_ANALYSIS_COMPLETE} notifications.
+ * This processor does nothing unless {@link ConfigKey#TMP_DELAY_BOM_PROCESSED_NOTIFICATION} is enabled.
*/
-public class DelayedBomProcessedNotificationProcessor extends ContextualProcessor {
+class DelayedBomProcessedNotificationProcessor implements BatchProcessor<String, Notification> {
+
+ static final String PROCESSOR_NAME = "delayed.bom.processed.notification";
private static final Logger LOGGER = Logger.getLogger(DelayedBomProcessedNotificationProcessor.class);
+ private final Config config;
+ private final KafkaEventDispatcher eventDispatcher;
+
+ DelayedBomProcessedNotificationProcessor() {
+ this(Config.getInstance(), new KafkaEventDispatcher());
+ }
+
+ DelayedBomProcessedNotificationProcessor(final Config config, final KafkaEventDispatcher eventDispatcher) {
+ this.config = config;
+ this.eventDispatcher = eventDispatcher;
+ }
+
@Override
- public void process(final Record record) {
- final VulnerabilityScan vulnScan = record.value();
+    public void process(final List<ConsumerRecord<String, Notification>> records) throws ProcessingException {
+ if (!config.getPropertyAsBoolean(ConfigKey.TMP_DELAY_BOM_PROCESSED_NOTIFICATION)) {
+ return;
+ }
- if (vulnScan.getStatus() != VulnerabilityScan.Status.COMPLETED
- && vulnScan.getStatus() != VulnerabilityScan.Status.FAILED) {
- LOGGER.warn("Received vulnerability scan with non-terminal status %s; Dropping (token=%s, project=%s)"
- .formatted(vulnScan.getStatus(), vulnScan.getToken(), vulnScan.getTargetIdentifier()));
+        final Set<String> tokens = extractTokens(records);
+ if (tokens.isEmpty()) {
+ LOGGER.warn("No token could be extracted from any of the %d records in this batch"
+ .formatted(records.size()));
return;
}
- final Project project;
+        final List<BomConsumedOrProcessedSubject> subjects;
try (final var qm = new QueryManager()) {
- if (!qm.hasWorkflowStepWithStatus(UUID.fromString(vulnScan.getToken()), WorkflowStep.BOM_PROCESSING, WorkflowStatus.COMPLETED)) {
- LOGGER.debug("Received completed vulnerability scan, but no %s step exists in this workflow; Dropping (token=%s, project=%s)"
- .formatted(WorkflowStep.BOM_PROCESSING, vulnScan.getToken(), vulnScan.getTargetIdentifier()));
- return;
+ subjects = jdbi(qm).withExtension(NotificationSubjectDao.class,
+ dao -> dao.getForDelayedBomProcessed(tokens));
+ }
+
+ dispatchNotifications(subjects);
+ }
+
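+    /**
+     * Extracts vulnerability scan tokens from the given records, skipping records whose
+     * notification has no {@link ProjectVulnAnalysisCompleteSubject}, or whose subject cannot be unpacked.
+     */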
+    private static Set<String> extractTokens(final List<ConsumerRecord<String, Notification>> records) {
+        final var tokens = new HashSet<String>();
+        for (final ConsumerRecord<String, Notification> record : records) {
+ final Notification notification = record.value();
+ if (!notification.hasSubject() || !notification.getSubject().is(ProjectVulnAnalysisCompleteSubject.class)) {
+ continue;
}
- project = getProject(qm, vulnScan.getTargetIdentifier());
- if (project == null) {
- LOGGER.warn("Received completed vulnerability scan, but the target project does not exist; Dropping (token=%s, project=%s)"
- .formatted(vulnScan.getToken(), vulnScan.getTargetIdentifier()));
- return;
+ final ProjectVulnAnalysisCompleteSubject subject;
+ try {
+ subject = notification.getSubject().unpack(ProjectVulnAnalysisCompleteSubject.class);
+ } catch (InvalidProtocolBufferException e) {
+ LOGGER.warn("Failed to unpack notification subject from %s; Skipping".formatted(record), e);
+ continue;
}
+
+ tokens.add(subject.getToken());
}
- final var alpineNotification = new alpine.notification.Notification()
- .scope(NotificationScope.PORTFOLIO)
- .group(NotificationGroup.BOM_PROCESSED)
- .level(NotificationLevel.INFORMATIONAL)
- .title(NotificationConstants.Title.BOM_PROCESSED)
- // BOM format and spec version are hardcoded because we don't have this information at this point.
- // DT currently only accepts CycloneDX anyway.
- .content("A %s BOM was processed".formatted(Bom.Format.CYCLONEDX.getFormatShortName()))
- .subject(new BomConsumedOrProcessed(UUID.fromString(vulnScan.getToken()), project, /* bom */ "(Omitted)", Bom.Format.CYCLONEDX, "Unknown"));
-
- context().forward(record.withKey(project.getUuid().toString()).withValue(convert(alpineNotification)));
- LOGGER.info("Dispatched delayed %s notification (token=%s, project=%s)"
- .formatted(NotificationGroup.BOM_PROCESSED, vulnScan.getToken(), vulnScan.getTargetIdentifier()));
+ return tokens;
}
- private static Project getProject(final QueryManager qm, final UUID uuid) {
- final Query projectQuery = qm.getPersistenceManager().newQuery(Project.class);
- projectQuery.setFilter("uuid == :uuid");
- projectQuery.setParameters(uuid);
- projectQuery.getFetchPlan().clearGroups(); // Ensure we're not loading too much bloat.
- projectQuery.getFetchPlan().setGroup(Project.FetchGroup.NOTIFICATION.name());
- try {
- return qm.getPersistenceManager().detachCopy(projectQuery.executeResultUnique(Project.class));
- } finally {
- projectQuery.closeAll();
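+    /**
+     * Builds a {@code BOM_PROCESSED} notification for each subject and dispatches
+     * them to the notification topic in a single blocking batch.
+     */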
+    private void dispatchNotifications(final List<BomConsumedOrProcessedSubject> subjects) {
+        final Timestamp timestamp = Timestamps.now();
+        final var events = new ArrayList<KafkaEvent<String, Notification>>(subjects.size());
+ for (final BomConsumedOrProcessedSubject subject : subjects) {
+ final var event = new KafkaEvent<>(KafkaTopics.NOTIFICATION_BOM,
+ subject.getProject().getUuid(), Notification.newBuilder()
+ .setScope(SCOPE_PORTFOLIO)
+ .setGroup(GROUP_BOM_PROCESSED)
+ .setLevel(LEVEL_INFORMATIONAL)
+ .setTimestamp(timestamp)
+ .setTitle(NotificationConstants.Title.BOM_PROCESSED)
+ .setContent("A %s BOM was processed".formatted(subject.getBom().getFormat()))
+ .setSubject(Any.pack(subject))
+ .build());
+ events.add(event);
+ }
+
+ eventDispatcher.dispatchAllBlocking(events);
+
+ for (final BomConsumedOrProcessedSubject subject : subjects) {
+ LOGGER.info("Dispatched delayed %s notification (token=%s, project=%s)"
+ .formatted(GROUP_BOM_PROCESSED, subject.getToken(), subject.getProject().getUuid()));
}
}
diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/KafkaProcessorsHealthCheck.java b/src/main/java/org/dependencytrack/event/kafka/processor/KafkaProcessorsHealthCheck.java
new file mode 100644
index 000000000..d70862b18
--- /dev/null
+++ b/src/main/java/org/dependencytrack/event/kafka/processor/KafkaProcessorsHealthCheck.java
@@ -0,0 +1,17 @@
+package org.dependencytrack.event.kafka.processor;
+
+import org.eclipse.microprofile.health.HealthCheck;
+import org.eclipse.microprofile.health.HealthCheckResponse;
+import org.eclipse.microprofile.health.Liveness;
+
+import static org.dependencytrack.event.kafka.processor.KafkaProcessorsInitializer.PROCESSOR_MANAGER;
+
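+/**
+ * A {@link HealthCheck} that reports liveness based on the state of all managed Kafka processors.
+ */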
+@Liveness
+public class KafkaProcessorsHealthCheck implements HealthCheck {
+
+ @Override
+ public HealthCheckResponse call() {
+ return PROCESSOR_MANAGER.probeHealth();
+ }
+
+}
diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/KafkaProcessorsInitializer.java b/src/main/java/org/dependencytrack/event/kafka/processor/KafkaProcessorsInitializer.java
new file mode 100644
index 000000000..ce6749dfb
--- /dev/null
+++ b/src/main/java/org/dependencytrack/event/kafka/processor/KafkaProcessorsInitializer.java
@@ -0,0 +1,44 @@
+package org.dependencytrack.event.kafka.processor;
+
+import alpine.Config;
+import alpine.common.logging.Logger;
+import org.dependencytrack.common.ConfigKey;
+import org.dependencytrack.event.kafka.KafkaTopics;
+import org.dependencytrack.event.kafka.processor.api.ProcessorManager;
+
+import javax.servlet.ServletContextEvent;
+import javax.servlet.ServletContextListener;
+
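+/**
+ * A {@link ServletContextListener} that registers and starts all Kafka processors on application
+ * startup, and stops them again on shutdown.
+ */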
+public class KafkaProcessorsInitializer implements ServletContextListener {
+
+ private static final Logger LOGGER = Logger.getLogger(KafkaProcessorsInitializer.class);
+
+ static final ProcessorManager PROCESSOR_MANAGER = new ProcessorManager();
+
+ @Override
+ public void contextInitialized(final ServletContextEvent event) {
+ LOGGER.info("Initializing processors");
+
+ PROCESSOR_MANAGER.registerProcessor(MirroredVulnerabilityProcessor.PROCESSOR_NAME,
+ new MirroredVulnerabilityProcessor(), KafkaTopics.NEW_VULNERABILITY);
+ PROCESSOR_MANAGER.registerProcessor(RepositoryMetaResultProcessor.PROCESSOR_NAME,
+ new RepositoryMetaResultProcessor(), KafkaTopics.REPO_META_ANALYSIS_RESULT);
+ PROCESSOR_MANAGER.registerProcessor(VulnerabilityScanResultProcessor.PROCESSOR_NAME,
+ new VulnerabilityScanResultProcessor(), KafkaTopics.VULN_ANALYSIS_RESULT);
+ PROCESSOR_MANAGER.registerBatchProcessor(ProcessedVulnerabilityScanResultProcessor.PROCESSOR_NAME,
+ new ProcessedVulnerabilityScanResultProcessor(), KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED);
+ if (Config.getInstance().getPropertyAsBoolean(ConfigKey.TMP_DELAY_BOM_PROCESSED_NOTIFICATION)) {
+ PROCESSOR_MANAGER.registerBatchProcessor(DelayedBomProcessedNotificationProcessor.PROCESSOR_NAME,
+ new DelayedBomProcessedNotificationProcessor(), KafkaTopics.NOTIFICATION_PROJECT_VULN_ANALYSIS_COMPLETE);
+ }
+
+ PROCESSOR_MANAGER.startAll();
+ }
+
+ @Override
+ public void contextDestroyed(final ServletContextEvent event) {
+ LOGGER.info("Stopping processors");
+ PROCESSOR_MANAGER.close();
+ }
+
+}
diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/MirrorVulnerabilityProcessor.java b/src/main/java/org/dependencytrack/event/kafka/processor/MirroredVulnerabilityProcessor.java
similarity index 95%
rename from src/main/java/org/dependencytrack/event/kafka/processor/MirrorVulnerabilityProcessor.java
rename to src/main/java/org/dependencytrack/event/kafka/processor/MirroredVulnerabilityProcessor.java
index 123f8e739..5b5f21670 100644
--- a/src/main/java/org/dependencytrack/event/kafka/processor/MirrorVulnerabilityProcessor.java
+++ b/src/main/java/org/dependencytrack/event/kafka/processor/MirroredVulnerabilityProcessor.java
@@ -6,11 +6,12 @@
import com.github.packageurl.PackageURL;
import io.micrometer.core.instrument.Timer;
import org.apache.commons.lang3.StringUtils;
-import org.apache.kafka.streams.processor.api.Processor;
-import org.apache.kafka.streams.processor.api.Record;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.cyclonedx.proto.v1_4.Bom;
import org.cyclonedx.proto.v1_4.Component;
import org.cyclonedx.proto.v1_4.VulnerabilityAffects;
+import org.dependencytrack.event.kafka.processor.api.Processor;
+import org.dependencytrack.event.kafka.processor.exception.ProcessingException;
import org.dependencytrack.model.Vulnerability;
import org.dependencytrack.model.VulnerableSoftware;
import org.dependencytrack.parser.dependencytrack.ModelConverterCdxToVuln;
@@ -27,16 +28,20 @@
import java.util.List;
import java.util.Optional;
+/**
+ * A {@link Processor} that ingests vulnerability data from CycloneDX Bill of Vulnerabilities.
+ */
+class MirroredVulnerabilityProcessor implements Processor<String, Bom> {
-public class MirrorVulnerabilityProcessor implements Processor<String, Bom, Void, Void> {
+ static final String PROCESSOR_NAME = "mirrored.vuln";
- private static final Logger LOGGER = Logger.getLogger(MirrorVulnerabilityProcessor.class);
+ private static final Logger LOGGER = Logger.getLogger(MirroredVulnerabilityProcessor.class);
private static final Timer TIMER = Timer.builder("vuln_mirror_processing")
.description("Time taken to process mirrored vulnerabilities")
.register(Metrics.getRegistry());
@Override
-    public void process(final Record<String, Bom> record) {
+    public void process(final ConsumerRecord<String, Bom> record) throws ProcessingException {
final Timer.Sample timerSample = Timer.start();
try (QueryManager qm = new QueryManager().withL2CacheDisabled()) {
@@ -114,8 +119,7 @@ public void process(final Record<String, Bom> record) {
qm.persist(synchronizedVulnerability);
// Event.dispatch(new IndexEvent(IndexEvent.Action.COMMIT, Vulnerability.class));
} catch (Exception e) {
- // TODO: Send record to a dead letter topic.
- LOGGER.error("Synchronizing vulnerability %s failed".formatted(record.key()), e);
+ throw new ProcessingException("Synchronizing vulnerability %s failed".formatted(record.key()), e);
} finally {
timerSample.stop(TIMER);
}
@@ -304,4 +308,5 @@ public VulnerableSoftware mapAffectedRangeToVulnerableSoftware(final QueryManage
vs.setVersionEndIncluding(versionEndIncluding);
return vs;
}
+
}
diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java
new file mode 100644
index 000000000..47c9a2175
--- /dev/null
+++ b/src/main/java/org/dependencytrack/event/kafka/processor/ProcessedVulnerabilityScanResultProcessor.java
@@ -0,0 +1,201 @@
+package org.dependencytrack.event.kafka.processor;
+
+import alpine.common.logging.Logger;
+import alpine.event.framework.ChainableEvent;
+import alpine.event.framework.Event;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.dependencytrack.event.ComponentMetricsUpdateEvent;
+import org.dependencytrack.event.ComponentPolicyEvaluationEvent;
+import org.dependencytrack.event.ProjectMetricsUpdateEvent;
+import org.dependencytrack.event.ProjectPolicyEvaluationEvent;
+import org.dependencytrack.event.kafka.processor.api.BatchProcessor;
+import org.dependencytrack.event.kafka.processor.exception.ProcessingException;
+import org.dependencytrack.model.VulnerabilityScan;
+import org.dependencytrack.model.VulnerabilityScan.Status;
+import org.dependencytrack.persistence.QueryManager;
+import org.dependencytrack.persistence.jdbi.mapping.VulnerabilityScanRowMapper;
+import org.dependencytrack.proto.vulnanalysis.v1.ScanResult;
+import org.jdbi.v3.core.Handle;
+import org.jdbi.v3.core.statement.PreparedBatch;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.UUID;
+
+import static java.lang.Math.toIntExact;
+import static org.dependencytrack.persistence.jdbi.JdbiFactory.jdbi;
+import static org.dependencytrack.proto.vulnanalysis.v1.ScanStatus.SCAN_STATUS_FAILED;
+
+/**
+ * A {@link BatchProcessor} that records successfully processed {@link ScanResult}s for their respective
+ * {@link VulnerabilityScan}, and triggers follow-up processes in case a scan is complete.
+ */
+class ProcessedVulnerabilityScanResultProcessor implements BatchProcessor<String, ScanResult> {
+
+ static final String PROCESSOR_NAME = "processed.vuln.scan.result";
+
+ private static final Logger LOGGER = Logger.getLogger(ProcessedVulnerabilityScanResultProcessor.class);
+
+ @Override
+    public void process(final List<ConsumerRecord<String, ScanResult>> records) throws ProcessingException {
+        final List<VulnerabilityScan> completedScans = processScanResults(records);
+ if (!completedScans.isEmpty() && LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Detected completion of %s vulnerability scans: %s"
+ .formatted(completedScans.size(), completedScans.stream().map(VulnerabilityScan::getToken).toList()));
+ }
+
+ triggerPolicyEvalAndMetricsUpdates(completedScans);
+ }
+
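+    /**
+     * Records the given scan results in a single database transaction and returns the
+     * {@link VulnerabilityScan}s that reached a terminal status as a result.
+     */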
+    private static List<VulnerabilityScan> processScanResults(final List<ConsumerRecord<String, ScanResult>> records) {
+ try (final var qm = new QueryManager()) {
+ return jdbi(qm).inTransaction(jdbiHandle -> {
+                final List<VulnerabilityScan> completedScans = recordScanResults(jdbiHandle, records);
+ if (completedScans.isEmpty()) {
+ return completedScans;
+ }
+
+ updateWorkflowSteps(jdbiHandle, completedScans);
+
+ return completedScans;
+ });
+ }
+ }
+
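+    /**
+     * Aggregates the results per scan token, updates the corresponding {@code VULNERABILITYSCAN}
+     * rows in a single batch, and returns the scans that transitioned to COMPLETED or FAILED.
+     */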
+    private static List<VulnerabilityScan> recordScanResults(final Handle jdbiHandle, final List<ConsumerRecord<String, ScanResult>> records) {
+ // As we may get multiple records for the same scan token,
+ // aggregate their respective values to reduce the number
+ // of SQL updates we have to execute.
+        final var aggregatesByToken = new HashMap<String, ResultAggregate>();
+        for (final ConsumerRecord<String, ScanResult> record : records) {
+ aggregatesByToken.compute(record.key(), (token, existingAggregate) -> {
+ final ResultAggregate aggregate = Optional.ofNullable(existingAggregate)
+ .orElseGet(ResultAggregate::new);
+ aggregate.results++;
+ aggregate.scannerResultsTotal += record.value().getScannerResultsCount();
+ aggregate.scannerResultsFailed += toIntExact(record.value().getScannerResultsList().stream()
+ .filter(result -> result.getStatus() == SCAN_STATUS_FAILED)
+ .count());
+ return aggregate;
+ });
+ }
+
+ final PreparedBatch preparedBatch = jdbiHandle.prepareBatch("""
+ UPDATE "VULNERABILITYSCAN"
+ SET
+ "RECEIVED_RESULTS" = "RECEIVED_RESULTS" + :results,
+ "SCAN_TOTAL" = "SCAN_TOTAL" + :scannerResultsTotal,
+ "SCAN_FAILED" = "SCAN_FAILED" + :scannerResultsFailed,
+ "STATUS" = (
+ CASE
+ WHEN "EXPECTED_RESULTS" = ("RECEIVED_RESULTS" + :results) THEN
+ CASE
+ WHEN
+ (("SCAN_FAILED" + :scannerResultsFailed)
+ / ("SCAN_TOTAL" + :scannerResultsTotal)) > "FAILURE_THRESHOLD"
+ THEN
+ 'FAILED'
+ ELSE
+ 'COMPLETED'
+ END
+ ELSE 'IN_PROGRESS'
+ END
+ ),
+ "UPDATED_AT" = NOW()
+ WHERE
+ "TOKEN" = :token
+ RETURNING
+ "TOKEN",
+ "STATUS",
+ "TARGET_TYPE",
+ "TARGET_IDENTIFIER",
+ CASE
+ WHEN
+ "STATUS" = 'FAILED'
+ THEN
+ 'Failure threshold of ' || "FAILURE_THRESHOLD" || '% exceeded: '
+ || "SCAN_FAILED" || '/' || "SCAN_TOTAL" || ' of scans failed'
+ END
+ """);
+
+        for (final Map.Entry<String, ResultAggregate> tokenAndAggregate : aggregatesByToken.entrySet()) {
+ final String token = tokenAndAggregate.getKey();
+ final ResultAggregate aggregate = tokenAndAggregate.getValue();
+
+ preparedBatch
+ .bind("token", token)
+ .bind("results", aggregate.results)
+ .bind("scannerResultsTotal", aggregate.scannerResultsTotal)
+ .bind("scannerResultsFailed", aggregate.scannerResultsFailed)
+ .add();
+ }
+
+ return preparedBatch
+ .executePreparedBatch("TOKEN", "STATUS", "TARGET_TYPE", "TARGET_IDENTIFIER")
+ .map(new VulnerabilityScanRowMapper())
+ .stream()
+ // Unfortunately we can't perform this filtering in SQL, as RETURNING
+ // does not allow a WHERE clause. Tried using a CTE as workaround:
+ // WITH "CTE" AS (UPDATE ... RETURNING ...) SELECT * FROM "CTE"
+ // but that didn't return any results at all.
+                // The good news is that the query typically modifies only a handful
+                // of scans, so we don't waste too many resources here.
+ .filter(vulnScan -> vulnScan.getStatus() == Status.COMPLETED
+ || vulnScan.getStatus() == Status.FAILED)
+ .toList();
+ }
+
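+    /**
+     * Transitions the workflow states associated with the completed scans to their terminal
+     * status, carrying over the failure reason where applicable.
+     */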
+    private static void updateWorkflowSteps(final Handle jdbiHandle, final List<VulnerabilityScan> completedScans) {
+ final PreparedBatch preparedBatch = jdbiHandle.prepareBatch("""
+ UPDATE "WORKFLOW_STATE"
+ SET
+ "STATUS" = :status,
+ "FAILURE_REASON" = :failureReason,
+ "UPDATED_AT" = NOW()
+ WHERE
+ "TOKEN" = :token
+ """);
+
+ for (final VulnerabilityScan vulnScan : completedScans) {
+ preparedBatch
+ .bind("token", vulnScan.getToken())
+ .bind("status", vulnScan.getStatus())
+ .bind("failureReason", vulnScan.getFailureReason())
+ .add();
+ }
+
+ preparedBatch.executePreparedBatch();
+ }
+
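+    /**
+     * Kicks off policy evaluation for each completed scan's target, chained with a metrics
+     * update that runs regardless of whether the evaluation succeeds or fails.
+     */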
+    private static void triggerPolicyEvalAndMetricsUpdates(final List<VulnerabilityScan> completedScans) {
+ for (final VulnerabilityScan vulnScan : completedScans) {
+ final ChainableEvent policyEvaluationEvent = switch (vulnScan.getTargetType()) {
+ case COMPONENT -> new ComponentPolicyEvaluationEvent(vulnScan.getTargetIdentifier());
+ case PROJECT -> new ProjectPolicyEvaluationEvent(vulnScan.getTargetIdentifier());
+ };
+ policyEvaluationEvent.setChainIdentifier(UUID.fromString(vulnScan.getToken()));
+
+ final ChainableEvent metricsUpdateEvent = switch (vulnScan.getTargetType()) {
+ case COMPONENT -> new ComponentMetricsUpdateEvent(vulnScan.getTargetIdentifier());
+ case PROJECT -> new ProjectMetricsUpdateEvent(vulnScan.getTargetIdentifier());
+ };
+ metricsUpdateEvent.setChainIdentifier(UUID.fromString(vulnScan.getToken()));
+
+ policyEvaluationEvent.onFailure(metricsUpdateEvent);
+ policyEvaluationEvent.onSuccess(metricsUpdateEvent);
+
+ Event.dispatch(policyEvaluationEvent);
+ }
+ }
+
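+    /**
+     * Per-token aggregate of received results and scanner result counts.
+     */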
+ private static class ResultAggregate {
+
+ private int results;
+ private int scannerResultsTotal;
+ private int scannerResultsFailed;
+
+ }
+
+}
diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/RepositoryMetaResultProcessor.java b/src/main/java/org/dependencytrack/event/kafka/processor/RepositoryMetaResultProcessor.java
index 8a4eb6f5a..077862093 100644
--- a/src/main/java/org/dependencytrack/event/kafka/processor/RepositoryMetaResultProcessor.java
+++ b/src/main/java/org/dependencytrack/event/kafka/processor/RepositoryMetaResultProcessor.java
@@ -6,8 +6,9 @@
import com.github.packageurl.PackageURL;
import io.micrometer.core.instrument.Timer;
import org.apache.commons.lang3.exception.ExceptionUtils;
-import org.apache.kafka.streams.processor.api.Processor;
-import org.apache.kafka.streams.processor.api.Record;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.dependencytrack.event.kafka.processor.api.Processor;
+import org.dependencytrack.event.kafka.processor.exception.ProcessingException;
import org.dependencytrack.model.FetchStatus;
import org.dependencytrack.model.IntegrityMetaComponent;
import org.dependencytrack.model.RepositoryMetaComponent;
@@ -27,9 +28,11 @@
import static org.dependencytrack.event.kafka.componentmeta.IntegrityCheck.performIntegrityCheck;
/**
- * A {@link Processor} responsible for processing result of component repository meta analyses.
+ * A {@link Processor} that ingests repository metadata {@link AnalysisResult}s.
*/
-public class RepositoryMetaResultProcessor implements Processor<String, AnalysisResult, Void, Void> {
+class RepositoryMetaResultProcessor implements Processor<String, AnalysisResult> {
+
+ static final String PROCESSOR_NAME = "repo.meta.result";
private static final Logger LOGGER = Logger.getLogger(RepositoryMetaResultProcessor.class);
private static final Timer TIMER = Timer.builder("repo_meta_result_processing")
@@ -37,7 +40,7 @@ public class RepositoryMetaResultProcessor implements Processor<String, AnalysisResult, Void, Void> {
-    public void process(final Record<String, AnalysisResult> record) {
+    public void process(final ConsumerRecord<String, AnalysisResult> record) throws ProcessingException {
final Timer.Sample timerSample = Timer.start();
if (!isRecordValid(record)) {
return;
@@ -49,13 +52,13 @@ public void process(final Record<String, AnalysisResult> record) {
performIntegrityCheck(integrityMetaComponent, record.value(), qm);
}
} catch (Exception e) {
- LOGGER.error("An unexpected error occurred while processing record %s".formatted(record), e);
+ throw new ProcessingException("An unexpected error occurred while processing record %s".formatted(record), e);
} finally {
timerSample.stop(TIMER);
}
}
-    private IntegrityMetaComponent synchronizeIntegrityMetadata(final QueryManager queryManager, final Record<String, AnalysisResult> record) throws MalformedPackageURLException {
+    private IntegrityMetaComponent synchronizeIntegrityMetadata(final QueryManager queryManager, final ConsumerRecord<String, AnalysisResult> record) throws MalformedPackageURLException {
final AnalysisResult result = record.value();
PackageURL purl = new PackageURL(result.getComponent().getPurl());
if (result.hasIntegrityMeta()) {
@@ -66,7 +69,7 @@ private IntegrityMetaComponent synchronizeIntegrityMetadata(final QueryManager q
}
}
-    private void synchronizeRepositoryMetadata(final QueryManager queryManager, final Record<String, AnalysisResult> record) throws Exception {
+    private void synchronizeRepositoryMetadata(final QueryManager queryManager, final ConsumerRecord<String, AnalysisResult> record) throws Exception {
PersistenceManager pm = queryManager.getPersistenceManager();
final AnalysisResult result = record.value();
PackageURL purl = new PackageURL(result.getComponent().getPurl());
@@ -104,7 +107,7 @@ private void synchronizeRepositoryMetadata(final QueryManager queryManager, fina
}
}
-    private RepositoryMetaComponent createRepositoryMetaResult(Record<String, AnalysisResult> incomingAnalysisResultRecord, PersistenceManager pm, PackageURL purl) throws Exception {
+    private RepositoryMetaComponent createRepositoryMetaResult(ConsumerRecord<String, AnalysisResult> incomingAnalysisResultRecord, PersistenceManager pm, PackageURL purl) throws Exception {
final AnalysisResult result = incomingAnalysisResultRecord.value();
if (result.hasLatestVersion()) {
try (final Query query = pm.newQuery(RepositoryMetaComponent.class)) {
@@ -145,7 +148,7 @@ private RepositoryMetaComponent createRepositoryMetaResult(Record<String, AnalysisResult> incomingAnalysisResultRecord, PersistenceManager pm, PackageURL purl) throws Exception {
-    private IntegrityMetaComponent synchronizeIntegrityMetaResult(final Record<String, AnalysisResult> incomingAnalysisResultRecord, QueryManager queryManager, PackageURL purl) {
+    private IntegrityMetaComponent synchronizeIntegrityMetaResult(final ConsumerRecord<String, AnalysisResult> incomingAnalysisResultRecord, QueryManager queryManager, PackageURL purl) {
final AnalysisResult result = incomingAnalysisResultRecord.value();
IntegrityMetaComponent persistentIntegrityMetaComponent = queryManager.getIntegrityMetaComponent(purl.toString());
if (persistentIntegrityMetaComponent != null && persistentIntegrityMetaComponent.getStatus() != null && persistentIntegrityMetaComponent.getStatus().equals(FetchStatus.PROCESSED)) {
@@ -180,7 +183,7 @@ private IntegrityMetaComponent synchronizeIntegrityMetaResult(final Record<String, AnalysisResult> incomingAnalysisResultRecord, QueryManager queryManager, PackageURL purl) {
-    private static boolean isRecordValid(final Record<String, AnalysisResult> record) {
+    private static boolean isRecordValid(final ConsumerRecord<String, AnalysisResult> record) {
final AnalysisResult result = record.value();
if (!result.hasComponent()) {
LOGGER.warn("""
@@ -201,4 +204,5 @@ private static boolean isRecordValid(final Record<String, AnalysisResult> record
}
return true;
}
+
}
diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessor.java b/src/main/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessor.java
index 7e16f8385..3385701fc 100644
--- a/src/main/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessor.java
+++ b/src/main/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessor.java
@@ -2,19 +2,19 @@
import alpine.Config;
import alpine.common.logging.Logger;
-import alpine.common.metrics.Metrics;
-import alpine.notification.Notification;
-import alpine.notification.NotificationLevel;
import com.google.protobuf.Any;
import com.google.protobuf.Timestamp;
import com.google.protobuf.util.Timestamps;
-import io.micrometer.core.instrument.Timer;
-import org.apache.kafka.streams.processor.api.ContextualFixedKeyProcessor;
-import org.apache.kafka.streams.processor.api.ContextualProcessor;
-import org.apache.kafka.streams.processor.api.FixedKeyRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.dependencytrack.event.PortfolioVulnerabilityAnalysisEvent;
+import org.dependencytrack.event.kafka.KafkaEvent;
+import org.dependencytrack.event.kafka.KafkaEventConverter;
import org.dependencytrack.event.kafka.KafkaEventDispatcher;
import org.dependencytrack.event.kafka.KafkaEventHeaders;
+import org.dependencytrack.event.kafka.KafkaTopics;
import org.dependencytrack.event.kafka.KafkaUtil;
+import org.dependencytrack.event.kafka.processor.api.Processor;
+import org.dependencytrack.event.kafka.processor.exception.ProcessingException;
import org.dependencytrack.model.AnalysisJustification;
import org.dependencytrack.model.AnalysisResponse;
import org.dependencytrack.model.AnalysisState;
@@ -25,8 +25,6 @@
import org.dependencytrack.model.VulnerabilityAnalysisLevel;
import org.dependencytrack.model.mapping.PolicyProtoMapper;
import org.dependencytrack.notification.NotificationConstants;
-import org.dependencytrack.notification.NotificationGroup;
-import org.dependencytrack.notification.NotificationScope;
import org.dependencytrack.parser.dependencytrack.ModelConverterCdxToVuln;
import org.dependencytrack.persistence.QueryManager;
import org.dependencytrack.persistence.jdbi.NotificationSubjectDao;
@@ -34,13 +32,15 @@
import org.dependencytrack.policy.vulnerability.VulnerabilityPolicyEvaluator;
import org.dependencytrack.policy.vulnerability.VulnerabilityPolicyRating;
import org.dependencytrack.proto.notification.v1.Group;
+import org.dependencytrack.proto.notification.v1.Level;
+import org.dependencytrack.proto.notification.v1.Notification;
+import org.dependencytrack.proto.notification.v1.Scope;
import org.dependencytrack.proto.vulnanalysis.v1.ScanKey;
import org.dependencytrack.proto.vulnanalysis.v1.ScanResult;
import org.dependencytrack.proto.vulnanalysis.v1.ScanStatus;
import org.dependencytrack.proto.vulnanalysis.v1.Scanner;
import org.dependencytrack.proto.vulnanalysis.v1.ScannerResult;
import org.dependencytrack.util.PersistenceUtil;
-import org.dependencytrack.util.PersistenceUtil.Differ;
import org.jdbi.v3.core.mapper.reflect.ColumnName;
import org.jdbi.v3.sqlobject.config.RegisterBeanMapper;
import org.jdbi.v3.sqlobject.config.RegisterConstructorMapper;
@@ -86,38 +86,46 @@
import static org.dependencytrack.util.VulnerabilityUtil.isAuthoritativeSource;
import static org.dependencytrack.util.VulnerabilityUtil.isMirroringEnabled;
-/**
- * A {@link ContextualProcessor} responsible for processing {@link ScanResult}s.
- */
-public class VulnerabilityScanResultProcessor extends ContextualFixedKeyProcessor<ScanKey, ScanResult, ScanResult> {
+class VulnerabilityScanResultProcessor implements Processor<ScanKey, ScanResult> {
+
+ static final String PROCESSOR_NAME = "vuln.scan.result";
private static final Logger LOGGER = Logger.getLogger(VulnerabilityScanResultProcessor.class);
- private static final Timer TIMER = Timer.builder("vuln_scan_result_processing")
- .description("Time taken to process vulnerability scan results")
- .register(Metrics.getRegistry());
- private final KafkaEventDispatcher eventDispatcher = new KafkaEventDispatcher();
+ private final KafkaEventDispatcher eventDispatcher;
private final VulnerabilityPolicyEvaluator vulnPolicyEvaluator;
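+    // Events queued while processing a single record; dispatched in one blocking call once
+    // processing completes. ThreadLocal because process() may be invoked concurrently.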
+    private final ThreadLocal<ArrayList<KafkaEvent<?, ?>>> eventsToDispatch = ThreadLocal.withInitial(ArrayList::new);
- public VulnerabilityScanResultProcessor() {
- this(Config.getInstance().getPropertyAsBoolean(VULNERABILITY_POLICY_ANALYSIS_ENABLED)
+ VulnerabilityScanResultProcessor() {
+ this(new KafkaEventDispatcher(), Config.getInstance().getPropertyAsBoolean(VULNERABILITY_POLICY_ANALYSIS_ENABLED)
? ServiceLoader.load(VulnerabilityPolicyEvaluator.class).findFirst().orElseThrow()
: null);
}
- VulnerabilityScanResultProcessor(final VulnerabilityPolicyEvaluator vulnPolicyEvaluator) {
+ VulnerabilityScanResultProcessor(final KafkaEventDispatcher eventDispatcher,
+ final VulnerabilityPolicyEvaluator vulnPolicyEvaluator) {
+ this.eventDispatcher = eventDispatcher;
this.vulnPolicyEvaluator = vulnPolicyEvaluator;
}
@Override
-    public void process(final FixedKeyRecord<ScanKey, ScanResult> record) {
+    public void process(final ConsumerRecord<ScanKey, ScanResult> record) throws ProcessingException {
+ try {
+ processInternal(record);
+ eventDispatcher.dispatchAllBlocking(eventsToDispatch.get());
+ } finally {
+ eventsToDispatch.get().clear();
+ }
+ }
+
+    private void processInternal(final ConsumerRecord<ScanKey, ScanResult> record) {
final ScanKey scanKey = record.key();
- final ScanResult result = record.value();
+ final ScanResult scanResult = record.value();
final UUID componentUuid = UUID.fromString(scanKey.getComponentUuid());
final VulnerabilityAnalysisLevel analysisLevel = determineAnalysisLevel(record);
final boolean isNewComponent = determineIsComponentNew(record);
+ maybeQueueResultProcessedEvent(scanKey, scanResult);
- final Timer.Sample timerSample = Timer.start();
try (final var qm = new QueryManager()) {
// Do not unload fields upon commit (why is this even the default WTF).
qm.getPersistenceManager().setProperty(PROPERTY_RETAIN_VALUES, "true");
@@ -130,15 +138,9 @@ public void process(final FixedKeyRecord<ScanKey, ScanResult> record) {
return;
}
- for (final ScannerResult scannerResult : result.getScannerResultsList()) {
+ for (final ScannerResult scannerResult : scanResult.getScannerResultsList()) {
processScannerResult(qm, component, scanKey, scannerResult, analysisLevel, isNewComponent);
}
- } catch (Exception e) {
- LOGGER.error("Failed to process scan result for component %s (scanKey: %s)"
- .formatted(componentUuid, prettyPrint(scanKey)), e);
- } finally {
- timerSample.stop(TIMER);
- context().forward(record);
}
}
@@ -149,12 +151,15 @@ private void processScannerResult(final QueryManager qm, final Component compone
if (scannerResult.getStatus() == SCAN_STATUS_FAILED) {
final var message = "Scan of component %s with %s failed (scanKey: %s): %s"
.formatted(component.uuid(), scannerResult.getScanner(), prettyPrint(scanKey), scannerResult.getFailureReason());
- eventDispatcher.dispatchAsync(component.projectUuid(), new Notification()
- .scope(NotificationScope.SYSTEM)
- .group(NotificationGroup.ANALYZER)
- .level(NotificationLevel.ERROR)
- .title(NotificationConstants.Title.ANALYZER_ERROR)
- .content(message));
+ final var notification = Notification.newBuilder()
+ .setScope(Scope.SCOPE_SYSTEM)
+ .setGroup(Group.GROUP_ANALYZER)
+ .setLevel(Level.LEVEL_ERROR)
+ .setTimestamp(Timestamps.now())
+ .setTitle(NotificationConstants.Title.ANALYZER_ERROR)
+ .setContent(message)
+ .build();
+ eventsToDispatch.get().add(KafkaEventConverter.convert(component.projectUuid().toString(), notification));
LOGGER.warn(message);
return;
} else if (scannerResult.getStatus() != ScanStatus.SCAN_STATUS_SUCCESSFUL) {
@@ -176,7 +181,7 @@ private void processScannerResult(final QueryManager qm, final Component compone
LOGGER.debug("Identified %d new vulnerabilities for %s with %s (scanKey: %s)"
.formatted(newVulnUuids.size(), scanKey.getComponentUuid(), scannerResult.getScanner(), prettyPrint(scanKey)));
- maybeSendNotifications(qm, component, isNewComponent, analysisLevel, newVulnUuids);
+ maybeQueueNotifications(qm, component, isNewComponent, analysisLevel, newVulnUuids);
}
/**
@@ -272,7 +277,7 @@ private Vulnerability syncVulnerability(final QueryManager qm, final Vulnerabili
}
if (canUpdateVulnerability(existingVuln, scanner)) {
- final var differ = new Differ<>(existingVuln, vuln);
+ final var differ = new PersistenceUtil.Differ<>(existingVuln, vuln);
// TODO: Consider using something like javers to get a rich diff of WHAT changed; https://github.com/javers/javers
differ.applyIfChanged("title", Vulnerability::getTitle, existingVuln::setTitle);
@@ -617,6 +622,31 @@ private List maybeApplyPolicyAnalyses(final Dao dao, final Compon
.toList();
}
+ private void maybeQueueResultProcessedEvent(final ScanKey scanKey, final ScanResult scanResult) {
+ // Vulnerability scans targeting the entire portfolio are currently not tracked.
+ // There's no point in including results in the following repartition, and querying
+ // the database for their scan token, given the queries will never return anything anyway.
+ // Filtering results of portfolio analyses here also reduces the chance of hot partitions.
+ if (PortfolioVulnerabilityAnalysisEvent.CHAIN_IDENTIFIER.toString().equals(scanKey.getScanToken())) {
+ return;
+ }
+
+ // Drop vulnerabilities from scanner results, as they can be rather large, and we don't need them anymore.
+ // Dropping them will save us some compression and network overhead during the repartition.
+ // We can remove this step should we ever need access to the vulnerabilities again.
+ final ScanResult strippedScanResult = scanResult.toBuilder()
+ .clearScannerResults()
+ .addAllScannerResults(scanResult.getScannerResultsList().stream()
+ .map(scannerResult -> scannerResult.toBuilder()
+ .clearBom()
+ .build())
+ .toList())
+ .build();
+
+ final var event = new KafkaEvent<>(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, scanKey.getScanToken(), strippedScanResult);
+ eventsToDispatch.get().add(event);
+ }
+
/**
* Send {@link Group#GROUP_NEW_VULNERABLE_DEPENDENCY} and {@link Group#GROUP_NEW_VULNERABILITY} notifications
* for a given {@link Component}, if it was found to have at least one non-suppressed vulnerability.
@@ -627,18 +657,17 @@ private List maybeApplyPolicyAnalyses(final Dao dao, final Compon
* @param analysisLevel The {@link VulnerabilityAnalysisLevel}
* @param newVulns Newly identified {@link Vulnerability}s
*/
- private void maybeSendNotifications(final QueryManager qm, final Component component, final boolean isNewComponent,
-                                        final VulnerabilityAnalysisLevel analysisLevel, final List<Vulnerability> newVulns) {
+    private void maybeQueueNotifications(final QueryManager qm, final Component component, final boolean isNewComponent,
+                                         final VulnerabilityAnalysisLevel analysisLevel, final List<Vulnerability> newVulns) {
if (newVulns.isEmpty()) {
return;
}
final Timestamp notificationTimestamp = Timestamps.now();
-        final var notifications = new ArrayList<org.dependencytrack.proto.notification.v1.Notification>();
jdbi(qm).useExtension(NotificationSubjectDao.class, dao -> {
if (isNewComponent) {
dao.getForNewVulnerableDependency(component.uuid())
- .map(subject -> org.dependencytrack.proto.notification.v1.Notification.newBuilder()
+ .map(subject -> Notification.newBuilder()
.setScope(SCOPE_PORTFOLIO)
.setGroup(GROUP_NEW_VULNERABLE_DEPENDENCY)
.setLevel(LEVEL_INFORMATIONAL)
@@ -647,11 +676,12 @@ private void maybeSendNotifications(final QueryManager qm, final Component compo
.setContent(generateNotificationContent(subject.getComponent(), subject.getVulnerabilitiesList()))
.setSubject(Any.pack(subject))
.build())
- .ifPresent(notifications::add);
+ .map(notification -> KafkaEventConverter.convert(component.projectUuid().toString(), notification))
+ .ifPresent(eventsToDispatch.get()::add);
}
dao.getForNewVulnerabilities(component.uuid(), newVulns.stream().map(Vulnerability::getUuid).toList(), analysisLevel).stream()
- .map(subject -> org.dependencytrack.proto.notification.v1.Notification.newBuilder()
+ .map(subject -> Notification.newBuilder()
.setScope(SCOPE_PORTFOLIO)
.setGroup(GROUP_NEW_VULNERABILITY)
.setLevel(LEVEL_INFORMATIONAL)
@@ -660,12 +690,9 @@ private void maybeSendNotifications(final QueryManager qm, final Component compo
.setContent(generateNotificationContent(subject.getVulnerability()))
.setSubject(Any.pack(subject))
.build())
- .forEach(notifications::add);
+ .map(notification -> KafkaEventConverter.convert(component.projectUuid().toString(), notification))
+ .forEach(eventsToDispatch.get()::add);
});
-
- for (final org.dependencytrack.proto.notification.v1.Notification notification : notifications) {
- eventDispatcher.dispatchAsync(component.projectUuid().toString(), notification);
- }
}
private boolean canUpdateVulnerability(final Vulnerability vuln, final Scanner scanner) {
@@ -682,17 +709,17 @@ private boolean canUpdateVulnerability(final Vulnerability vuln, final Scanner s
// it should be able to update it. This will be the case for the OSS Index scanner
// and sonatype-XXX vulnerabilities for example.
canUpdate &= isAuthoritativeSource(vuln, convert(scanner))
- // Alternatively, if the vulnerability could be mirrored, but mirroring
- // is disabled, it is OK to override any existing data.
- //
- // Ideally, we'd track the data from all sources instead of just overriding
- // it, but for now this will have to do it.
- || (canBeMirrored(vuln) && !isMirroringEnabled(vuln));
+ // Alternatively, if the vulnerability could be mirrored, but mirroring
+ // is disabled, it is OK to override any existing data.
+ //
+ // Ideally, we'd track the data from all sources instead of just overriding
+ // it, but for now this will have to do it.
+ || (canBeMirrored(vuln) && !isMirroringEnabled(vuln));
return canUpdate;
}
-    private static VulnerabilityAnalysisLevel determineAnalysisLevel(final FixedKeyRecord<?, ?> record) {
+    private static VulnerabilityAnalysisLevel determineAnalysisLevel(final ConsumerRecord<?, ?> record) {
return KafkaUtil.getEventHeader(record.headers(), KafkaEventHeaders.VULN_ANALYSIS_LEVEL)
.map(value -> {
try {
@@ -706,7 +733,7 @@ private static VulnerabilityAnalysisLevel determineAnalysisLevel(final FixedKeyR
.orElse(VulnerabilityAnalysisLevel.PERIODIC_ANALYSIS);
}
-    private static boolean determineIsComponentNew(final FixedKeyRecord<?, ?> record) {
+    private static boolean determineIsComponentNew(final ConsumerRecord<?, ?> record) {
return KafkaUtil.getEventHeader(record.headers(), KafkaEventHeaders.IS_NEW_COMPONENT)
.map(Boolean::parseBoolean)
.orElse(false);
diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/api/AbstractProcessingStrategy.java b/src/main/java/org/dependencytrack/event/kafka/processor/api/AbstractProcessingStrategy.java
new file mode 100644
index 000000000..27d198b56
--- /dev/null
+++ b/src/main/java/org/dependencytrack/event/kafka/processor/api/AbstractProcessingStrategy.java
@@ -0,0 +1,87 @@
+package org.dependencytrack.event.kafka.processor.api;
+
+import org.apache.commons.lang3.exception.ExceptionUtils;
+import org.apache.http.conn.ConnectTimeoutException;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.common.errors.SerializationException;
+import org.apache.kafka.common.serialization.Serde;
+import org.datanucleus.api.jdo.exceptions.ConnectionInUseException;
+import org.datanucleus.store.query.QueryInterruptedException;
+import org.dependencytrack.event.kafka.processor.exception.RetryableProcessingException;
+import org.postgresql.util.PSQLState;
+
+import javax.jdo.JDOOptimisticVerificationException;
+import java.net.SocketTimeoutException;
+import java.sql.SQLException;
+import java.sql.SQLTransientConnectionException;
+import java.sql.SQLTransientException;
+import java.util.List;
+import java.util.concurrent.TimeoutException;
+
+/**
+ * An abstract {@link ProcessingStrategy} that provides various shared functionality.
+ *
+ * @param <K> Type of the {@link ConsumerRecord} key
+ * @param <V> Type of the {@link ConsumerRecord} value
+ */
+abstract class AbstractProcessingStrategy<K, V> implements ProcessingStrategy {
+
+    private final Serde<K> keySerde;
+    private final Serde<V> valueSerde;
+
+    AbstractProcessingStrategy(final Serde<K> keySerde, final Serde<V> valueSerde) {
+ this.keySerde = keySerde;
+ this.valueSerde = valueSerde;
+ }
+
+ /**
+ * @param record The {@link ConsumerRecord} to deserialize key and value of
+ * @return A {@link ConsumerRecord} with deserialized key and value
+ * @throws SerializationException When deserializing the {@link ConsumerRecord} failed
+ */
+    ConsumerRecord<K, V> deserialize(final ConsumerRecord<byte[], byte[]> record) {
+ final K deserializedKey;
+ final V deserializedValue;
+ try {
+ deserializedKey = keySerde.deserializer().deserialize(record.topic(), record.key());
+ deserializedValue = valueSerde.deserializer().deserialize(record.topic(), record.value());
+ } catch (RuntimeException e) {
+ if (e instanceof SerializationException) {
+ throw e;
+ }
+
+ throw new SerializationException(e);
+ }
+
+ return new ConsumerRecord<>(record.topic(), record.partition(), record.offset(),
+ record.timestamp(), record.timestampType(), record.serializedKeySize(), record.serializedValueSize(),
+ deserializedKey, deserializedValue, record.headers(), record.leaderEpoch());
+ }
+
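+    // Exception types that indicate a transient infrastructure problem and therefore warrant a retry.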
+    private static final List<Class<? extends Exception>> KNOWN_TRANSIENT_EXCEPTIONS = List.of(
+ ConnectTimeoutException.class,
+ ConnectionInUseException.class,
+ JDOOptimisticVerificationException.class,
+ QueryInterruptedException.class,
+ SocketTimeoutException.class,
+ SQLTransientException.class,
+ SQLTransientConnectionException.class,
+ TimeoutException.class
+ );
+
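+    /**
+     * @param throwable The {@link Throwable} to check
+     * @return Whether the given {@link Throwable}, or any of its causes, is considered transient and worth retrying
+     */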
+ boolean isRetryableException(final Throwable throwable) {
+ if (throwable instanceof RetryableProcessingException) {
+ return true;
+ }
+
+ final boolean isKnownTransientException = ExceptionUtils.getThrowableList(throwable).stream()
+ .anyMatch(cause -> KNOWN_TRANSIENT_EXCEPTIONS.contains(cause.getClass()));
+ if (isKnownTransientException) {
+ return true;
+ }
+
+ return ExceptionUtils.getRootCause(throwable) instanceof final SQLException se
+ && PSQLState.isConnectionError(se.getSQLState());
+ }
+
+}
diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/api/BatchProcessingStrategy.java b/src/main/java/org/dependencytrack/event/kafka/processor/api/BatchProcessingStrategy.java
new file mode 100644
index 000000000..343e8aa7e
--- /dev/null
+++ b/src/main/java/org/dependencytrack/event/kafka/processor/api/BatchProcessingStrategy.java
@@ -0,0 +1,65 @@
+package org.dependencytrack.event.kafka.processor.api;
+
+import alpine.common.logging.Logger;
+import io.confluent.parallelconsumer.PCRetriableException;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.common.errors.SerializationException;
+import org.apache.kafka.common.serialization.Serde;
+import org.dependencytrack.event.kafka.processor.exception.ProcessingException;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * A {@link ProcessingStrategy} that processes records in batches.
+ *
+ * @param <K> Type of the {@link ConsumerRecord} key
+ * @param <V> Type of the {@link ConsumerRecord} value
+ */
+class BatchProcessingStrategy<K, V> extends AbstractProcessingStrategy<K, V> {
+
+ private static final Logger LOGGER = Logger.getLogger(BatchProcessingStrategy.class);
+
+    private final BatchProcessor<K, V> batchConsumer;
+
+    BatchProcessingStrategy(final BatchProcessor<K, V> batchConsumer,
+                            final Serde<K> keySerde, final Serde<V> valueSerde) {
+ super(keySerde, valueSerde);
+ this.batchConsumer = batchConsumer;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+    public void processRecords(final List<ConsumerRecord<byte[], byte[]>> records) {
+        final var deserializedRecords = new ArrayList<ConsumerRecord<K, V>>(records.size());
+        for (final ConsumerRecord<byte[], byte[]> record : records) {
+ try {
+ deserializedRecords.add(deserialize(record));
+ } catch (SerializationException e) {
+ // TODO: Consider supporting error handlers, e.g. to send record to DLT.
+ LOGGER.error("Failed to deserialize consumer record %s; Skipping".formatted(record), e);
+ }
+ }
+
+ if (deserializedRecords.isEmpty()) {
+ LOGGER.warn("All of the %d records in this batch failed to be deserialized".formatted(records.size()));
+ return;
+ }
+
+ try {
+ batchConsumer.process(deserializedRecords);
+ } catch (ProcessingException | RuntimeException e) {
+ if (isRetryableException(e)) {
+ LOGGER.warn("Encountered retryable exception while processing %d records".formatted(deserializedRecords.size()), e);
+ throw new PCRetriableException(e);
+ }
+
+ LOGGER.error("Encountered non-retryable exception while processing %d records; Skipping".formatted(deserializedRecords.size()), e);
+ // TODO: Consider supporting error handlers, e.g. to send records to DLT.
+ // Skip records to avoid poison-pill scenario.
+ }
+ }
+
+}
diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/api/BatchProcessor.java b/src/main/java/org/dependencytrack/event/kafka/processor/api/BatchProcessor.java
new file mode 100644
index 000000000..5e1b6ff03
--- /dev/null
+++ b/src/main/java/org/dependencytrack/event/kafka/processor/api/BatchProcessor.java
@@ -0,0 +1,26 @@
+package org.dependencytrack.event.kafka.processor.api;
+
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.dependencytrack.event.kafka.processor.exception.ProcessingException;
+
+import java.util.List;
+
+/**
+ * A processor of {@link ConsumerRecord} batches.
+ *
+ * @param <K> Type of the {@link ConsumerRecord} key
+ * @param <V> Type of the {@link ConsumerRecord} value
+ */
+public interface BatchProcessor<K, V> {
+
+ /**
+ * Process a batch of {@link ConsumerRecord}s.
+ *
+ * This method may be called by multiple threads concurrently and thus MUST be thread safe!
+ *
+ * @param records Batch of {@link ConsumerRecord}s to process
+ * @throws ProcessingException When consuming the batch of {@link ConsumerRecord}s failed
+ */
+    void process(final List<ConsumerRecord<K, V>> records) throws ProcessingException;
+
+}
diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/api/ProcessingStrategy.java b/src/main/java/org/dependencytrack/event/kafka/processor/api/ProcessingStrategy.java
new file mode 100644
index 000000000..459f80078
--- /dev/null
+++ b/src/main/java/org/dependencytrack/event/kafka/processor/api/ProcessingStrategy.java
@@ -0,0 +1,16 @@
+package org.dependencytrack.event.kafka.processor.api;
+
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+
+import java.util.List;
+
+interface ProcessingStrategy {
+
+ /**
+ * Process zero or more {@link ConsumerRecord}s.
+ *
+ * @param records The {@link ConsumerRecord}s to process
+ */
+    void processRecords(final List<ConsumerRecord<byte[], byte[]>> records);
+
+}
diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/api/Processor.java b/src/main/java/org/dependencytrack/event/kafka/processor/api/Processor.java
new file mode 100644
index 000000000..e905a7937
--- /dev/null
+++ b/src/main/java/org/dependencytrack/event/kafka/processor/api/Processor.java
@@ -0,0 +1,24 @@
+package org.dependencytrack.event.kafka.processor.api;
+
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.dependencytrack.event.kafka.processor.exception.ProcessingException;
+
+/**
+ * A processor of individual {@link ConsumerRecord}s.
+ *
+ * @param <K> Type of the {@link ConsumerRecord} key
+ * @param <V> Type of the {@link ConsumerRecord} value
+ */
+public interface Processor<K, V> {
+
+ /**
+ * Process a {@link ConsumerRecord}.
+ *
+ * This method may be called by multiple threads concurrently and thus MUST be thread safe!
+ *
+ * @param record The {@link ConsumerRecord} to process
+ * @throws ProcessingException When processing the {@link ConsumerRecord} failed
+ */
+    void process(final ConsumerRecord<K, V> record) throws ProcessingException;
+
+}
diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/api/ProcessorManager.java b/src/main/java/org/dependencytrack/event/kafka/processor/api/ProcessorManager.java
new file mode 100644
index 000000000..4916c87a6
--- /dev/null
+++ b/src/main/java/org/dependencytrack/event/kafka/processor/api/ProcessorManager.java
@@ -0,0 +1,294 @@
+package org.dependencytrack.event.kafka.processor.api;
+
+import alpine.Config;
+import alpine.common.logging.Logger;
+import alpine.common.metrics.Metrics;
+import io.confluent.parallelconsumer.ParallelConsumerOptions;
+import io.confluent.parallelconsumer.ParallelConsumerOptions.ProcessingOrder;
+import io.confluent.parallelconsumer.ParallelEoSStreamProcessor;
+import io.confluent.parallelconsumer.ParallelStreamProcessor;
+import io.github.resilience4j.core.IntervalFunction;
+import io.micrometer.core.instrument.binder.kafka.KafkaClientMetrics;
+import org.apache.kafka.clients.consumer.Consumer;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.serialization.ByteArrayDeserializer;
+import org.dependencytrack.event.kafka.KafkaTopics.Topic;
+import org.eclipse.microprofile.health.HealthCheckResponse;
+
+import java.time.Duration;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.regex.Pattern;
+
+import static org.apache.kafka.clients.CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG;
+import static org.apache.kafka.clients.CommonClientConfigs.CLIENT_ID_CONFIG;
+import static org.apache.kafka.clients.consumer.ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG;
+import static org.apache.kafka.clients.consumer.ConsumerConfig.GROUP_ID_CONFIG;
+import static org.apache.kafka.clients.consumer.ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG;
+import static org.apache.kafka.clients.consumer.ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG;
+import static org.dependencytrack.common.ConfigKey.KAFKA_BOOTSTRAP_SERVERS;
+
+public class ProcessorManager implements AutoCloseable {
+
+ private static final Logger LOGGER = Logger.getLogger(ProcessorManager.class);
+ private static final Pattern PROCESSOR_NAME_PATTERN = Pattern.compile("^[a-z.]+$");
+
+ private static final String PROPERTY_MAX_BATCH_SIZE = "max.batch.size";
+ private static final int PROPERTY_MAX_BATCH_SIZE_DEFAULT = 10;
+ private static final String PROPERTY_MAX_CONCURRENCY = "max.concurrency";
+ private static final int PROPERTY_MAX_CONCURRENCY_DEFAULT = 1;
+ private static final String PROPERTY_PROCESSING_ORDER = "processing.order";
+ private static final ProcessingOrder PROPERTY_PROCESSING_ORDER_DEFAULT = ProcessingOrder.PARTITION;
+ private static final String PROPERTY_RETRY_INITIAL_DELAY_MS = "retry.initial.delay.ms";
+ private static final long PROPERTY_RETRY_INITIAL_DELAY_MS_DEFAULT = 1000; // 1s
+ private static final String PROPERTY_RETRY_MULTIPLIER = "retry.multiplier";
+ private static final int PROPERTY_RETRY_MULTIPLIER_DEFAULT = 1;
+ private static final String PROPERTY_RETRY_RANDOMIZATION_FACTOR = "retry.randomization.factor";
+ private static final double PROPERTY_RETRY_RANDOMIZATION_FACTOR_DEFAULT = 0.3;
+ private static final String PROPERTY_RETRY_MAX_DELAY_MS = "retry.max.delay.ms";
+ private static final long PROPERTY_RETRY_MAX_DELAY_MS_DEFAULT = 60 * 1000; // 60s
+
+    private final Map<String, ManagedProcessor> managedProcessors = new LinkedHashMap<>();
+ private final Config config;
+
+ public ProcessorManager() {
+ this(Config.getInstance());
+ }
+
+ public ProcessorManager(final Config config) {
+ this.config = config;
+ }
+
+ /**
+ * Register a new {@link Processor}.
+ *
+ * @param name Name of the processor to register
+ * @param processor The processor to register
+ * @param topic The topic to have the processor subscribe to
+     * @param <K> Type of record keys in the topic
+     * @param <V> Type of record values in the topic
+ */
+    public <K, V> void registerProcessor(final String name, final Processor<K, V> processor, final Topic<K, V> topic) {
+ requireValidProcessorName(name);
+ final var processingStrategy = new SingleRecordProcessingStrategy<>(processor, topic.keySerde(), topic.valueSerde());
+        final ParallelStreamProcessor<byte[], byte[]> parallelConsumer = createParallelConsumer(name, false);
+ managedProcessors.put(name, new ManagedProcessor(parallelConsumer, processingStrategy, topic.name()));
+ }
+
+ /**
+ * Register a new {@link BatchProcessor}.
+ *
+ * @param name Name of the processor to register
+ * @param processor The processor to register
+ * @param topic The topic to have the processor subscribe to
+ * @param <K> Type of record keys in the topic
+ * @param <V> Type of record values in the topic
+ */
+ public <K, V> void registerBatchProcessor(final String name, final BatchProcessor<K, V> processor, final Topic<K, V> topic) {
+ requireValidProcessorName(name);
+ final var processingStrategy = new BatchProcessingStrategy<>(processor, topic.keySerde(), topic.valueSerde());
+ final ParallelStreamProcessor<byte[], byte[]> parallelConsumer = createParallelConsumer(name, true);
+ managedProcessors.put(name, new ManagedProcessor(parallelConsumer, processingStrategy, topic.name()));
+ }
+
+ @SuppressWarnings("resource")
+ public void startAll() {
+ for (final Map.Entry<String, ManagedProcessor> entry : managedProcessors.entrySet()) {
+ final String processorName = entry.getKey();
+ final ManagedProcessor managedProcessor = entry.getValue();
+
+ LOGGER.info("Starting processor %s".formatted(processorName));
+ managedProcessor.parallelConsumer().subscribe(List.of(managedProcessor.topic()));
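+ // parallel-consumer invokes this callback for each record (or batch of records), according to the configured processing order and concurrency.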
+ managedProcessor.parallelConsumer().poll(pollCtx -> {
+ final List<ConsumerRecord<byte[], byte[]>> polledRecords = pollCtx.getConsumerRecordsFlattened();
+ managedProcessor.processingStrategy().processRecords(polledRecords);
+ });
+ }
+ }
+
+ public HealthCheckResponse probeHealth() {
+ final var responseBuilder = HealthCheckResponse.named("kafka-processors");
+
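+ // The overall check reports UP only while every registered processor's parallel consumer is still running.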
+ boolean isUp = true;
+ for (final Map.Entry<String, ManagedProcessor> entry : managedProcessors.entrySet()) {
+ final String processorName = entry.getKey();
+ final ParallelStreamProcessor<?, ?> parallelConsumer = entry.getValue().parallelConsumer();
+ final boolean isProcessorUp = !parallelConsumer.isClosedOrFailed();
+
+ responseBuilder.withData(processorName, isProcessorUp
+ ? HealthCheckResponse.Status.UP.name()
+ : HealthCheckResponse.Status.DOWN.name());
+ if (!isProcessorUp
+ && parallelConsumer instanceof final ParallelEoSStreamProcessor<?, ?> concreteParallelConsumer
+ && concreteParallelConsumer.getFailureCause() != null) {
+ responseBuilder.withData("%s_failure_reason".formatted(processorName),
+ concreteParallelConsumer.getFailureCause().getMessage());
+ }
+
+ isUp &= isProcessorUp;
+ }
+
+ return responseBuilder.status(isUp).build();
+ }
+
+ @Override
+ @SuppressWarnings("resource")
+ public void close() {
+ final Iterator<Map.Entry<String, ManagedProcessor>> entryIterator = managedProcessors.entrySet().iterator();
+ while (entryIterator.hasNext()) {
+ final Map.Entry<String, ManagedProcessor> entry = entryIterator.next();
+ final String processorName = entry.getKey();
+ final ManagedProcessor managedProcessor = entry.getValue();
+
+ LOGGER.info("Stopping processor %s".formatted(processorName));
+ managedProcessor.parallelConsumer().closeDontDrainFirst();
+ entryIterator.remove();
+ }
+ }
+
+ private ParallelStreamProcessor<byte[], byte[]> createParallelConsumer(final String processorName, final boolean isBatch) {
+ final var optionsBuilder = ParallelConsumerOptions.<byte[], byte[]>builder()
+ .consumer(createConsumer(processorName));
+
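+ // Optional tuning is provided via pass-through properties prefixed with "kafka.processor.<name>."; see getPassThroughProperties.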
+ final Map<String, String> properties = getPassThroughProperties(processorName.toLowerCase());
+
+ final ProcessingOrder processingOrder = Optional.ofNullable(properties.get(PROPERTY_PROCESSING_ORDER))
+ .map(String::toUpperCase)
+ .map(ProcessingOrder::valueOf)
+ .orElse(PROPERTY_PROCESSING_ORDER_DEFAULT);
+ optionsBuilder.ordering(processingOrder);
+
+ final int maxConcurrency = Optional.ofNullable(properties.get(PROPERTY_MAX_CONCURRENCY))
+ .map(Integer::parseInt)
+ .orElse(PROPERTY_MAX_CONCURRENCY_DEFAULT);
+ optionsBuilder.maxConcurrency(maxConcurrency);
+
+ final Optional<String> optionalMaxBatchSizeProperty = Optional.ofNullable(properties.get(PROPERTY_MAX_BATCH_SIZE));
+ if (isBatch) {
+ if (processingOrder == ProcessingOrder.PARTITION) {
+ LOGGER.warn("""
+ Processor %s is configured to use batching with processing order %s; \
+ Batch sizes are limited by the number of partitions in the topic, \
+ and may thus not yield the desired effect \
+ (https://github.com/confluentinc/parallel-consumer/issues/551)\
+ """.formatted(processorName, processingOrder));
+ }
+
+ final int maxBatchSize = optionalMaxBatchSizeProperty
+ .map(Integer::parseInt)
+ .orElse(PROPERTY_MAX_BATCH_SIZE_DEFAULT);
+ optionsBuilder.batchSize(maxBatchSize);
+ } else if (optionalMaxBatchSizeProperty.isPresent()) {
+ LOGGER.warn("Processor %s is configured with %s, but it is not a batch processor; Ignoring property"
+ .formatted(processorName, PROPERTY_MAX_BATCH_SIZE));
+ }
+
+ final IntervalFunction retryIntervalFunction = getRetryIntervalFunction(properties);
+ optionsBuilder.retryDelayProvider(recordCtx -> {
+ final long delayMillis = retryIntervalFunction.apply(recordCtx.getNumberOfFailedAttempts());
+ return Duration.ofMillis(delayMillis);
+ });
+
+ if (Config.getInstance().getPropertyAsBoolean(Config.AlpineKey.METRICS_ENABLED)) {
+ optionsBuilder
+ .meterRegistry(Metrics.getRegistry())
+ .pcInstanceTag(processorName);
+ }
+
+ final ParallelConsumerOptions<byte[], byte[]> options = optionsBuilder.build();
+ LOGGER.debug("Creating parallel consumer for processor %s with options %s".formatted(processorName, options));
+ return ParallelStreamProcessor.createEosStreamProcessor(options);
+ }
+
+ private Consumer<byte[], byte[]> createConsumer(final String processorName) {
+ final var consumerConfig = new HashMap<String, Object>();
+ consumerConfig.put(BOOTSTRAP_SERVERS_CONFIG, config.getProperty(KAFKA_BOOTSTRAP_SERVERS));
+ consumerConfig.put(CLIENT_ID_CONFIG, "%s-consumer".formatted(processorName));
+ consumerConfig.put(GROUP_ID_CONFIG, processorName);
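+ // Each processor uses its own consumer group, so offsets are committed and tracked independently per processor.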
+
+ final String propertyPrefix = "%s.consumer".formatted(processorName.toLowerCase());
+ final Map<String, String> properties = getPassThroughProperties(propertyPrefix);
+ for (final Map.Entry<String, String> property : properties.entrySet()) {
+ if (!ConsumerConfig.configNames().contains(property.getKey())) {
+ LOGGER.warn("Consumer property %s was set for processor %s, but is unknown; Ignoring"
+ .formatted(property.getKey(), processorName));
+ continue;
+ }
+
+ consumerConfig.put(property.getKey(), property.getValue());
+ }
+
+ // Properties that MUST NOT be overwritten under any circumstance have to be applied
+ // AFTER pass-through properties.
+ consumerConfig.put(KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
+ consumerConfig.put(VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
+ consumerConfig.put(ENABLE_AUTO_COMMIT_CONFIG, false); // Commits are managed by parallel consumer
+
+ LOGGER.debug("Creating consumer for processor %s with options %s".formatted(processorName, consumerConfig));
+ final var consumer = new KafkaConsumer<byte[], byte[]>(consumerConfig);
+
+ if (config.getPropertyAsBoolean(Config.AlpineKey.METRICS_ENABLED)) {
+ new KafkaClientMetrics(consumer).bindTo(Metrics.getRegistry());
+ }
+
+ return consumer;
+ }
+
+ private Map<String, String> getPassThroughProperties(final String prefix) {
+ final String fullPrefix = "kafka.processor.%s".formatted(prefix);
+ final Pattern fullPrefixPattern = Pattern.compile(Pattern.quote("%s.".formatted(fullPrefix)));
+
+ final Map<String, String> properties = config.getPassThroughProperties(fullPrefix);
+ if (properties.isEmpty()) {
+ return properties;
+ }
+
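+ // Strip the full prefix from each key, leaving only the bare property name (e.g. "max.batch.size").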
+ final var trimmedProperties = new HashMap<String, String>(properties.size());
+ for (final Map.Entry<String, String> property : properties.entrySet()) {
+ final String trimmedKey = fullPrefixPattern.matcher(property.getKey()).replaceFirst("");
+ trimmedProperties.put(trimmedKey, property.getValue());
+ }
+
+ return trimmedProperties;
+ }
+
+ private static void requireValidProcessorName(final String name) {
+ if (name == null) {
+ throw new IllegalArgumentException("name must not be null");
+ }
+ if (!PROCESSOR_NAME_PATTERN.matcher(name).matches()) {
+ throw new IllegalArgumentException("name is invalid; names must match the regular expression %s"
+ .formatted(PROCESSOR_NAME_PATTERN.pattern()));
+ }
+ }
+
+ private static IntervalFunction getRetryIntervalFunction(final Map<String, String> properties) {
+ final long initialDelayMs = Optional.ofNullable(properties.get(PROPERTY_RETRY_INITIAL_DELAY_MS))
+ .map(Long::parseLong)
+ .orElse(PROPERTY_RETRY_INITIAL_DELAY_MS_DEFAULT);
+ final long maxDelayMs = Optional.ofNullable(properties.get(PROPERTY_RETRY_MAX_DELAY_MS))
+ .map(Long::parseLong)
+ .orElse(PROPERTY_RETRY_MAX_DELAY_MS_DEFAULT);
+ final int multiplier = Optional.ofNullable(properties.get(PROPERTY_RETRY_MULTIPLIER))
+ .map(Integer::parseInt)
+ .orElse(PROPERTY_RETRY_MULTIPLIER_DEFAULT);
+ final double randomizationFactor = Optional.ofNullable(properties.get(PROPERTY_RETRY_RANDOMIZATION_FACTOR))
+ .map(Double::parseDouble)
+ .orElse(PROPERTY_RETRY_RANDOMIZATION_FACTOR_DEFAULT);
+
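+ // Exponential backoff: the delay grows by the given multiplier per failed attempt, is randomized by the randomization factor, and is capped at the configured maximum.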
+ return IntervalFunction.ofExponentialRandomBackoff(Duration.ofMillis(initialDelayMs),
+ multiplier, randomizationFactor, Duration.ofMillis(maxDelayMs));
+ }
+
+ private record ManagedProcessor(ParallelStreamProcessor<byte[], byte[]> parallelConsumer,
+ ProcessingStrategy processingStrategy,
+ String topic) {
+ }
+
+}
diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/api/SingleRecordProcessingStrategy.java b/src/main/java/org/dependencytrack/event/kafka/processor/api/SingleRecordProcessingStrategy.java
new file mode 100644
index 000000000..247e65943
--- /dev/null
+++ b/src/main/java/org/dependencytrack/event/kafka/processor/api/SingleRecordProcessingStrategy.java
@@ -0,0 +1,67 @@
+package org.dependencytrack.event.kafka.processor.api;
+
+import alpine.common.logging.Logger;
+import io.confluent.parallelconsumer.PCRetriableException;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.common.errors.SerializationException;
+import org.apache.kafka.common.serialization.Serde;
+import org.dependencytrack.event.kafka.processor.exception.ProcessingException;
+
+import java.util.List;
+
+/**
+ * A {@link ProcessingStrategy} that processes records individually.
+ *
+ * @param <K> Type of the {@link ConsumerRecord} key
+ * @param <V> Type of the {@link ConsumerRecord} value
+ */
+class SingleRecordProcessingStrategy<K, V> extends AbstractProcessingStrategy<K, V> {
+
+ private static final Logger LOGGER = Logger.getLogger(SingleRecordProcessingStrategy.class);
+
+ private final Processor<K, V> processor;
+
+ SingleRecordProcessingStrategy(final Processor<K, V> processor,
+ final Serde<K> keySerde, final Serde<V> valueSerde) {
+ super(keySerde, valueSerde);
+ this.processor = processor;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void processRecords(final List<ConsumerRecord<byte[], byte[]>> records) {
+ if (records.isEmpty()) {
+ return;
+ }
+ if (records.size() > 1) {
+ throw new IllegalArgumentException("Expected at most one record, but received %d".formatted(records.size()));
+ }
+
+ final ConsumerRecord<byte[], byte[]> record = records.get(0);
+
+ final ConsumerRecord<K, V> deserializedRecord;
+ try {
+ deserializedRecord = deserialize(record);
+ } catch (SerializationException e) {
+ LOGGER.error("Failed to deserialize consumer record %s; Skipping".formatted(record), e);
+ // TODO: Consider supporting error handlers, e.g. to send record to DLT.
+ return; // Skip record to avoid poison-pill scenario.
+ }
+
+ try {
+ processor.process(deserializedRecord);
+ } catch (ProcessingException | RuntimeException e) {
+ if (isRetryableException(e)) {
+ LOGGER.warn("Encountered retryable exception while processing %s".formatted(deserializedRecord), e);
+ throw new PCRetriableException(e);
+ }
+
+ LOGGER.error("Encountered non-retryable exception while processing %s; Skipping".formatted(deserializedRecord), e);
+ // TODO: Consider supporting error handlers, e.g. to send record to DLT.
+ // Skip record to avoid poison-pill scenario.
+ }
+ }
+
+}
diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/exception/ProcessingException.java b/src/main/java/org/dependencytrack/event/kafka/processor/exception/ProcessingException.java
new file mode 100644
index 000000000..56dbb56e0
--- /dev/null
+++ b/src/main/java/org/dependencytrack/event/kafka/processor/exception/ProcessingException.java
@@ -0,0 +1,29 @@
+package org.dependencytrack.event.kafka.processor.exception;
+
+/**
+ * An {@link Exception} indicating an error during record processing.
+ */
+public class ProcessingException extends Exception {
+
+ /**
+ * {@inheritDoc}
+ */
+ public ProcessingException(final String message) {
+ super(message);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public ProcessingException(final String message, final Throwable cause) {
+ super(message, cause);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public ProcessingException(final Throwable cause) {
+ super(cause);
+ }
+
+}
diff --git a/src/main/java/org/dependencytrack/event/kafka/processor/exception/RetryableProcessingException.java b/src/main/java/org/dependencytrack/event/kafka/processor/exception/RetryableProcessingException.java
new file mode 100644
index 000000000..1dd51fb75
--- /dev/null
+++ b/src/main/java/org/dependencytrack/event/kafka/processor/exception/RetryableProcessingException.java
@@ -0,0 +1,29 @@
+package org.dependencytrack.event.kafka.processor.exception;
+
+/**
+ * A {@link ProcessingException} indicating a retryable error.
+ */
+public class RetryableProcessingException extends ProcessingException {
+
+ /**
+ * {@inheritDoc}
+ */
+ public RetryableProcessingException(final String message) {
+ super(message);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public RetryableProcessingException(final String message, final Throwable cause) {
+ super(message, cause);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public RetryableProcessingException(final Throwable cause) {
+ super(cause);
+ }
+
+}
diff --git a/src/main/java/org/dependencytrack/event/kafka/KafkaStreamsInitializer.java b/src/main/java/org/dependencytrack/event/kafka/streams/KafkaStreamsInitializer.java
similarity index 95%
rename from src/main/java/org/dependencytrack/event/kafka/KafkaStreamsInitializer.java
rename to src/main/java/org/dependencytrack/event/kafka/streams/KafkaStreamsInitializer.java
index 48d70695d..4358822d6 100644
--- a/src/main/java/org/dependencytrack/event/kafka/KafkaStreamsInitializer.java
+++ b/src/main/java/org/dependencytrack/event/kafka/streams/KafkaStreamsInitializer.java
@@ -1,4 +1,4 @@
-package org.dependencytrack.event.kafka;
+package org.dependencytrack.event.kafka.streams;
import alpine.Config;
import alpine.common.logging.Logger;
@@ -11,9 +11,9 @@
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsConfig;
import org.dependencytrack.common.ConfigKey;
-import org.dependencytrack.event.kafka.exception.KafkaStreamsDeserializationExceptionHandler;
-import org.dependencytrack.event.kafka.exception.KafkaStreamsProductionExceptionHandler;
-import org.dependencytrack.event.kafka.exception.KafkaStreamsUncaughtExceptionHandler;
+import org.dependencytrack.event.kafka.streams.exception.KafkaStreamsDeserializationExceptionHandler;
+import org.dependencytrack.event.kafka.streams.exception.KafkaStreamsProductionExceptionHandler;
+import org.dependencytrack.event.kafka.streams.exception.KafkaStreamsUncaughtExceptionHandler;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
diff --git a/src/main/java/org/dependencytrack/event/kafka/KafkaStreamsTopologyFactory.java b/src/main/java/org/dependencytrack/event/kafka/streams/KafkaStreamsTopologyFactory.java
similarity index 91%
rename from src/main/java/org/dependencytrack/event/kafka/KafkaStreamsTopologyFactory.java
rename to src/main/java/org/dependencytrack/event/kafka/streams/KafkaStreamsTopologyFactory.java
index 2b6cb2e38..026ff0686 100644
--- a/src/main/java/org/dependencytrack/event/kafka/KafkaStreamsTopologyFactory.java
+++ b/src/main/java/org/dependencytrack/event/kafka/streams/KafkaStreamsTopologyFactory.java
@@ -1,4 +1,4 @@
-package org.dependencytrack.event.kafka;
+package org.dependencytrack.event.kafka.streams;
import alpine.Config;
import alpine.common.logging.Logger;
@@ -21,10 +21,9 @@
import org.dependencytrack.event.PortfolioVulnerabilityAnalysisEvent;
import org.dependencytrack.event.ProjectMetricsUpdateEvent;
import org.dependencytrack.event.ProjectPolicyEvaluationEvent;
-import org.dependencytrack.event.kafka.processor.DelayedBomProcessedNotificationProcessor;
-import org.dependencytrack.event.kafka.processor.MirrorVulnerabilityProcessor;
-import org.dependencytrack.event.kafka.processor.RepositoryMetaResultProcessor;
-import org.dependencytrack.event.kafka.processor.VulnerabilityScanResultProcessor;
+import org.dependencytrack.event.kafka.KafkaTopics;
+import org.dependencytrack.event.kafka.streams.processor.DelayedBomProcessedNotificationProcessor;
+import org.dependencytrack.event.kafka.streams.processor.VulnerabilityScanResultProcessor;
import org.dependencytrack.model.VulnerabilityScan;
import org.dependencytrack.model.WorkflowState;
import org.dependencytrack.model.WorkflowStatus;
@@ -217,18 +216,6 @@ Topology createTopology() {
Event.dispatch(policyEvaluationEvent);
}, Named.as("trigger_policy_evaluation"));
- streamsBuilder
- .stream(KafkaTopics.REPO_META_ANALYSIS_RESULT.name(),
- Consumed.with(KafkaTopics.REPO_META_ANALYSIS_RESULT.keySerde(), KafkaTopics.REPO_META_ANALYSIS_RESULT.valueSerde())
- .withName("consume_from_%s_topic".formatted(KafkaTopics.REPO_META_ANALYSIS_RESULT.name())))
- .process(RepositoryMetaResultProcessor::new, Named.as("process_repo_meta_analysis_result"));
-
- streamsBuilder
- .stream(KafkaTopics.NEW_VULNERABILITY.name(),
- Consumed.with(KafkaTopics.NEW_VULNERABILITY.keySerde(), KafkaTopics.NEW_VULNERABILITY.valueSerde())
- .withName("consume_from_%s_topic".formatted(KafkaTopics.NEW_VULNERABILITY.name())))
- .process(MirrorVulnerabilityProcessor::new, Named.as("process_mirror_vulnerability"));
-
return streamsBuilder.build(streamsProperties);
}
diff --git a/src/main/java/org/dependencytrack/event/kafka/exception/AbstractThresholdBasedExceptionHandler.java b/src/main/java/org/dependencytrack/event/kafka/streams/exception/AbstractThresholdBasedExceptionHandler.java
similarity index 96%
rename from src/main/java/org/dependencytrack/event/kafka/exception/AbstractThresholdBasedExceptionHandler.java
rename to src/main/java/org/dependencytrack/event/kafka/streams/exception/AbstractThresholdBasedExceptionHandler.java
index 5089560ef..b11705602 100644
--- a/src/main/java/org/dependencytrack/event/kafka/exception/AbstractThresholdBasedExceptionHandler.java
+++ b/src/main/java/org/dependencytrack/event/kafka/streams/exception/AbstractThresholdBasedExceptionHandler.java
@@ -1,4 +1,4 @@
-package org.dependencytrack.event.kafka.exception;
+package org.dependencytrack.event.kafka.streams.exception;
import java.time.Clock;
import java.time.Duration;
diff --git a/src/main/java/org/dependencytrack/event/kafka/exception/KafkaStreamsDeserializationExceptionHandler.java b/src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsDeserializationExceptionHandler.java
similarity index 98%
rename from src/main/java/org/dependencytrack/event/kafka/exception/KafkaStreamsDeserializationExceptionHandler.java
rename to src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsDeserializationExceptionHandler.java
index 83bc9dd50..e91c9a38c 100644
--- a/src/main/java/org/dependencytrack/event/kafka/exception/KafkaStreamsDeserializationExceptionHandler.java
+++ b/src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsDeserializationExceptionHandler.java
@@ -1,4 +1,4 @@
-package org.dependencytrack.event.kafka.exception;
+package org.dependencytrack.event.kafka.streams.exception;
import alpine.Config;
import alpine.common.logging.Logger;
diff --git a/src/main/java/org/dependencytrack/event/kafka/exception/KafkaStreamsProductionExceptionHandler.java b/src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsProductionExceptionHandler.java
similarity index 98%
rename from src/main/java/org/dependencytrack/event/kafka/exception/KafkaStreamsProductionExceptionHandler.java
rename to src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsProductionExceptionHandler.java
index afae44abd..37cc45963 100644
--- a/src/main/java/org/dependencytrack/event/kafka/exception/KafkaStreamsProductionExceptionHandler.java
+++ b/src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsProductionExceptionHandler.java
@@ -1,4 +1,4 @@
-package org.dependencytrack.event.kafka.exception;
+package org.dependencytrack.event.kafka.streams.exception;
import alpine.Config;
import alpine.common.logging.Logger;
diff --git a/src/main/java/org/dependencytrack/event/kafka/exception/KafkaStreamsUncaughtExceptionHandler.java b/src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsUncaughtExceptionHandler.java
similarity index 98%
rename from src/main/java/org/dependencytrack/event/kafka/exception/KafkaStreamsUncaughtExceptionHandler.java
rename to src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsUncaughtExceptionHandler.java
index c862ddd3b..492827890 100644
--- a/src/main/java/org/dependencytrack/event/kafka/exception/KafkaStreamsUncaughtExceptionHandler.java
+++ b/src/main/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsUncaughtExceptionHandler.java
@@ -1,4 +1,4 @@
-package org.dependencytrack.event.kafka.exception;
+package org.dependencytrack.event.kafka.streams.exception;
import alpine.Config;
import alpine.common.logging.Logger;
diff --git a/src/main/java/org/dependencytrack/event/kafka/streams/processor/DelayedBomProcessedNotificationProcessor.java b/src/main/java/org/dependencytrack/event/kafka/streams/processor/DelayedBomProcessedNotificationProcessor.java
new file mode 100644
index 000000000..5df73b646
--- /dev/null
+++ b/src/main/java/org/dependencytrack/event/kafka/streams/processor/DelayedBomProcessedNotificationProcessor.java
@@ -0,0 +1,88 @@
+package org.dependencytrack.event.kafka.streams.processor;
+
+import alpine.common.logging.Logger;
+import alpine.notification.NotificationLevel;
+import org.apache.kafka.streams.processor.api.ContextualProcessor;
+import org.apache.kafka.streams.processor.api.Processor;
+import org.apache.kafka.streams.processor.api.Record;
+import org.dependencytrack.model.Bom;
+import org.dependencytrack.model.Project;
+import org.dependencytrack.model.VulnerabilityScan;
+import org.dependencytrack.model.WorkflowStatus;
+import org.dependencytrack.model.WorkflowStep;
+import org.dependencytrack.notification.NotificationConstants;
+import org.dependencytrack.notification.NotificationGroup;
+import org.dependencytrack.notification.NotificationScope;
+import org.dependencytrack.notification.vo.BomConsumedOrProcessed;
+import org.dependencytrack.persistence.QueryManager;
+import org.dependencytrack.proto.notification.v1.Notification;
+
+import javax.jdo.Query;
+import java.util.UUID;
+
+import static org.dependencytrack.parser.dependencytrack.NotificationModelConverter.convert;
+
+/**
+ * A {@link Processor} responsible for dispatching {@link NotificationGroup#BOM_PROCESSED} notifications
+ * upon detection of a completed {@link VulnerabilityScan}.
+ */
+public class DelayedBomProcessedNotificationProcessor extends ContextualProcessor<String, VulnerabilityScan, String, Notification> {
+
+ private static final Logger LOGGER = Logger.getLogger(DelayedBomProcessedNotificationProcessor.class);
+
+ @Override
+ public void process(final Record<String, VulnerabilityScan> record) {
+ final VulnerabilityScan vulnScan = record.value();
+
+ if (vulnScan.getStatus() != VulnerabilityScan.Status.COMPLETED
+ && vulnScan.getStatus() != VulnerabilityScan.Status.FAILED) {
+ LOGGER.warn("Received vulnerability scan with non-terminal status %s; Dropping (token=%s, project=%s)"
+ .formatted(vulnScan.getStatus(), vulnScan.getToken(), vulnScan.getTargetIdentifier()));
+ return;
+ }
+
+ final Project project;
+ try (final var qm = new QueryManager()) {
+ if (!qm.hasWorkflowStepWithStatus(UUID.fromString(vulnScan.getToken()), WorkflowStep.BOM_PROCESSING, WorkflowStatus.COMPLETED)) {
+ LOGGER.debug("Received completed vulnerability scan, but no %s step exists in this workflow; Dropping (token=%s, project=%s)"
+ .formatted(WorkflowStep.BOM_PROCESSING, vulnScan.getToken(), vulnScan.getTargetIdentifier()));
+ return;
+ }
+
+ project = getProject(qm, vulnScan.getTargetIdentifier());
+ if (project == null) {
+ LOGGER.warn("Received completed vulnerability scan, but the target project does not exist; Dropping (token=%s, project=%s)"
+ .formatted(vulnScan.getToken(), vulnScan.getTargetIdentifier()));
+ return;
+ }
+ }
+
+ final var alpineNotification = new alpine.notification.Notification()
+ .scope(NotificationScope.PORTFOLIO)
+ .group(NotificationGroup.BOM_PROCESSED)
+ .level(NotificationLevel.INFORMATIONAL)
+ .title(NotificationConstants.Title.BOM_PROCESSED)
+ // BOM format and spec version are hardcoded because we don't have this information at this point.
+ // DT currently only accepts CycloneDX anyway.
+ .content("A %s BOM was processed".formatted(Bom.Format.CYCLONEDX.getFormatShortName()))
+ .subject(new BomConsumedOrProcessed(UUID.fromString(vulnScan.getToken()), project, /* bom */ "(Omitted)", Bom.Format.CYCLONEDX, "Unknown"));
+
+ context().forward(record.withKey(project.getUuid().toString()).withValue(convert(alpineNotification)));
+ LOGGER.info("Dispatched delayed %s notification (token=%s, project=%s)"
+ .formatted(NotificationGroup.BOM_PROCESSED, vulnScan.getToken(), vulnScan.getTargetIdentifier()));
+ }
+
+ private static Project getProject(final QueryManager qm, final UUID uuid) {
+ final Query<Project> projectQuery = qm.getPersistenceManager().newQuery(Project.class);
+ projectQuery.setFilter("uuid == :uuid");
+ projectQuery.setParameters(uuid);
+ projectQuery.getFetchPlan().clearGroups(); // Ensure we're not loading too much bloat.
+ projectQuery.getFetchPlan().setGroup(Project.FetchGroup.NOTIFICATION.name());
+ try {
+ return qm.getPersistenceManager().detachCopy(projectQuery.executeResultUnique(Project.class));
+ } finally {
+ projectQuery.closeAll();
+ }
+ }
+
+}
diff --git a/src/main/java/org/dependencytrack/event/kafka/streams/processor/VulnerabilityScanResultProcessor.java b/src/main/java/org/dependencytrack/event/kafka/streams/processor/VulnerabilityScanResultProcessor.java
new file mode 100644
index 000000000..64e051762
--- /dev/null
+++ b/src/main/java/org/dependencytrack/event/kafka/streams/processor/VulnerabilityScanResultProcessor.java
@@ -0,0 +1,1123 @@
+package org.dependencytrack.event.kafka.streams.processor;
+
+import alpine.Config;
+import alpine.common.logging.Logger;
+import alpine.common.metrics.Metrics;
+import alpine.notification.Notification;
+import alpine.notification.NotificationLevel;
+import com.google.protobuf.Any;
+import com.google.protobuf.Timestamp;
+import com.google.protobuf.util.Timestamps;
+import io.micrometer.core.instrument.Timer;
+import org.apache.kafka.streams.processor.api.ContextualFixedKeyProcessor;
+import org.apache.kafka.streams.processor.api.ContextualProcessor;
+import org.apache.kafka.streams.processor.api.FixedKeyRecord;
+import org.dependencytrack.event.kafka.KafkaEventDispatcher;
+import org.dependencytrack.event.kafka.KafkaEventHeaders;
+import org.dependencytrack.event.kafka.KafkaUtil;
+import org.dependencytrack.model.AnalysisJustification;
+import org.dependencytrack.model.AnalysisResponse;
+import org.dependencytrack.model.AnalysisState;
+import org.dependencytrack.model.AnalyzerIdentity;
+import org.dependencytrack.model.Severity;
+import org.dependencytrack.model.Vulnerability;
+import org.dependencytrack.model.VulnerabilityAlias;
+import org.dependencytrack.model.VulnerabilityAnalysisLevel;
+import org.dependencytrack.model.mapping.PolicyProtoMapper;
+import org.dependencytrack.notification.NotificationConstants;
+import org.dependencytrack.notification.NotificationGroup;
+import org.dependencytrack.notification.NotificationScope;
+import org.dependencytrack.parser.dependencytrack.ModelConverterCdxToVuln;
+import org.dependencytrack.persistence.QueryManager;
+import org.dependencytrack.persistence.jdbi.NotificationSubjectDao;
+import org.dependencytrack.policy.vulnerability.VulnerabilityPolicy;
+import org.dependencytrack.policy.vulnerability.VulnerabilityPolicyEvaluator;
+import org.dependencytrack.policy.vulnerability.VulnerabilityPolicyRating;
+import org.dependencytrack.proto.notification.v1.Group;
+import org.dependencytrack.proto.vulnanalysis.v1.ScanKey;
+import org.dependencytrack.proto.vulnanalysis.v1.ScanResult;
+import org.dependencytrack.proto.vulnanalysis.v1.ScanStatus;
+import org.dependencytrack.proto.vulnanalysis.v1.Scanner;
+import org.dependencytrack.proto.vulnanalysis.v1.ScannerResult;
+import org.dependencytrack.util.PersistenceUtil;
+import org.dependencytrack.util.PersistenceUtil.Differ;
+import org.jdbi.v3.core.mapper.reflect.ColumnName;
+import org.jdbi.v3.sqlobject.config.RegisterBeanMapper;
+import org.jdbi.v3.sqlobject.config.RegisterConstructorMapper;
+import org.jdbi.v3.sqlobject.customizer.BindBean;
+import org.jdbi.v3.sqlobject.customizer.BindMethods;
+import org.jdbi.v3.sqlobject.statement.GetGeneratedKeys;
+import org.jdbi.v3.sqlobject.statement.SqlBatch;
+import org.jdbi.v3.sqlobject.statement.SqlQuery;
+
+import javax.jdo.Query;
+import javax.ws.rs.core.MultivaluedHashMap;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Objects;
+import java.util.ServiceLoader;
+import java.util.Set;
+import java.util.UUID;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+import static java.util.Objects.requireNonNullElse;
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+import static org.datanucleus.PropertyNames.PROPERTY_PERSISTENCE_BY_REACHABILITY_AT_COMMIT;
+import static org.datanucleus.PropertyNames.PROPERTY_RETAIN_VALUES;
+import static org.dependencytrack.common.ConfigKey.VULNERABILITY_POLICY_ANALYSIS_ENABLED;
+import static org.dependencytrack.parser.dependencytrack.ModelConverterCdxToVuln.convert;
+import static org.dependencytrack.persistence.jdbi.JdbiFactory.jdbi;
+import static org.dependencytrack.proto.notification.v1.Group.GROUP_NEW_VULNERABILITY;
+import static org.dependencytrack.proto.notification.v1.Group.GROUP_NEW_VULNERABLE_DEPENDENCY;
+import static org.dependencytrack.proto.notification.v1.Level.LEVEL_INFORMATIONAL;
+import static org.dependencytrack.proto.notification.v1.Scope.SCOPE_PORTFOLIO;
+import static org.dependencytrack.proto.vulnanalysis.v1.ScanStatus.SCAN_STATUS_FAILED;
+import static org.dependencytrack.proto.vulnanalysis.v1.Scanner.SCANNER_INTERNAL;
+import static org.dependencytrack.util.NotificationUtil.generateNotificationContent;
+import static org.dependencytrack.util.NotificationUtil.generateNotificationTitle;
+import static org.dependencytrack.util.VulnerabilityUtil.canBeMirrored;
+import static org.dependencytrack.util.VulnerabilityUtil.isAuthoritativeSource;
+import static org.dependencytrack.util.VulnerabilityUtil.isMirroringEnabled;
+
+/**
+ * A {@link ContextualProcessor} responsible for processing {@link ScanResult}s.
+ */
+public class VulnerabilityScanResultProcessor extends ContextualFixedKeyProcessor<ScanKey, ScanResult, ScanResult> {
+
+ private static final Logger LOGGER = Logger.getLogger(VulnerabilityScanResultProcessor.class);
+ private static final Timer TIMER = Timer.builder("vuln_scan_result_processing")
+ .description("Time taken to process vulnerability scan results")
+ .register(Metrics.getRegistry());
+
+ private final KafkaEventDispatcher eventDispatcher = new KafkaEventDispatcher();
+ private final VulnerabilityPolicyEvaluator vulnPolicyEvaluator;
+
+ public VulnerabilityScanResultProcessor() {
+ this(Config.getInstance().getPropertyAsBoolean(VULNERABILITY_POLICY_ANALYSIS_ENABLED)
+ ? ServiceLoader.load(VulnerabilityPolicyEvaluator.class).findFirst().orElseThrow()
+ : null);
+ }
+
+ VulnerabilityScanResultProcessor(final VulnerabilityPolicyEvaluator vulnPolicyEvaluator) {
+ this.vulnPolicyEvaluator = vulnPolicyEvaluator;
+ }
+
+ @Override
+ public void process(final FixedKeyRecord<ScanKey, ScanResult> record) {
+ final ScanKey scanKey = record.key();
+ final ScanResult result = record.value();
+ final UUID componentUuid = UUID.fromString(scanKey.getComponentUuid());
+ final VulnerabilityAnalysisLevel analysisLevel = determineAnalysisLevel(record);
+ final boolean isNewComponent = determineIsComponentNew(record);
+
+ final Timer.Sample timerSample = Timer.start();
+ try (final var qm = new QueryManager()) {
+ // Do not unload fields upon commit (why is this even the default WTF).
+ qm.getPersistenceManager().setProperty(PROPERTY_RETAIN_VALUES, "true");
+ qm.getPersistenceManager().setProperty(PROPERTY_PERSISTENCE_BY_REACHABILITY_AT_COMMIT, "false");
+
+ final Component component = jdbi(qm).withExtension(Dao.class, dao -> dao.getComponentByUuid(componentUuid));
+ if (component == null) {
+ LOGGER.warn("Received result for component %s, but it does not exist (scanKey: %s)"
+ .formatted(componentUuid, prettyPrint(scanKey)));
+ return;
+ }
+
+ for (final ScannerResult scannerResult : result.getScannerResultsList()) {
+ processScannerResult(qm, component, scanKey, scannerResult, analysisLevel, isNewComponent);
+ }
+ } catch (Exception e) {
+ LOGGER.error("Failed to process scan result for component %s (scanKey: %s)"
+ .formatted(componentUuid, prettyPrint(scanKey)), e);
+ } finally {
+ timerSample.stop(TIMER);
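+ // Forward the record regardless of the processing outcome, so that downstream completion tracking of the vulnerability scan is not blocked.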
+ context().forward(record);
+ }
+ }
+
+ private void processScannerResult(final QueryManager qm, final Component component,
+ final ScanKey scanKey, final ScannerResult scannerResult,
+ final VulnerabilityAnalysisLevel analysisLevel,
+ final boolean isNewComponent) {
+ if (scannerResult.getStatus() == SCAN_STATUS_FAILED) {
+ final var message = "Scan of component %s with %s failed (scanKey: %s): %s"
+ .formatted(component.uuid(), scannerResult.getScanner(), prettyPrint(scanKey), scannerResult.getFailureReason());
+ eventDispatcher.dispatchAsync(component.projectUuid(), new Notification()
+ .scope(NotificationScope.SYSTEM)
+ .group(NotificationGroup.ANALYZER)
+ .level(NotificationLevel.ERROR)
+ .title(NotificationConstants.Title.ANALYZER_ERROR)
+ .content(message));
+ LOGGER.warn(message);
+ return;
+ } else if (scannerResult.getStatus() != ScanStatus.SCAN_STATUS_SUCCESSFUL) {
+ LOGGER.warn("Unable to process results from %s with status %s; Dropping record (scanKey: %s)"
+ .formatted(scannerResult.getScanner(), scannerResult.getStatus(), prettyPrint(scanKey)));
+ return;
+ }
+
+ final Set<Vulnerability> syncedVulns = syncVulnerabilities(qm, scanKey, scannerResult);
+ LOGGER.debug("Synchronized %d vulnerabilities reported by %s for %s (scanKey: %s)"
+ .formatted(syncedVulns.size(), scannerResult.getScanner(), scanKey.getComponentUuid(), prettyPrint(scanKey)));
+
+ final Map<UUID, VulnerabilityPolicy> matchedPoliciesByVulnUuid = maybeEvaluateVulnPolicies(component, syncedVulns);
+ LOGGER.debug("Identified policy matches for %d/%d vulnerabilities (scanKey: %s)"
+ .formatted(matchedPoliciesByVulnUuid.size(), syncedVulns.size(), prettyPrint(scanKey)));
+
+ final List<Vulnerability> newVulnUuids = synchronizeFindingsAndAnalyses(qm, component, syncedVulns,
+ scannerResult.getScanner(), matchedPoliciesByVulnUuid);
+ LOGGER.debug("Identified %d new vulnerabilities for %s with %s (scanKey: %s)"
+ .formatted(newVulnUuids.size(), scanKey.getComponentUuid(), scannerResult.getScanner(), prettyPrint(scanKey)));
+
+ maybeSendNotifications(qm, component, isNewComponent, analysisLevel, newVulnUuids);
+ }
+
+ /**
+ * Synchronize vulnerabilities reported in a given {@link ScannerResult} with the datastore.
+ *
+ * @param qm The {@link QueryManager} to use
+ * @param scanKey The {@link ScanKey} associated with the {@link ScannerResult}
+ * @param scannerResult The {@link ScannerResult} to synchronize vulnerabilities from
+ * @return A {@link Set} of synchronized {@link Vulnerability}s
+ */
+ private Set<Vulnerability> syncVulnerabilities(final QueryManager qm, final ScanKey scanKey, final ScannerResult scannerResult) {
+ final var syncedVulns = new HashSet<Vulnerability>();
+
+ for (final org.cyclonedx.proto.v1_4.Vulnerability reportedVuln : scannerResult.getBom().getVulnerabilitiesList()) {
+ final Vulnerability vuln;
+ try {
+ vuln = ModelConverterCdxToVuln.convert(qm, scannerResult.getBom(), reportedVuln, true);
+ } catch (RuntimeException e) {
+ LOGGER.error("Failed to convert vulnerability %s/%s (reported by %s for component %s) to internal model (scanKey: %s)"
+ .formatted(reportedVuln.getSource(), reportedVuln.getId(), scannerResult.getScanner(), scanKey.getComponentUuid(), prettyPrint(scanKey)), e);
+ continue;
+ }
+
+ try {
+ final Vulnerability syncedVuln = syncVulnerability(qm, vuln, scannerResult.getScanner());
+
+ // Detach vulnerabilities from JDO persistence context.
+ // We do not want to trigger any DB interactions by accessing their fields later.
+ // Note that even PersistenceManager#detachCopy will load / unload fields based
+ // on the current FetchPlan. But we just want to keep the data we already have,
+ // and #makeTransientAll does exactly that.
+ qm.getPersistenceManager().makeTransient(syncedVuln);
+
+ if (vuln.getAliases() != null && !vuln.getAliases().isEmpty()) {
+ final var syncedAliases = new ArrayList<VulnerabilityAlias>();
+ for (VulnerabilityAlias alias : vuln.getAliases()) {
+ final VulnerabilityAlias syncedAlias = qm.synchronizeVulnerabilityAlias(alias);
+ qm.getPersistenceManager().makeTransient(syncedAlias);
+ syncedAliases.add(syncedAlias);
+ }
+ syncedVuln.setAliases(syncedAliases);
+ }
+
+ syncedVulns.add(syncedVuln);
+ } catch (RuntimeException e) {
+ // Use a broad catch here, so we can still try to process other
+ // vulnerabilities, even though processing one of them failed.
+
+ LOGGER.warn("Failed to synchronize vulnerability %s/%s (reported by %s for component %s; scanKey: %s)"
+ .formatted(vuln.getSource(), vuln.getVulnId(), scannerResult.getScanner(), scanKey.getComponentUuid(), prettyPrint(scanKey)), e);
+ }
+ }
+
+ return syncedVulns;
+ }
+
+ /**
+ * Synchronize a given {@link Vulnerability} as reported by a given {@link Scanner} with the datastore.
+ *
+ * This method differs from {@link QueryManager#synchronizeVulnerability(Vulnerability, boolean)} in that it expects
+ * an active {@link javax.jdo.Transaction}, and only calls setters of existing vulnerabilities when the respective
+ * value actually changed, saving network round-trips.
+ *
+ * @param qm The {@link QueryManager} to use
+ * @param vuln The {@link Vulnerability} to synchronize
+ * @param scanner The {@link AnalyzerIdentity} that reported the vulnerability
+ * @return The synchronized {@link Vulnerability}
+ * @throws IllegalStateException When no {@link javax.jdo.Transaction} is active
+ * @throws NoSuchElementException When the reported vulnerability is internal, but does not exist in the datastore
+ */
+ private Vulnerability syncVulnerability(final QueryManager qm, final Vulnerability vuln, final Scanner scanner) {
+ // TODO: Refactor this to use JDBI instead.
+ // It is possible that the same vulnerability is reported for multiple components in parallel,
+ // causing unique constraint violations when attempting to INSERT into the VULNERABILITY table.
+ // In such cases, we can get away with simply retrying to SELECT or INSERT again.
+ return qm.runInRetryableTransaction(() -> {
+ final Vulnerability existingVuln;
+ final Query<Vulnerability> query = qm.getPersistenceManager().newQuery(Vulnerability.class);
+ try {
+ query.setFilter("vulnId == :vulnId && source == :source");
+ query.setParameters(vuln.getVulnId(), vuln.getSource());
+ existingVuln = query.executeUnique();
+ } finally {
+ query.closeAll();
+ }
+
+ if (existingVuln == null) {
+ if (Vulnerability.Source.INTERNAL.name().equals(vuln.getSource())) {
+ throw new NoSuchElementException("An internal vulnerability with ID %s does not exist".formatted(vuln.getVulnId()));
+ }
+
+ return qm.getPersistenceManager().makePersistent(vuln);
+ }
+
+ if (canUpdateVulnerability(existingVuln, scanner)) {
+ final var differ = new Differ<>(existingVuln, vuln);
+
+ // TODO: Consider using something like javers to get a rich diff of WHAT changed; https://github.com/javers/javers
+ differ.applyIfChanged("title", Vulnerability::getTitle, existingVuln::setTitle);
+ differ.applyIfChanged("subTitle", Vulnerability::getSubTitle, existingVuln::setSubTitle);
+ differ.applyIfChanged("description", Vulnerability::getDescription, existingVuln::setDescription);
+ differ.applyIfChanged("detail", Vulnerability::getDetail, existingVuln::setDetail);
+ differ.applyIfChanged("recommendation", Vulnerability::getRecommendation, existingVuln::setRecommendation);
+ differ.applyIfChanged("references", Vulnerability::getReferences, existingVuln::setReferences);
+ differ.applyIfChanged("credits", Vulnerability::getCredits, existingVuln::setCredits);
+ differ.applyIfChanged("created", Vulnerability::getCreated, existingVuln::setCreated);
+ differ.applyIfChanged("published", Vulnerability::getPublished, existingVuln::setPublished);
+ differ.applyIfChanged("updated", Vulnerability::getUpdated, existingVuln::setUpdated);
+ differ.applyIfChanged("cwes", Vulnerability::getCwes, existingVuln::setCwes);
+ // Calling setSeverity nulls all CVSS and OWASP RR fields. getSeverity calculates the severity on-the-fly,
+ // and will return UNASSIGNED even when no severity is set explicitly. Thus, calling setSeverity
+ // must happen before CVSS and OWASP RR fields are set, to avoid null-ing them again.
+ differ.applyIfChanged("severity", Vulnerability::getSeverity, existingVuln::setSeverity);
+ differ.applyIfChanged("cvssV2BaseScore", Vulnerability::getCvssV2BaseScore, existingVuln::setCvssV2BaseScore);
+ differ.applyIfChanged("cvssV2ImpactSubScore", Vulnerability::getCvssV2ImpactSubScore, existingVuln::setCvssV2ImpactSubScore);
+ differ.applyIfChanged("cvssV2ExploitabilitySubScore", Vulnerability::getCvssV2ExploitabilitySubScore, existingVuln::setCvssV2ExploitabilitySubScore);
+ differ.applyIfChanged("cvssV2Vector", Vulnerability::getCvssV2Vector, existingVuln::setCvssV2Vector);
+ differ.applyIfChanged("cvssv3BaseScore", Vulnerability::getCvssV3BaseScore, existingVuln::setCvssV3BaseScore);
+ differ.applyIfChanged("cvssV3ImpactSubScore", Vulnerability::getCvssV3ImpactSubScore, existingVuln::setCvssV3ImpactSubScore);
+ differ.applyIfChanged("cvssV3ExploitabilitySubScore", Vulnerability::getCvssV3ExploitabilitySubScore, existingVuln::setCvssV3ExploitabilitySubScore);
+ differ.applyIfChanged("cvssV3Vector", Vulnerability::getCvssV3Vector, existingVuln::setCvssV3Vector);
+ differ.applyIfChanged("owaspRRLikelihoodScore", Vulnerability::getOwaspRRLikelihoodScore, existingVuln::setOwaspRRLikelihoodScore);
+ differ.applyIfChanged("owaspRRTechnicalImpactScore", Vulnerability::getOwaspRRTechnicalImpactScore, existingVuln::setOwaspRRTechnicalImpactScore);
+ differ.applyIfChanged("owaspRRBusinessImpactScore", Vulnerability::getOwaspRRBusinessImpactScore, existingVuln::setOwaspRRBusinessImpactScore);
+ differ.applyIfChanged("owaspRRVector", Vulnerability::getOwaspRRVector, existingVuln::setOwaspRRVector);
+ // Aliases of existingVuln will always be null, as they'd have to be fetched separately.
+ // Synchronization of aliases is performed after synchronizing the vulnerability.
+ // updated |= applyIfChanged(existingVuln, vuln, Vulnerability::getAliases, existingVuln::setAliases);
+
+ differ.applyIfChanged("vulnerableVersions", Vulnerability::getVulnerableVersions, existingVuln::setVulnerableVersions);
+ differ.applyIfChanged("patchedVersions", Vulnerability::getPatchedVersions, existingVuln::setPatchedVersions);
+ // EPSS is an additional enrichment that no scanner currently provides.
+ // We don't want EPSS scores of CVEs to be purged just because the CVE information came from e.g. OSS Index.
+ differ.applyIfNonNullAndChanged("epssScore", Vulnerability::getEpssScore, existingVuln::setEpssScore);
+ differ.applyIfNonNullAndChanged("epssPercentile", Vulnerability::getEpssPercentile, existingVuln::setEpssPercentile);
+
+ if (!differ.getDiffs().isEmpty()) {
+ // TODO: Send a notification?
+ // (But notifications should only be sent if the transaction was committed)
+ // TODO: Reduce to DEBUG; It's set to INFO for testing
+ LOGGER.info("Vulnerability %s/%s was updated by %s: %s".formatted(vuln.getSource(), vuln.getVulnId(), scanner, differ.getDiffs()));
+ }
+ }
+
+ return existingVuln;
+ }, PersistenceUtil::isUniqueConstraintViolation);
+ }
+
+ private Map<UUID, VulnerabilityPolicy> maybeEvaluateVulnPolicies(final Component component, final Collection<Vulnerability> vulns) {
+ if (vulnPolicyEvaluator == null) {
+ return Collections.emptyMap();
+ }
+
+ final var policyProject = org.dependencytrack.proto.policy.v1.Project.newBuilder()
+ .setUuid(component.projectUuid().toString())
+ .build();
+ final var policyComponent = org.dependencytrack.proto.policy.v1.Component.newBuilder()
+ .setUuid(component.uuid().toString())
+ .build();
+ final List<org.dependencytrack.proto.policy.v1.Vulnerability> policyVulns = vulns.stream()
+ .map(PolicyProtoMapper::mapToProto)
+ .toList();
+
+ return vulnPolicyEvaluator.evaluate(policyVulns, policyComponent, policyProject);
+ }
+
+ /**
+ * Associate a given {@link Collection} of {@link Vulnerability}s with a given {@link Component},
+ * evaluate applicable {@link VulnerabilityPolicy}s, and apply the resulting analyses.
+ *
+ * If a {@link Vulnerability} was not previously associated with the {@link Component},
+ * a {@link FindingAttribution} will be created for the {@link Scanner}.
+ *
+ * @param qm The {@link QueryManager} to use
+ * @param component The {@link Component} to associate with
+ * @param vulns The {@link Vulnerability}s to associate with
+ * @param scanner The {@link Scanner} that identified the association
+ * @param policiesByVulnUuid Matched {@link VulnerabilityPolicy}s grouped by {@link Vulnerability#getUuid()}
+ * @return A {@link List} of {@link Vulnerability}s, that were not previously associated with the {@link Component},
+ * and which have not been suppressed via {@link VulnerabilityPolicy}.
+ */
+ private List<Vulnerability> synchronizeFindingsAndAnalyses(final QueryManager qm, final Component component,
+ final Collection<Vulnerability> vulns, final Scanner scanner,
+ final Map<UUID, VulnerabilityPolicy> policiesByVulnUuid) {
+ return jdbi(qm).inTransaction(jdbiHandle -> {
+ final var dao = jdbiHandle.attach(Dao.class);
+
+ // Bulk-create new findings and corresponding scanner attributions.
+ final List<Long> newFindingVulnIds = dao.createFindings(component, vulns);
+ final List<FindingAttribution> findingAttributions = newFindingVulnIds.stream()
+ .map(vulnId -> new FindingAttribution(vulnId, component.id(), component.projectId(),
+ convert(scanner).name(), UUID.randomUUID()))
+ .toList();
+ dao.createFindingAttributions(findingAttributions);
+
+ return maybeApplyPolicyAnalyses(dao, component, vulns, newFindingVulnIds, policiesByVulnUuid);
+ });
+ }
+
+ /**
+ * Apply analyses of matched {@link VulnerabilityPolicy}s. Do nothing when no policies matched.
+ *
+ * @param dao The {@link Dao} to use for persistence operations
+ * @param component The {@link Component} to apply analyses for
+ * @param vulns The {@link Vulnerability}s identified for the {@link Component}
+ * @param newFindingVulnIds IDs of {@link Vulnerability}s that newly affect the {@link Component}
+ * @param policiesByVulnUuid Matched {@link VulnerabilityPolicy}s grouped by {@link Vulnerability#getUuid()}
+ * @return A {@link List} of {@link Vulnerability}s, that were not previously associated with the {@link Component},
+ * and which have not been suppressed via {@link VulnerabilityPolicy}.
+ */
+ private List<Vulnerability> maybeApplyPolicyAnalyses(final Dao dao, final Component component, final Collection<Vulnerability> vulns,
+ final List<Long> newFindingVulnIds, final Map<UUID, VulnerabilityPolicy> policiesByVulnUuid) {
+ // Unless we have any matching vulnerability policies, there's nothing to do!
+ if (policiesByVulnUuid.isEmpty()) {
+ return vulns.stream()
+ .filter(vuln -> newFindingVulnIds.contains(vuln.getId()))
+ .toList();
+ }
+
+ // Index vulnerabilities by ID and UUID for more efficient lookups.
+ final var vulnById = new HashMap<Long, Vulnerability>();
+ final var vulnByUuid = new HashMap<UUID, Vulnerability>();
+ for (final Vulnerability vuln : vulns) {
+ vulnById.put(vuln.getId(), vuln);
+ vulnByUuid.put(vuln.getUuid(), vuln);
+ }
+
+ // For all vulnerabilities with matching policies, bulk-fetch existing analyses.
+ // Index them by vulnerability UUID for more efficient access.
+ final Map<UUID, Analysis> existingAnalyses = dao.getAnalyses(component, policiesByVulnUuid.keySet()).stream()
+ .collect(Collectors.toMap(Analysis::getVulnUuid, Function.identity()));
+
+ final var analysesToCreateOrUpdate = new ArrayList<Analysis>();
+ final var analysisCommentsByVulnId = new MultivaluedHashMap<Long, AnalysisComment>();
+ for (final Map.Entry<UUID, VulnerabilityPolicy> vulnUuidAndPolicy : policiesByVulnUuid.entrySet()) {
+ final Vulnerability vuln = vulnByUuid.get(vulnUuidAndPolicy.getKey());
+ final VulnerabilityPolicy policy = vulnUuidAndPolicy.getValue();
+ final Analysis policyAnalysis;
+ try {
+ policyAnalysis = Analysis.fromPolicy(policy);
+ } catch (IllegalArgumentException e) {
+ LOGGER.warn("Unable to apply policy %s as it was found to be invalid".formatted(policy.name()), e);
+ continue;
+ }
+
+ final Analysis existingAnalysis = existingAnalyses.get(vuln.getUuid());
+ if (existingAnalysis == null) {
+ policyAnalysis.setComponentId(component.id());
+ policyAnalysis.setProjectId(component.projectId());
+ policyAnalysis.setVulnId(vuln.getId());
+ policyAnalysis.setVulnUuid(vuln.getUuid());
+
+ // We'll create comments for analysisId=null for now, as the Analysis we're referring
+ // to hasn't been created yet. The analysisId is populated later, after bulk upserting
+ // all analyses.
+ final var commentFactory = new AnalysisCommentFactory(null, policy);
+ if (policyAnalysis.getState() != null) {
+ commentFactory.createComment("State: %s → %s"
+ .formatted(AnalysisState.NOT_SET, policyAnalysis.getState()));
+ }
+ if (policyAnalysis.getJustification() != null) {
+ commentFactory.createComment("Justification: %s → %s"
+ .formatted(AnalysisJustification.NOT_SET, policyAnalysis.getJustification()));
+ }
+ if (policyAnalysis.getResponse() != null) {
+ commentFactory.createComment("Response: %s → %s"
+ .formatted(AnalysisResponse.NOT_SET, policyAnalysis.response));
+ }
+ if (policyAnalysis.getDetails() != null) {
+ commentFactory.createComment("Details: (None) → %s"
+ .formatted(policyAnalysis.details));
+ }
+ if (policyAnalysis.getSuppressed()) {
+ commentFactory.createComment("Unsuppressed → Suppressed");
+ }
+ if (policyAnalysis.getSeverity() != null) {
+ commentFactory.createComment("Severity: %s → %s"
+ .formatted(vuln.getSeverity(), policyAnalysis.getSeverity()));
+ }
+ if (policyAnalysis.getCvssV2Vector() != null) {
+ commentFactory.createComment("CVSSv2 Vector: %s → %s"
+ .formatted(requireNonNullElse(vuln.getCvssV2Vector(), "(None)"), policyAnalysis.getCvssV2Vector()));
+ }
+ if (policyAnalysis.getCvssV2Score() != null) {
+ commentFactory.createComment("CVSSv2 Score: %s → %s"
+ .formatted(requireNonNullElse(vuln.getCvssV2BaseScore(), "(None)"), policyAnalysis.getCvssV2Score()));
+ }
+ if (policyAnalysis.getCvssV3Vector() != null) {
+ commentFactory.createComment("CVSSv3 Vector: %s → %s"
+ .formatted(requireNonNullElse(vuln.getCvssV3Vector(), "(None)"), policyAnalysis.getCvssV3Vector()));
+ }
+ if (policyAnalysis.getCvssV3Score() != null) {
+ commentFactory.createComment("CVSSv3 Score: %s → %s"
+ .formatted(requireNonNullElse(vuln.getCvssV3BaseScore(), "(None)"), policyAnalysis.getCvssV3Score()));
+ }
+ if (policyAnalysis.getOwaspVector() != null) {
+ commentFactory.createComment("OWASP Vector: %s → %s"
+ .formatted(requireNonNullElse(vuln.getOwaspRRVector(), "(None)"), policyAnalysis.getOwaspVector()));
+ }
+ if (policyAnalysis.getOwaspScore() != null) {
+ commentFactory.createComment("OWASP Score: %s → %s"
+ .formatted(requireNonNullElse(vuln.getOwaspRRLikelihoodScore(), "(None)"), policyAnalysis.getOwaspScore()));
+ }
+ analysesToCreateOrUpdate.add(policyAnalysis);
+ analysisCommentsByVulnId.addAll(policyAnalysis.getVulnId(), commentFactory.getComments());
+ } else {
+ boolean shouldUpdate = false;
+ final var commentFactory = new AnalysisCommentFactory(existingAnalysis.getId(), policy);
+ if (!Objects.equals(existingAnalysis.getState(), policyAnalysis.getState())) {
+ commentFactory.createComment("State: %s → %s".formatted(
+ requireNonNullElse(existingAnalysis.getState(), AnalysisState.NOT_SET),
+ requireNonNullElse(policyAnalysis.getState(), AnalysisState.NOT_SET)));
+
+ existingAnalysis.setState(policyAnalysis.getState());
+ shouldUpdate = true;
+ }
+ if (!Objects.equals(existingAnalysis.getJustification(), policyAnalysis.getJustification())) {
+ commentFactory.createComment("Justification: %s → %s".formatted(
+ requireNonNullElse(existingAnalysis.justification, AnalysisJustification.NOT_SET),
+ requireNonNullElse(policyAnalysis.getJustification(), AnalysisJustification.NOT_SET)));
+
+ existingAnalysis.setJustification(policyAnalysis.getJustification());
+ shouldUpdate = true;
+ }
+ if (!Objects.equals(existingAnalysis.getResponse(), policyAnalysis.getResponse())) {
+ commentFactory.createComment("Response: %s → %s".formatted(
+ requireNonNullElse(existingAnalysis.response, AnalysisResponse.NOT_SET),
+ requireNonNullElse(policyAnalysis.getResponse(), AnalysisResponse.NOT_SET)));
+
+ existingAnalysis.setResponse(policyAnalysis.getResponse());
+ shouldUpdate = true;
+ }
+ if (!Objects.equals(existingAnalysis.details, policyAnalysis.getDetails())) {
+ commentFactory.createComment("Details: %s → %s".formatted(
+ requireNonNullElse(existingAnalysis.details, "(None)"),
+ requireNonNullElse(policyAnalysis.getDetails(), "(None)")));
+
+ existingAnalysis.setDetails(policy.analysis().getDetails());
+ shouldUpdate = true;
+ }
+ if (existingAnalysis.getSuppressed() == null || (existingAnalysis.getSuppressed() != policyAnalysis.getSuppressed())) {
+ final String previousState = existingAnalysis.suppressed ? "Suppressed" : "Unsuppressed";
+ final String newState = policyAnalysis.getSuppressed() ? "Suppressed" : "Unsuppressed";
+ commentFactory.createComment("%s → %s".formatted(previousState, newState));
+
+ existingAnalysis.setSuppressed(policy.analysis().isSuppress());
+ shouldUpdate = true;
+ }
+ if (!Objects.equals(existingAnalysis.getSeverity(), policyAnalysis.getSeverity())) {
+ commentFactory.createComment("Severity: %s → %s".formatted(
+ requireNonNullElse(existingAnalysis.getSeverity(), Severity.UNASSIGNED),
+ requireNonNullElse(policyAnalysis.getSeverity(), Severity.UNASSIGNED)));
+
+ existingAnalysis.setSeverity(policyAnalysis.getSeverity());
+ shouldUpdate = true;
+ }
+ if (!Objects.equals(existingAnalysis.getCvssV2Vector(), policyAnalysis.getCvssV2Vector())) {
+ commentFactory.createComment("CVSSv2 Vector: %s → %s".formatted(
+ requireNonNullElse(existingAnalysis.getCvssV2Vector(), "(None)"),
+ requireNonNullElse(policyAnalysis.getCvssV2Vector(), "(None)")));
+
+ existingAnalysis.setCvssV2Vector(policyAnalysis.getCvssV2Vector());
+ shouldUpdate = true;
+ }
+ if (!Objects.equals(existingAnalysis.getCvssV2Score(), policyAnalysis.getCvssV2Score())) {
+ commentFactory.createComment("CVSSv2 Score: %s → %s".formatted(
+ requireNonNullElse(existingAnalysis.getCvssV2Score(), "(None)"),
+ requireNonNullElse(policyAnalysis.getCvssV2Score(), "(None)")));
+
+ existingAnalysis.setCvssV2Score(policyAnalysis.getCvssV2Score());
+ shouldUpdate = true;
+ }
+ if (!Objects.equals(existingAnalysis.getCvssV3Vector(), policyAnalysis.getCvssV3Vector())) {
+ commentFactory.createComment("CVSSv3 Vector: %s → %s".formatted(
+ requireNonNullElse(existingAnalysis.getCvssV3Vector(), "(None)"),
+ requireNonNullElse(policyAnalysis.getCvssV3Vector(), "(None)")));
+
+ existingAnalysis.setCvssV3Vector(policyAnalysis.getCvssV3Vector());
+ shouldUpdate = true;
+ }
+ if (!Objects.equals(existingAnalysis.getCvssV3Score(), policyAnalysis.getCvssV3Score())) {
+ commentFactory.createComment("CVSSv3 Score: %s → %s".formatted(
+ requireNonNullElse(existingAnalysis.getCvssV3Score(), "(None)"),
+ requireNonNullElse(policyAnalysis.getCvssV3Score(), "(None)")));
+
+ existingAnalysis.setCvssV3Score(policyAnalysis.getCvssV3Score());
+ shouldUpdate = true;
+ }
+ if (!Objects.equals(existingAnalysis.getOwaspVector(), policyAnalysis.getOwaspVector())) {
+ commentFactory.createComment("OWASP Vector: %s → %s".formatted(
+ requireNonNullElse(existingAnalysis.getOwaspVector(), "(None)"),
+ requireNonNullElse(policyAnalysis.getOwaspVector(), "(None)")));
+
+ existingAnalysis.setOwaspVector(policyAnalysis.getOwaspVector());
+ shouldUpdate = true;
+ }
+ if (!Objects.equals(existingAnalysis.getOwaspScore(), policyAnalysis.getOwaspScore())) {
+ commentFactory.createComment("OWASP Score: %s → %s".formatted(
+ requireNonNullElse(existingAnalysis.getOwaspScore(), "(None)"),
+ requireNonNullElse(policyAnalysis.getOwaspScore(), "(None)")));
+
+ existingAnalysis.setOwaspScore(policyAnalysis.getOwaspScore());
+ shouldUpdate = true;
+ }
+ if (shouldUpdate) {
+ analysesToCreateOrUpdate.add(existingAnalysis);
+ analysisCommentsByVulnId.addAll(existingAnalysis.getVulnId(), commentFactory.getComments());
+ }
+ }
+
+ // If the finding was suppressed, do not report it as new.
+ if (Boolean.TRUE.equals(policyAnalysis.getSuppressed())) {
+ newFindingVulnIds.remove(vuln.getId());
+ }
+ }
+
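+ // Persist new and updated analyses in a single batch, then attach their audit comments once the generated analysis IDs are known.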
+ if (!analysesToCreateOrUpdate.isEmpty()) {
+ final List<CreatedAnalysis> createdAnalyses = dao.createOrUpdateAnalyses(analysesToCreateOrUpdate);
+ // TODO: Construct notifications for PROJECT_AUDIT_CHANGE, but do not dispatch them here!
+ // They should be dispatched together with NEW_VULNERABILITY and NEW_VULNERABLE_DEPENDENCY
+ // notifications, AFTER this database transaction completed successfully.
+
+ // Comments for new analyses do not have an analysis ID set yet, as that ID was not known prior
+ // to inserting the respective analysis record. Enrich comments with analysis IDs now that we know them.
+ for (final CreatedAnalysis createdAnalysis : createdAnalyses) {
+ analysisCommentsByVulnId.computeIfPresent(createdAnalysis.vulnId(),
+ (vulnId, comments) -> comments.stream()
+ .map(comment -> new AnalysisComment(createdAnalysis.id(), comment.comment(), comment.commenter()))
+ .toList());
+ }
+
+ dao.createAnalysisComments(analysisCommentsByVulnId.values().stream().flatMap(Collection::stream).toList());
+ }
+
+ return vulnById.entrySet().stream()
+ .filter(entry -> newFindingVulnIds.contains(entry.getKey()))
+ .map(Map.Entry::getValue)
+ .toList();
+ }
+
+ /**
+ * Send {@link Group#GROUP_NEW_VULNERABLE_DEPENDENCY} and {@link Group#GROUP_NEW_VULNERABILITY} notifications
+ * for a given {@link Component}, if it was found to have at least one non-suppressed vulnerability.
+ *
+ * @param qm The {@link QueryManager} to use
+ * @param component The {@link Component} to send notifications for
+ * @param isNewComponent Whether {@code component} is new
+ * @param analysisLevel The {@link VulnerabilityAnalysisLevel}
+ * @param newVulns Newly identified {@link Vulnerability}s
+ */
+ private void maybeSendNotifications(final QueryManager qm, final Component component, final boolean isNewComponent,
+ final VulnerabilityAnalysisLevel analysisLevel, final List<Vulnerability> newVulns) {
+ if (newVulns.isEmpty()) {
+ return;
+ }
+
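+ // Assemble notification protos while the JDBI extension is open; dispatching happens asynchronously below.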
+ final Timestamp notificationTimestamp = Timestamps.now();
+ final var notifications = new ArrayList<org.dependencytrack.proto.notification.v1.Notification>();
+ jdbi(qm).useExtension(NotificationSubjectDao.class, dao -> {
+ if (isNewComponent) {
+ dao.getForNewVulnerableDependency(component.uuid())
+ .map(subject -> org.dependencytrack.proto.notification.v1.Notification.newBuilder()
+ .setScope(SCOPE_PORTFOLIO)
+ .setGroup(GROUP_NEW_VULNERABLE_DEPENDENCY)
+ .setLevel(LEVEL_INFORMATIONAL)
+ .setTimestamp(notificationTimestamp)
+ .setTitle(generateNotificationTitle(NotificationConstants.Title.NEW_VULNERABLE_DEPENDENCY, subject.getProject()))
+ .setContent(generateNotificationContent(subject.getComponent(), subject.getVulnerabilitiesList()))
+ .setSubject(Any.pack(subject))
+ .build())
+ .ifPresent(notifications::add);
+ }
+
+ dao.getForNewVulnerabilities(component.uuid(), newVulns.stream().map(Vulnerability::getUuid).toList(), analysisLevel).stream()
+ .map(subject -> org.dependencytrack.proto.notification.v1.Notification.newBuilder()
+ .setScope(SCOPE_PORTFOLIO)
+ .setGroup(GROUP_NEW_VULNERABILITY)
+ .setLevel(LEVEL_INFORMATIONAL)
+ .setTimestamp(notificationTimestamp)
+ .setTitle(generateNotificationTitle(NotificationConstants.Title.NEW_VULNERABILITY, subject.getProject()))
+ .setContent(generateNotificationContent(subject.getVulnerability()))
+ .setSubject(Any.pack(subject))
+ .build())
+ .forEach(notifications::add);
+ });
+
+ for (final org.dependencytrack.proto.notification.v1.Notification notification : notifications) {
+ eventDispatcher.dispatchAsync(component.projectUuid().toString(), notification);
+ }
+ }
+
+ private boolean canUpdateVulnerability(final Vulnerability vuln, final Scanner scanner) {
+ var canUpdate = true;
+
+ // Results from the internal scanner only contain vulnId and source, nothing else.
+ // As they only refer to existing vulnerabilities in the database, no update must be performed.
+ canUpdate &= scanner != SCANNER_INTERNAL;
+
+ // Internal vulnerabilities can only be updated via REST API.
+ canUpdate &= !Vulnerability.Source.INTERNAL.name().equals(vuln.getSource());
+
+ // If the scanner is also the authoritative source of the given vulnerability,
+ // it should be able to update it. This will be the case for the OSS Index scanner
+ // and sonatype-XXX vulnerabilities for example.
+ canUpdate &= isAuthoritativeSource(vuln, convert(scanner))
+ // Alternatively, if the vulnerability could be mirrored, but mirroring
+ // is disabled, it is OK to override any existing data.
+ //
+ // Ideally, we'd track the data from all sources instead of just overriding
+ // it, but for now this will have to do it.
+ || (canBeMirrored(vuln) && !isMirroringEnabled(vuln));
+
+ return canUpdate;
+ }
+
+ private static VulnerabilityAnalysisLevel determineAnalysisLevel(final FixedKeyRecord<?, ?> record) {
+ return KafkaUtil.getEventHeader(record.headers(), KafkaEventHeaders.VULN_ANALYSIS_LEVEL)
+ .map(value -> {
+ try {
+ return VulnerabilityAnalysisLevel.valueOf(value);
+ } catch (IllegalArgumentException e) {
+ LOGGER.warn("The reported analysis type %s is invalid, assuming %s"
+ .formatted(value, VulnerabilityAnalysisLevel.PERIODIC_ANALYSIS));
+ return VulnerabilityAnalysisLevel.PERIODIC_ANALYSIS;
+ }
+ })
+ .orElse(VulnerabilityAnalysisLevel.PERIODIC_ANALYSIS);
+ }
+
+ private static boolean determineIsComponentNew(final FixedKeyRecord<?, ?> record) {
+ return KafkaUtil.getEventHeader(record.headers(), KafkaEventHeaders.IS_NEW_COMPONENT)
+ .map(Boolean::parseBoolean)
+ .orElse(false);
+ }
+
+ private static String prettyPrint(final ScanKey scanKey) {
+ return "%s/%s".formatted(scanKey.getScanToken(), scanKey.getComponentUuid());
+ }
+
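+ // JDBI DAO declaring the queries and batch statements used by this processor.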
+ public interface Dao {
+
+ @SqlQuery("""
+ SELECT
+ "C"."ID" AS "id",
+ "C"."UUID" AS "uuid",
+ "P"."ID" AS "projectId",
+ "P"."UUID" AS "projectUuid"
+ FROM
+ "COMPONENT" AS "C"
+ INNER JOIN
+ "PROJECT" AS "P" ON "P"."ID" = "C"."PROJECT_ID"
+ WHERE
+ "C"."UUID" = (:uuid)::TEXT
+ """)
+ @RegisterConstructorMapper(Component.class)
+ Component getComponentByUuid(final UUID uuid);
+
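+ // ON CONFLICT DO NOTHING makes RETURNING yield only the vulnerability IDs of findings that were actually created.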
+ @SqlBatch("""
+ INSERT INTO "COMPONENTS_VULNERABILITIES"
+ ("COMPONENT_ID", "VULNERABILITY_ID")
+ VALUES
+ (:component.id, :vuln.id)
+ ON CONFLICT DO NOTHING
+ RETURNING "VULNERABILITY_ID"
+ """)
+ @GetGeneratedKeys("VULNERABILITY_ID")
+ List<Long> createFindings(@BindMethods("component") final Component component, @BindBean("vuln") final Iterable<Vulnerability> vuln);
+
+ @SqlBatch("""
+ INSERT INTO "FINDINGATTRIBUTION"
+ ("VULNERABILITY_ID", "COMPONENT_ID", "PROJECT_ID", "ANALYZERIDENTITY", "ATTRIBUTED_ON", "UUID")
+ VALUES
+ (:vulnId, :componentId, :projectId, :analyzer, NOW(), (:uuid)::TEXT)
+ ON CONFLICT ("VULNERABILITY_ID", "COMPONENT_ID") DO NOTHING
+ """)
+ void createFindingAttributions(@BindMethods final Iterable<FindingAttribution> attribution);
+
+ @SqlQuery("""
+ SELECT
+ "V"."ID" AS "vulnId",
+ "V"."UUID" AS "vulnUuid",
+ "A"."ID" AS "id",
+ "A"."COMPONENT_ID" AS "componentId",
+ "A"."PROJECT_ID" AS "projectId",
+ "A"."STATE" AS "state",
+ "A"."JUSTIFICATION" AS "justification",
+ "A"."RESPONSE" AS "response",
+ "A"."DETAILS" AS "details",
+ "A"."SUPPRESSED" AS "suppressed",
+ "A"."SEVERITY" AS "severity",
+ "A"."CVSSV2VECTOR" AS "cvssV2Vector",
+ "A"."CVSSV2SCORE" AS "cvssV2Score",
+ "A"."CVSSV3VECTOR" AS "cvssV3Vector",
+ "A"."CVSSV3SCORE" AS "cvssV3Score",
+ "A"."OWASPVECTOR" AS "owaspVector",
+ "A"."OWASPSCORE" AS "owaspScore"
+ FROM
+ "VULNERABILITY" AS "V"
+ INNER JOIN
+ "ANALYSIS" AS "A" ON "A"."VULNERABILITY_ID" = "V"."ID"
+ WHERE
+ "A"."COMPONENT_ID" = :component.id
+ AND "V"."UUID" = ANY((:vulnUuids)::TEXT[])
+ """)
+ @RegisterBeanMapper(Analysis.class)
+ List<Analysis> getAnalyses(@BindMethods("component") final Component component, final Iterable<UUID> vulnUuids);
+
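+ // Upserts analyses; RETURNING exposes the analysis IDs needed to link audit comments to newly created records.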
+ @SqlBatch("""
+ INSERT INTO "ANALYSIS"
+ ("VULNERABILITY_ID", "COMPONENT_ID", "PROJECT_ID", "STATE", "JUSTIFICATION", "RESPONSE", "DETAILS",
+ "SUPPRESSED", "SEVERITY", "CVSSV2VECTOR", "CVSSV2SCORE", "CVSSV3VECTOR", "CVSSV3SCORE", "OWASPVECTOR", "OWASPSCORE")
+ VALUES
+ (:vulnId, :componentId, :projectId, :state, :justification, :response, :details, :suppressed,
+ :severity, :cvssV2Vector, :cvssV2Score, :cvssV3Vector, :cvssV3Score, :owaspVector, :owaspScore)
+ ON CONFLICT ("VULNERABILITY_ID", "COMPONENT_ID", "PROJECT_ID") DO UPDATE
+ SET
+ "STATE" = :state,
+ "JUSTIFICATION" = :justification,
+ "RESPONSE" = :response,
+ "DETAILS" = :details,
+ "SUPPRESSED" = :suppressed,
+ "SEVERITY" = :severity,
+ "CVSSV2VECTOR" = :cvssV2Vector,
+ "CVSSV2SCORE" = :cvssV2Score,
+ "CVSSV3VECTOR" = :cvssV3Vector,
+ "CVSSV3SCORE" = :cvssV3Score,
+ "OWASPVECTOR" = :owaspVector,
+ "OWASPSCORE" = :owaspScore
+ RETURNING "ID", "VULNERABILITY_ID"
+ """)
+ @GetGeneratedKeys({"ID", "VULNERABILITY_ID"})
+ @RegisterConstructorMapper(CreatedAnalysis.class)
+ List<CreatedAnalysis> createOrUpdateAnalyses(@BindBean final Iterable<Analysis> analysis);
+
+ @SqlBatch("""
+ INSERT INTO "ANALYSISCOMMENT"
+ ("ANALYSIS_ID", "TIMESTAMP", "COMMENT", "COMMENTER")
+ VALUES
+ (:analysisId, NOW(), :comment, :commenter)
+ """)
+ void createAnalysisComments(@BindMethods final Iterable<AnalysisComment> comment);
+
+ }
+
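+ // Bean representation of an "ANALYSIS" row, enriched with the owning vulnerability's ID and UUID.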
+ public static class Analysis {
+
+ private long id;
+ private long componentId;
+ private long projectId;
+ private long vulnId;
+ private UUID vulnUuid;
+ private AnalysisState state;
+ private AnalysisJustification justification;
+ private AnalysisResponse response;
+ private String details;
+ private Boolean suppressed;
+ private Severity severity;
+ private String cvssV2Vector;
+ private Double cvssV2Score;
+ private String cvssV3Vector;
+ private Double cvssV3Score;
+ private String owaspVector;
+ private Double owaspScore;
+
+ private static Analysis fromPolicy(final VulnerabilityPolicy policy) {
+ final var analysis = new Analysis();
+ if (policy.analysis().getState() != null) {
+ analysis.setState(switch (policy.analysis().getState()) {
+ case EXPLOITABLE -> AnalysisState.EXPLOITABLE;
+ case FALSE_POSITIVE -> AnalysisState.FALSE_POSITIVE;
+ case IN_TRIAGE -> AnalysisState.IN_TRIAGE;
+ case NOT_AFFECTED -> AnalysisState.NOT_AFFECTED;
+ case RESOLVED -> AnalysisState.RESOLVED;
+ });
+ } else {
+ throw new IllegalArgumentException("Analysis of policy does not define a state");
+ }
+ if (policy.analysis().getJustification() != null) {
+ analysis.setJustification(switch (policy.analysis().getJustification()) {
+ case CODE_NOT_PRESENT -> AnalysisJustification.CODE_NOT_PRESENT;
+ case CODE_NOT_REACHABLE -> AnalysisJustification.CODE_NOT_REACHABLE;
+ case PROTECTED_AT_PERIMETER -> AnalysisJustification.PROTECTED_AT_PERIMETER;
+ case PROTECTED_AT_RUNTIME -> AnalysisJustification.PROTECTED_AT_RUNTIME;
+ case PROTECTED_BY_COMPILER -> AnalysisJustification.PROTECTED_BY_COMPILER;
+ case PROTECTED_BY_MITIGATING_CONTROL -> AnalysisJustification.PROTECTED_BY_MITIGATING_CONTROL;
+ case REQUIRES_CONFIGURATION -> AnalysisJustification.REQUIRES_CONFIGURATION;
+ case REQUIRES_DEPENDENCY -> AnalysisJustification.REQUIRES_DEPENDENCY;
+ case REQUIRES_ENVIRONMENT -> AnalysisJustification.REQUIRES_ENVIRONMENT;
+ });
+ }
+ if (policy.analysis().getVendorResponse() != null) {
+ analysis.setResponse(switch (policy.analysis().getVendorResponse()) {
+ case CAN_NOT_FIX -> AnalysisResponse.CAN_NOT_FIX;
+ case ROLLBACK -> AnalysisResponse.ROLLBACK;
+ case UPDATE -> AnalysisResponse.UPDATE;
+ case WILL_NOT_FIX -> AnalysisResponse.WILL_NOT_FIX;
+ case WORKAROUND_AVAILABLE -> AnalysisResponse.WORKAROUND_AVAILABLE;
+ });
+ }
+ if (policy.analysis().getDetails() != null) {
+ analysis.setDetails(policy.analysis().getDetails());
+ }
+ analysis.setSuppressed(policy.analysis().isSuppress());
+
+ if (policy.ratings() != null && !policy.ratings().isEmpty()) {
+ if (policy.ratings().size() > 3) {
+ throw new IllegalArgumentException("Policy defines more than three ratings");
+ }
+
+ final var methodsSeen = new HashSet<>();
+ for (final VulnerabilityPolicyRating policyRating : policy.ratings()) {
+ if (policyRating.getMethod() == null) {
+ throw new IllegalArgumentException("Rating #%d does not define a method"
+ .formatted(policy.ratings().indexOf(policyRating)));
+ }
+ if (!methodsSeen.add(policyRating.getMethod())) {
+ throw new IllegalArgumentException("Rating method %s is defined more than once"
+ .formatted(policyRating.getMethod()));
+ }
+ if (policyRating.getSeverity() == null) {
+ throw new IllegalArgumentException("Rating #%d (%s) does not define a severity"
+ .formatted(policy.ratings().indexOf(policyRating), policyRating.getMethod()));
+ }
+
+ analysis.setSeverity(switch (policyRating.getSeverity()) {
+ case INFO -> Severity.INFO;
+ case LOW -> Severity.LOW;
+ case MEDIUM -> Severity.MEDIUM;
+ case HIGH -> Severity.HIGH;
+ case CRITICAL -> Severity.CRITICAL;
+ });
+ switch (policyRating.getMethod()) {
+ case CVSSV2 -> {
+ analysis.setCvssV2Vector(policyRating.getVector());
+ analysis.setCvssV2Score(policyRating.getScore());
+ }
+ case CVSSV3 -> {
+ analysis.setCvssV3Vector(policyRating.getVector());
+ analysis.setCvssV3Score(policyRating.getScore());
+ }
+ case OWASP -> {
+ analysis.setOwaspVector(policyRating.getVector());
+ analysis.setOwaspScore(policyRating.getScore());
+ }
+ }
+ }
+ }
+
+ return analysis;
+ }
+
+ public long getId() {
+ return id;
+ }
+
+ public void setId(final long id) {
+ this.id = id;
+ }
+
+ public long getComponentId() {
+ return componentId;
+ }
+
+ public void setComponentId(final long componentId) {
+ this.componentId = componentId;
+ }
+
+ public long getProjectId() {
+ return projectId;
+ }
+
+ public void setProjectId(final long projectId) {
+ this.projectId = projectId;
+ }
+
+ public long getVulnId() {
+ return vulnId;
+ }
+
+ public void setVulnId(final long vulnId) {
+ this.vulnId = vulnId;
+ }
+
+ public UUID getVulnUuid() {
+ return vulnUuid;
+ }
+
+ public void setVulnUuid(final UUID vulnUuid) {
+ this.vulnUuid = vulnUuid;
+ }
+
+ public AnalysisState getState() {
+ return state;
+ }
+
+ public void setState(final AnalysisState state) {
+ this.state = state;
+ }
+
+ public AnalysisJustification getJustification() {
+ return justification;
+ }
+
+ public void setJustification(final AnalysisJustification justification) {
+ this.justification = justification;
+ }
+
+ public AnalysisResponse getResponse() {
+ return response;
+ }
+
+ public void setResponse(final AnalysisResponse response) {
+ this.response = response;
+ }
+
+ public String getDetails() {
+ return details;
+ }
+
+ public void setDetails(final String details) {
+ this.details = details;
+ }
+
+ public Boolean getSuppressed() {
+ return suppressed;
+ }
+
+ public void setSuppressed(final Boolean suppressed) {
+ this.suppressed = suppressed;
+ }
+
+ public Severity getSeverity() {
+ return severity;
+ }
+
+ public void setSeverity(final Severity severity) {
+ this.severity = severity;
+ }
+
+ public String getCvssV2Vector() {
+ return cvssV2Vector;
+ }
+
+ public void setCvssV2Vector(final String cvssV2Vector) {
+ this.cvssV2Vector = cvssV2Vector;
+ }
+
+ public Double getCvssV2Score() {
+ return cvssV2Score;
+ }
+
+ public void setCvssV2Score(final Double cvssV2Score) {
+ this.cvssV2Score = cvssV2Score;
+ }
+
+ public String getCvssV3Vector() {
+ return cvssV3Vector;
+ }
+
+ public void setCvssV3Vector(final String cvssV3Vector) {
+ this.cvssV3Vector = cvssV3Vector;
+ }
+
+ public Double getCvssV3Score() {
+ return cvssV3Score;
+ }
+
+ public void setCvssV3Score(final Double cvssV3Score) {
+ this.cvssV3Score = cvssV3Score;
+ }
+
+ public String getOwaspVector() {
+ return owaspVector;
+ }
+
+ public void setOwaspVector(final String owaspVector) {
+ this.owaspVector = owaspVector;
+ }
+
+ public Double getOwaspScore() {
+ return owaspScore;
+ }
+
+ public void setOwaspScore(final Double owaspScore) {
+ this.owaspScore = owaspScore;
+ }
+
+ }
+
+ public record CreatedAnalysis(long id, @ColumnName("VULNERABILITY_ID") long vulnId) {
+ }
+
+ public record AnalysisComment(Long analysisId, String comment, String commenter) {
+ }
+
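+ // Collects audit comments for a single analysis; when comments exist, the conditions the policy matched on are prepended as the first comment.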
+ private static final class AnalysisCommentFactory {
+
+ private final Long analysisId;
+ private final VulnerabilityPolicy policy;
+ private final String commenter;
+ private final List<AnalysisComment> comments;
+
+ private AnalysisCommentFactory(final Long analysisId, VulnerabilityPolicy policy) {
+ this.analysisId = analysisId;
+ this.policy = policy;
+ this.commenter = createCommenter(policy);
+ this.comments = new ArrayList<>();
+ }
+
+ private void createComment(final String comment) {
+ comments.add(new AnalysisComment(this.analysisId, comment, this.commenter));
+ }
+
+ private List<AnalysisComment> getComments() {
+ if (comments.isEmpty()) {
+ return comments;
+ }
+
+ // If we have comments already, additionally include what the policy matched on.
+ // Include this as the very first comment, and do not modify the original list.
+ final var commentsCopy = new ArrayList<AnalysisComment>();
+ commentsCopy.add(new AnalysisComment(this.analysisId, "Matched on condition(s):\n%s"
+ .formatted(policy.conditions().stream().map("- %s"::formatted).collect(Collectors.joining("\n"))), this.commenter));
+ commentsCopy.addAll(comments);
+ return commentsCopy;
+ }
+
+ private static String createCommenter(final VulnerabilityPolicy policy) {
+ if (isNotBlank(policy.author())) {
+ return "[Policy{Name=%s, Author=%s}]".formatted(policy.name(), policy.author());
+ }
+
+ return "[Policy{Name=%s}]".formatted(policy.name());
+ }
+
+ }
+
+ public record Component(long id, UUID uuid, long projectId, UUID projectUuid) {
+ }
+
+ public record FindingAttribution(long vulnId, long componentId, long projectId, String analyzer, UUID uuid) {
+ }
+
+}
diff --git a/src/main/java/org/dependencytrack/health/HealthCheckInitializer.java b/src/main/java/org/dependencytrack/health/HealthCheckInitializer.java
index 60ad5dd04..d1e3595c4 100644
--- a/src/main/java/org/dependencytrack/health/HealthCheckInitializer.java
+++ b/src/main/java/org/dependencytrack/health/HealthCheckInitializer.java
@@ -24,6 +24,7 @@
import alpine.server.health.checks.DatabaseHealthCheck;
import io.github.mweirauch.micrometer.jvm.extras.ProcessMemoryMetrics;
import io.github.mweirauch.micrometer.jvm.extras.ProcessThreadMetrics;
+import org.dependencytrack.event.kafka.processor.KafkaProcessorsHealthCheck;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
@@ -35,8 +36,8 @@ public class HealthCheckInitializer implements ServletContextListener {
@Override
public void contextInitialized(final ServletContextEvent event) {
LOGGER.info("Registering health checks");
- HealthCheckRegistry.getInstance().register("database", new DatabaseHealthCheck());
- HealthCheckRegistry.getInstance().register("kafka-streams", new KafkaStreamsHealthCheck());
+ HealthCheckRegistry.getInstance().register(DatabaseHealthCheck.class.getName(), new DatabaseHealthCheck());
+ HealthCheckRegistry.getInstance().register(KafkaProcessorsHealthCheck.class.getName(), new KafkaProcessorsHealthCheck());
// TODO: Move this to its own initializer if it turns out to be useful
LOGGER.info("Registering extra process metrics");
diff --git a/src/main/java/org/dependencytrack/health/KafkaStreamsHealthCheck.java b/src/main/java/org/dependencytrack/health/KafkaStreamsHealthCheck.java
index c3553b3dd..730d47337 100644
--- a/src/main/java/org/dependencytrack/health/KafkaStreamsHealthCheck.java
+++ b/src/main/java/org/dependencytrack/health/KafkaStreamsHealthCheck.java
@@ -1,7 +1,7 @@
package org.dependencytrack.health;
import org.apache.kafka.streams.KafkaStreams;
-import org.dependencytrack.event.kafka.KafkaStreamsInitializer;
+import org.dependencytrack.event.kafka.streams.KafkaStreamsInitializer;
import org.eclipse.microprofile.health.HealthCheck;
import org.eclipse.microprofile.health.HealthCheckResponse;
import org.eclipse.microprofile.health.Liveness;
diff --git a/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java b/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java
index f87bb7e6c..824113ac6 100644
--- a/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java
+++ b/src/main/java/org/dependencytrack/persistence/jdbi/NotificationSubjectDao.java
@@ -1,16 +1,20 @@
package org.dependencytrack.persistence.jdbi;
import org.dependencytrack.model.VulnerabilityAnalysisLevel;
+import org.dependencytrack.persistence.jdbi.mapping.NotificationBomRowMapper;
import org.dependencytrack.persistence.jdbi.mapping.NotificationComponentRowMapper;
import org.dependencytrack.persistence.jdbi.mapping.NotificationProjectRowMapper;
+import org.dependencytrack.persistence.jdbi.mapping.NotificationSubjectBomConsumedOrProcessedRowMapper;
import org.dependencytrack.persistence.jdbi.mapping.NotificationSubjectNewVulnerabilityRowMapper;
import org.dependencytrack.persistence.jdbi.mapping.NotificationSubjectNewVulnerableDependencyRowReducer;
import org.dependencytrack.persistence.jdbi.mapping.NotificationVulnerabilityRowMapper;
+import org.dependencytrack.proto.notification.v1.BomConsumedOrProcessedSubject;
import org.dependencytrack.proto.notification.v1.NewVulnerabilitySubject;
import org.dependencytrack.proto.notification.v1.NewVulnerableDependencySubject;
import org.jdbi.v3.sqlobject.config.RegisterRowMapper;
import org.jdbi.v3.sqlobject.config.RegisterRowMappers;
import org.jdbi.v3.sqlobject.statement.SqlQuery;
+import org.jdbi.v3.sqlobject.statement.UseRowMapper;
import org.jdbi.v3.sqlobject.statement.UseRowReducer;
import java.util.Collection;
@@ -19,6 +23,7 @@
import java.util.UUID;
@RegisterRowMappers({
+ @RegisterRowMapper(NotificationBomRowMapper.class),
@RegisterRowMapper(NotificationComponentRowMapper.class),
@RegisterRowMapper(NotificationProjectRowMapper.class),
@RegisterRowMapper(NotificationVulnerabilityRowMapper.class)
@@ -128,7 +133,7 @@ LEFT JOIN LATERAL (
"C"."UUID" = (:componentUuid)::TEXT AND "V"."UUID" = ANY((:vulnUuids)::TEXT[])
AND ("A"."SUPPRESSED" IS NULL OR NOT "A"."SUPPRESSED")
""")
- @RegisterRowMapper(NotificationSubjectNewVulnerabilityRowMapper.class)
+ @UseRowMapper(NotificationSubjectNewVulnerabilityRowMapper.class)
List<NewVulnerabilitySubject> getForNewVulnerabilities(final UUID componentUuid, final Collection<UUID> vulnUuids,
final VulnerabilityAnalysisLevel vulnAnalysisLevel);
@@ -235,4 +240,37 @@ LEFT JOIN LATERAL (
@UseRowReducer(NotificationSubjectNewVulnerableDependencyRowReducer.class)
Optional<NewVulnerableDependencySubject> getForNewVulnerableDependency(final UUID componentUuid);
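+ // Builds BOM_PROCESSED notification subjects for completed BOM processing workflows; the original BOM content is not retained, hence the placeholder values.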
+ @SqlQuery("""
+ SELECT
+ "P"."UUID" AS "projectUuid",
+ "P"."NAME" AS "projectName",
+ "P"."VERSION" AS "projectVersion",
+ "P"."DESCRIPTION" AS "projectDescription",
+ "P"."PURL" AS "projectPurl",
+ (SELECT
+ ARRAY_AGG(DISTINCT "T"."NAME")
+ FROM
+ "TAG" AS "T"
+ INNER JOIN
+ "PROJECTS_TAGS" AS "PT" ON "PT"."TAG_ID" = "T"."ID"
+ WHERE
+ "PT"."PROJECT_ID" = "P"."ID"
+ ) AS "projectTags",
+ 'CycloneDX' AS "bomFormat",
+ '(Unknown)' AS "bomSpecVersion",
+ '(Omitted)' AS "bomContent"
+ FROM
+ "VULNERABILITYSCAN" AS "VS"
+ INNER JOIN
+ "PROJECT" AS "P" ON "P"."UUID" = "VS"."TARGET_IDENTIFIER"
+ INNER JOIN
+ "WORKFLOW_STATE" AS "WFS" ON "WFS"."TOKEN" = "VS"."TOKEN"
+ AND "WFS"."STEP" = 'BOM_PROCESSING'
+ AND "WFS"."STATUS" = 'COMPLETED'
+ WHERE
+ "VS"."TOKEN" = ANY(:tokens)
+ """)
+ @UseRowMapper(NotificationSubjectBomConsumedOrProcessedRowMapper.class)
+ List<BomConsumedOrProcessedSubject> getForDelayedBomProcessed(final Collection<String> tokens);
+
}
diff --git a/src/main/java/org/dependencytrack/persistence/jdbi/mapping/NotificationBomRowMapper.java b/src/main/java/org/dependencytrack/persistence/jdbi/mapping/NotificationBomRowMapper.java
new file mode 100644
index 000000000..d6e53e68f
--- /dev/null
+++ b/src/main/java/org/dependencytrack/persistence/jdbi/mapping/NotificationBomRowMapper.java
@@ -0,0 +1,23 @@
+package org.dependencytrack.persistence.jdbi.mapping;
+
+import org.dependencytrack.proto.notification.v1.Bom;
+import org.jdbi.v3.core.mapper.RowMapper;
+import org.jdbi.v3.core.statement.StatementContext;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+import static org.dependencytrack.persistence.jdbi.mapping.RowMapperUtil.maybeSet;
+
+public class NotificationBomRowMapper implements RowMapper<Bom> {
+
+ @Override
+ public Bom map(final ResultSet rs, final StatementContext ctx) throws SQLException {
+ final Bom.Builder builder = Bom.newBuilder();
+ maybeSet(rs, "bomFormat", ResultSet::getString, builder::setFormat);
+ maybeSet(rs, "bomSpecVersion", ResultSet::getString, builder::setSpecVersion);
+ maybeSet(rs, "bomContent", ResultSet::getString, builder::setContent);
+ return builder.build();
+ }
+
+}
diff --git a/src/main/java/org/dependencytrack/persistence/jdbi/mapping/NotificationSubjectBomConsumedOrProcessedRowMapper.java b/src/main/java/org/dependencytrack/persistence/jdbi/mapping/NotificationSubjectBomConsumedOrProcessedRowMapper.java
new file mode 100644
index 000000000..66b1c4036
--- /dev/null
+++ b/src/main/java/org/dependencytrack/persistence/jdbi/mapping/NotificationSubjectBomConsumedOrProcessedRowMapper.java
@@ -0,0 +1,32 @@
+package org.dependencytrack.persistence.jdbi.mapping;
+
+import org.dependencytrack.proto.notification.v1.Bom;
+import org.dependencytrack.proto.notification.v1.BomConsumedOrProcessedSubject;
+import org.dependencytrack.proto.notification.v1.Project;
+import org.jdbi.v3.core.mapper.NoSuchMapperException;
+import org.jdbi.v3.core.mapper.RowMapper;
+import org.jdbi.v3.core.statement.StatementContext;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+import static org.dependencytrack.persistence.jdbi.mapping.RowMapperUtil.maybeSet;
+
+public class NotificationSubjectBomConsumedOrProcessedRowMapper implements RowMapper<BomConsumedOrProcessedSubject> {
+
+ @Override
+ public BomConsumedOrProcessedSubject map(final ResultSet rs, final StatementContext ctx) throws SQLException {
+ final RowMapper<Project> projectRowMapper = ctx.findRowMapperFor(Project.class)
+ .orElseThrow(() -> new NoSuchMapperException("No mapper registered for %s".formatted(Project.class)));
+ final RowMapper<Bom> bomRowMapper = ctx.findRowMapperFor(Bom.class)
+ .orElseThrow(() -> new NoSuchMapperException("No mapper registered for %s".formatted(Bom.class)));
+
+ final BomConsumedOrProcessedSubject.Builder builder = BomConsumedOrProcessedSubject.newBuilder()
+ .setProject(projectRowMapper.map(rs, ctx))
+ .setBom(bomRowMapper.map(rs, ctx));
+ maybeSet(rs, "token", ResultSet::getString, builder::setToken);
+
+ return builder.build();
+ }
+
+}
diff --git a/src/main/java/org/dependencytrack/persistence/jdbi/mapping/VulnerabilityScanRowMapper.java b/src/main/java/org/dependencytrack/persistence/jdbi/mapping/VulnerabilityScanRowMapper.java
new file mode 100644
index 000000000..aaad46653
--- /dev/null
+++ b/src/main/java/org/dependencytrack/persistence/jdbi/mapping/VulnerabilityScanRowMapper.java
@@ -0,0 +1,31 @@
+package org.dependencytrack.persistence.jdbi.mapping;
+
+import org.dependencytrack.model.VulnerabilityScan;
+import org.dependencytrack.model.VulnerabilityScan.Status;
+import org.dependencytrack.model.VulnerabilityScan.TargetType;
+import org.jdbi.v3.core.mapper.RowMapper;
+import org.jdbi.v3.core.statement.StatementContext;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.UUID;
+
+import static org.dependencytrack.persistence.jdbi.mapping.RowMapperUtil.maybeSet;
+
+public class VulnerabilityScanRowMapper implements RowMapper<VulnerabilityScan> {
+
+ @Override
+ public VulnerabilityScan map(final ResultSet rs, final StatementContext ctx) throws SQLException {
+ final var vulnScan = new VulnerabilityScan();
+ maybeSet(rs, "token", ResultSet::getString, vulnScan::setToken);
+ maybeSet(rs, "scan_total", ResultSet::getLong, vulnScan::setScanTotal);
+ maybeSet(rs, "scan_failed", ResultSet::getLong, vulnScan::setScanFailed);
+ maybeSet(rs, "status", ResultSet::getString, status -> vulnScan.setStatus(Status.valueOf(status)));
+ maybeSet(rs, "target_type", ResultSet::getString, type -> vulnScan.setTargetType(TargetType.valueOf(type)));
+ maybeSet(rs, "target_identifier", ResultSet::getString, identifier -> vulnScan.setTargetIdentifier(UUID.fromString(identifier)));
+ maybeSet(rs, "failure_threshold", ResultSet::getDouble, vulnScan::setFailureThreshold);
+ maybeSet(rs, "failure_reason", ResultSet::getString, vulnScan::setFailureReason);
+ return vulnScan;
+ }
+
+}
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
index e3db0238c..f1dc9feb6 100644
--- a/src/main/resources/application.properties
+++ b/src/main/resources/application.properties
@@ -434,6 +434,39 @@ kafka.streams.production.exception.threshold.interval=PT30M
kafka.streams.transient.processing.exception.threshold.count=50
kafka.streams.transient.processing.exception.threshold.interval=PT30M
+# Optional
+# alpine.kafka.processor.<name>.processing.order=partition
+# alpine.kafka.processor.<name>.max.batch.size=10
+# alpine.kafka.processor.<name>.max.concurrency=1
+# alpine.kafka.processor.<name>.retry.initial.delay.ms=1000
+# alpine.kafka.processor.<name>.retry.multiplier=1
+# alpine.kafka.processor.<name>.retry.randomization.factor=0.3
+# alpine.kafka.processor.<name>.retry.max.delay.ms=60000
+# alpine.kafka.processor.<name>.consumer.<consumer.config.name>=<value>
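+# The processors below all share the "dtrack-apiserver-processor" consumer group.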
+alpine.kafka.processor.repo.meta.result.max.concurrency=3
+alpine.kafka.processor.repo.meta.result.processing.order=partition
+alpine.kafka.processor.repo.meta.result.consumer.group.id=dtrack-apiserver-processor
+
+alpine.kafka.processor.mirrored.vuln.max.concurrency=3
+alpine.kafka.processor.mirrored.vuln.processing.order=partition
+alpine.kafka.processor.mirrored.vuln.retry.initial.delay.ms=3000
+alpine.kafka.processor.mirrored.vuln.retry.multiplier=2
+alpine.kafka.processor.mirrored.vuln.retry.randomization.factor=0.3
+alpine.kafka.processor.mirrored.vuln.retry.max.delay.ms=180000
+alpine.kafka.processor.mirrored.vuln.consumer.group.id=dtrack-apiserver-processor
+
+alpine.kafka.processor.processed.vuln.scan.result.processing.order=unordered
+alpine.kafka.processor.processed.vuln.scan.result.max.batch.size=500
+alpine.kafka.processor.processed.vuln.scan.result.max.concurrency=1
+alpine.kafka.processor.processed.vuln.scan.result.consumer.group.id=dtrack-apiserver-processor
+alpine.kafka.processor.processed.vuln.scan.result.consumer.max.poll.records=1000
+alpine.kafka.processor.processed.vuln.scan.result.consumer.fetch.min.bytes=16384
+
+alpine.kafka.processor.delayed.bom.processed.notification.processing.order=unordered
+alpine.kafka.processor.delayed.bom.processed.notification.max.batch.size=100
+alpine.kafka.processor.delayed.bom.processed.notification.max.concurrency=1
+alpine.kafka.processor.delayed.bom.processed.notification.consumer.group.id=dtrack-apiserver-processor
+
# Scheduling tasks after 3 minutes (3*60*1000) of starting application
task.scheduler.initial.delay=180000
diff --git a/src/main/webapp/WEB-INF/web.xml b/src/main/webapp/WEB-INF/web.xml
index 74af896da..099a11dc1 100644
--- a/src/main/webapp/WEB-INF/web.xml
+++ b/src/main/webapp/WEB-INF/web.xml
@@ -48,7 +48,10 @@
<listener-class>org.dependencytrack.event.EventSubsystemInitializer</listener-class>
- <listener-class>org.dependencytrack.event.kafka.KafkaStreamsInitializer</listener-class>
+ <listener-class>org.dependencytrack.event.kafka.processor.KafkaProcessorsInitializer</listener-class>
+ </listener>
+ <listener>
+ <listener-class>org.dependencytrack.event.kafka.streams.KafkaStreamsInitializer</listener-class>
<listener-class>org.dependencytrack.event.PurlMigrator</listener-class>
diff --git a/src/test/java/org/dependencytrack/event/kafka/KafkaStreamsTest.java b/src/test/java/org/dependencytrack/event/kafka/KafkaStreamsTest.java
deleted file mode 100644
index 6de799594..000000000
--- a/src/test/java/org/dependencytrack/event/kafka/KafkaStreamsTest.java
+++ /dev/null
@@ -1,75 +0,0 @@
-package org.dependencytrack.event.kafka;
-
-import net.mguenther.kafka.junit.ExternalKafkaCluster;
-import net.mguenther.kafka.junit.TopicConfig;
-import org.apache.kafka.streams.KafkaStreams;
-import org.apache.kafka.streams.StreamsConfig;
-import org.dependencytrack.PersistenceCapableTest;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.testcontainers.redpanda.RedpandaContainer;
-import org.testcontainers.utility.DockerImageName;
-
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.time.Duration;
-
-import static org.dependencytrack.assertion.Assertions.assertConditionWithTimeout;
-
-abstract class KafkaStreamsTest extends PersistenceCapableTest {
-
- @Rule
- public RedpandaContainer container = new RedpandaContainer(DockerImageName
- .parse("docker.redpanda.com/vectorized/redpanda:v23.2.13"));
-
- KafkaStreams kafkaStreams;
- ExternalKafkaCluster kafka;
- private Path kafkaStreamsStateDirectory;
-
- @Before
- public void setUp() throws Exception {
- kafka = ExternalKafkaCluster.at(container.getBootstrapServers());
-
- kafka.createTopic(TopicConfig
- .withName(KafkaTopics.VULN_ANALYSIS_COMMAND.name())
- .withNumberOfPartitions(3)
- .withNumberOfReplicas(1));
- kafka.createTopic(TopicConfig
- .withName(KafkaTopics.VULN_ANALYSIS_RESULT.name())
- .withNumberOfPartitions(3)
- .withNumberOfReplicas(1));
- kafka.createTopic(TopicConfig
- .withName(KafkaTopics.REPO_META_ANALYSIS_RESULT.name())
- .withNumberOfPartitions(3)
- .withNumberOfReplicas(1));
- kafka.createTopic(TopicConfig
- .withName(KafkaTopics.NEW_VULNERABILITY.name())
- .withNumberOfPartitions(3)
- .withNumberOfReplicas(1));
-
- kafkaStreamsStateDirectory = Files.createTempDirectory(getClass().getSimpleName());
-
- final var streamsConfig = KafkaStreamsInitializer.getDefaultProperties();
- streamsConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, container.getBootstrapServers());
- streamsConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, getClass().getSimpleName());
- streamsConfig.put(StreamsConfig.STATE_DIR_CONFIG, kafkaStreamsStateDirectory.toString());
- streamsConfig.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, "3");
-
- kafkaStreams = new KafkaStreams(new KafkaStreamsTopologyFactory().createTopology(), streamsConfig);
- kafkaStreams.start();
-
- assertConditionWithTimeout(() -> KafkaStreams.State.RUNNING == kafkaStreams.state(), Duration.ofSeconds(5));
- }
-
- @After
- public void tearDown() {
- if (kafkaStreams != null) {
- kafkaStreams.close();
- }
- if (kafkaStreamsStateDirectory != null) {
- kafkaStreamsStateDirectory.toFile().delete();
- }
- }
-
-}
diff --git a/src/test/java/org/dependencytrack/event/kafka/processor/AbstractProcessorTest.java b/src/test/java/org/dependencytrack/event/kafka/processor/AbstractProcessorTest.java
new file mode 100644
index 000000000..113f1dcbc
--- /dev/null
+++ b/src/test/java/org/dependencytrack/event/kafka/processor/AbstractProcessorTest.java
@@ -0,0 +1,59 @@
+package org.dependencytrack.event.kafka.processor;
+
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.common.header.Headers;
+import org.apache.kafka.common.header.internals.RecordHeaders;
+import org.apache.kafka.common.record.TimestampType;
+import org.dependencytrack.AbstractPostgresEnabledTest;
+
+import java.time.Instant;
+import java.util.Optional;
+
+import static java.util.Objects.requireNonNullElseGet;
+
+abstract class AbstractProcessorTest extends AbstractPostgresEnabledTest {
+
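+ // Builds synthetic ConsumerRecords for processor tests; the timestamp defaults to "now" and headers default to empty.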
+ static <K, V> ConsumerRecordBuilder<K, V> aConsumerRecord(final K key, final V value) {
+ return new ConsumerRecordBuilder<>(key, value);
+ }
+
+ static final class ConsumerRecordBuilder<K, V> {
+
+ private final K key;
+ private final V value;
+ private Instant timestamp;
+ private Headers headers;
+
+ private ConsumerRecordBuilder(final K key, final V value) {
+ this.key = key;
+ this.value = value;
+ }
+
+ ConsumerRecordBuilder<K, V> withTimestamp(final Instant timestamp) {
+ this.timestamp = timestamp;
+ return this;
+ }
+
+ ConsumerRecordBuilder<K, V> withHeaders(final Headers headers) {
+ this.headers = headers;
+ return this;
+ }
+
+ ConsumerRecord<K, V> build() {
+ final Instant timestamp = requireNonNullElseGet(this.timestamp, Instant::now);
+ final Headers headers = requireNonNullElseGet(this.headers, RecordHeaders::new);
+ return new ConsumerRecord<>(
+ "topicName",
+ /* partition */ 0,
+ /* offset */ 1,
+ timestamp.toEpochMilli(), TimestampType.CREATE_TIME,
+ /* serializedKeySize */ -1,
+ /* serializedValueSize */ -1,
+ this.key, this.value,
+ headers,
+ /* leaderEpoch */ Optional.empty());
+ }
+
+ }
+
+}
diff --git a/src/test/java/org/dependencytrack/event/kafka/processor/MirrorVulnerabilityProcessorTest.java b/src/test/java/org/dependencytrack/event/kafka/processor/MirroredVulnerabilityProcessorTest.java
similarity index 95%
rename from src/test/java/org/dependencytrack/event/kafka/processor/MirrorVulnerabilityProcessorTest.java
rename to src/test/java/org/dependencytrack/event/kafka/processor/MirroredVulnerabilityProcessorTest.java
index fb0ae342a..93c035e52 100644
--- a/src/test/java/org/dependencytrack/event/kafka/processor/MirrorVulnerabilityProcessorTest.java
+++ b/src/test/java/org/dependencytrack/event/kafka/processor/MirroredVulnerabilityProcessorTest.java
@@ -1,55 +1,26 @@
package org.dependencytrack.event.kafka.processor;
-import org.apache.kafka.common.serialization.Serdes;
-import org.apache.kafka.common.serialization.StringSerializer;
-import org.apache.kafka.streams.StreamsBuilder;
-import org.apache.kafka.streams.TestInputTopic;
-import org.apache.kafka.streams.TopologyTestDriver;
-import org.apache.kafka.streams.kstream.Consumed;
-import org.cyclonedx.proto.v1_4.Bom;
-import org.dependencytrack.PersistenceCapableTest;
-import org.dependencytrack.event.kafka.serialization.KafkaProtobufSerde;
-import org.dependencytrack.event.kafka.serialization.KafkaProtobufSerializer;
import org.dependencytrack.model.Severity;
import org.dependencytrack.model.Vulnerability;
import org.dependencytrack.persistence.CweImporter;
-import org.dependencytrack.util.KafkaTestUtil;
-import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.dependencytrack.util.KafkaTestUtil.generateBomFromJson;
-public class MirrorVulnerabilityProcessorTest extends PersistenceCapableTest {
-
- private TopologyTestDriver testDriver;
- private TestInputTopic<String, Bom> inputTopic;
+public class MirroredVulnerabilityProcessorTest extends AbstractProcessorTest {
@Before
public void setUp() throws Exception {
- final var streamsBuilder = new StreamsBuilder();
- streamsBuilder
- .stream("input-topic", Consumed
- .with(Serdes.String(), new KafkaProtobufSerde<>(Bom.parser())))
- .process(MirrorVulnerabilityProcessor::new);
-
- testDriver = new TopologyTestDriver(streamsBuilder.build());
- inputTopic = testDriver.createInputTopic("input-topic",
- new StringSerializer(), new KafkaProtobufSerializer<>());
+ super.setUp();
new CweImporter().processCweDefinitions(); // Required for CWE mapping
}
- @After
- public void tearDown() {
- if (testDriver != null) {
- testDriver.close();
- }
- }
-
@Test
public void testProcessNvdVuln() throws Exception {
- inputTopic.pipeInput("NVD/CVE-2022-40489", KafkaTestUtil.generateBomFromJson("""
+ final var bovJson = """
{
"components": [
{
@@ -91,7 +62,10 @@ public void testProcessNvdVuln() throws Exception {
{ "url": "https://github.com/thinkcmf/thinkcmf/issues/736" }
]
}
- """));
+ """;
+
+ final var processor = new MirroredVulnerabilityProcessor();
+ processor.process(aConsumerRecord("NVD/CVE-2022-40489", generateBomFromJson(bovJson)).build());
final Vulnerability vuln = qm.getVulnerabilityByVulnId("NVD", "CVE-2022-40489");
assertThat(vuln).isNotNull();
@@ -160,7 +134,7 @@ public void testProcessNvdVuln() throws Exception {
@Test
public void testProcessGitHubVuln() throws Exception {
- inputTopic.pipeInput("GITHUB/GHSA-fxwm-579q-49qq", KafkaTestUtil.generateBomFromJson("""
+ final var bovJson = """
{
"components": [
{
@@ -223,7 +197,10 @@ public void testProcessGitHubVuln() throws Exception {
{ "url": "https://github.com/advisories/GHSA-fxwm-579q-49qq" }
]
}
- """));
+ """;
+
+ final var processor = new MirroredVulnerabilityProcessor();
+ processor.process(aConsumerRecord("GITHUB/GHSA-fxwm-579q-49qq", generateBomFromJson(bovJson)).build());
final Vulnerability vuln = qm.getVulnerabilityByVulnId("GITHUB", "GHSA-fxwm-579q-49qq");
assertThat(vuln).isNotNull();
@@ -375,7 +352,7 @@ public void testProcessGitHubVuln() throws Exception {
@Test
public void testProcessOsvVuln() throws Exception {
- inputTopic.pipeInput("OSV/GHSA-2cc5-23r7-vc4v", KafkaTestUtil.generateBomFromJson("""
+ final var bovJson = """
{
"components": [
{
@@ -427,7 +404,10 @@ public void testProcessOsvVuln() throws Exception {
{ "url": "https://github.com/ratpack/ratpack/blob/29434f7ac6fd4b36a4495429b70f4c8163100332/ratpack-session/src/main/java/ratpack/session/clientside/ClientSideSessionConfig.java#L29" }
]
}
- """));
+ """;
+
+ final var processor = new MirroredVulnerabilityProcessor();
+ processor.process(aConsumerRecord("OSV/GHSA-2cc5-23r7-vc4v", generateBomFromJson(bovJson)).build());
final Vulnerability vuln = qm.getVulnerabilityByVulnId("GITHUB", "GHSA-2cc5-23r7-vc4v");
assertThat(vuln).isNotNull();
@@ -555,7 +535,7 @@ public void testProcessOsvVuln() throws Exception {
@Test
public void testProcessVulnWithoutAffects() throws Exception {
- inputTopic.pipeInput("NVD/CVE-2022-40489", KafkaTestUtil.generateBomFromJson("""
+ final var bovJson = """
{
"components": [
{
@@ -573,7 +553,10 @@ public void testProcessVulnWithoutAffects() throws Exception {
}
]
}
- """));
+ """;
+
+ final var processor = new MirroredVulnerabilityProcessor();
+ processor.process(aConsumerRecord("NVD/CVE-2022-40489", generateBomFromJson(bovJson)).build());
final Vulnerability vuln = qm.getVulnerabilityByVulnId("NVD", "CVE-2022-40489");
assertThat(vuln).isNotNull();
@@ -613,7 +596,7 @@ public void testProcessVulnWithoutAffects() throws Exception {
@Test
public void testProcessVulnWithUnmatchedAffectsBomRef() throws Exception {
- inputTopic.pipeInput("NVD/CVE-2022-40489", KafkaTestUtil.generateBomFromJson("""
+ final var bovJson = """
{
"components": [
{
@@ -639,7 +622,10 @@ public void testProcessVulnWithUnmatchedAffectsBomRef() throws Exception {
}
]
}
- """));
+ """;
+
+ final var processor = new MirroredVulnerabilityProcessor();
+ processor.process(aConsumerRecord("NVD/CVE-2022-40489", generateBomFromJson(bovJson)).build());
final Vulnerability vuln = qm.getVulnerabilityByVulnId("NVD", "CVE-2022-40489");
assertThat(vuln).isNotNull();
@@ -679,7 +665,7 @@ public void testProcessVulnWithUnmatchedAffectsBomRef() throws Exception {
@Test
public void testProcessVulnWithVersConstraints() throws Exception {
- inputTopic.pipeInput("NVD/CVE-2022-40489", KafkaTestUtil.generateBomFromJson("""
+ final var bovJson = """
{
"components": [
{
@@ -731,7 +717,10 @@ public void testProcessVulnWithVersConstraints() throws Exception {
}
]
}
- """));
+ """;
+
+ final var processor = new MirroredVulnerabilityProcessor();
+ processor.process(aConsumerRecord("NVD/CVE-2022-40489", generateBomFromJson(bovJson)).build());
final Vulnerability vuln = qm.getVulnerabilityByVulnId("NVD", "CVE-2022-40489");
assertThat(vuln).isNotNull();
@@ -935,7 +924,7 @@ public void testProcessVulnWithVersConstraints() throws Exception {
@Test
public void testProcessVulnWithInvalidCpeOrPurl() throws Exception {
- inputTopic.pipeInput("NVD/CVE-2022-40489", KafkaTestUtil.generateBomFromJson("""
+ final var bovJson = """
{
"components": [
{
@@ -997,7 +986,10 @@ public void testProcessVulnWithInvalidCpeOrPurl() throws Exception {
}
]
}
- """));
+ """;
+
+ final var processor = new MirroredVulnerabilityProcessor();
+ processor.process(aConsumerRecord("NVD/CVE-2022-40489", generateBomFromJson(bovJson)).build());
final Vulnerability vuln = qm.getVulnerabilityByVulnId("NVD", "CVE-2022-40489");
assertThat(vuln).isNotNull();
@@ -1035,4 +1027,4 @@ public void testProcessVulnWithInvalidCpeOrPurl() throws Exception {
assertThat(vuln.getVulnerableSoftware()).isEmpty();
}
-}
+}
\ No newline at end of file
diff --git a/src/test/java/org/dependencytrack/event/kafka/processor/RepositoryMetaResultProcessorTest.java b/src/test/java/org/dependencytrack/event/kafka/processor/RepositoryMetaResultProcessorTest.java
index 16506299a..f4c953870 100644
--- a/src/test/java/org/dependencytrack/event/kafka/processor/RepositoryMetaResultProcessorTest.java
+++ b/src/test/java/org/dependencytrack/event/kafka/processor/RepositoryMetaResultProcessorTest.java
@@ -1,15 +1,6 @@
package org.dependencytrack.event.kafka.processor;
import com.google.protobuf.Timestamp;
-import org.apache.kafka.common.serialization.StringDeserializer;
-import org.apache.kafka.common.serialization.StringSerializer;
-import org.apache.kafka.streams.TestInputTopic;
-import org.apache.kafka.streams.Topology;
-import org.apache.kafka.streams.TopologyTestDriver;
-import org.apache.kafka.streams.test.TestRecord;
-import org.dependencytrack.PersistenceCapableTest;
-import org.dependencytrack.event.kafka.serialization.KafkaProtobufDeserializer;
-import org.dependencytrack.event.kafka.serialization.KafkaProtobufSerializer;
import org.dependencytrack.model.Component;
import org.dependencytrack.model.FetchStatus;
import org.dependencytrack.model.IntegrityAnalysis;
@@ -19,7 +10,6 @@
import org.dependencytrack.model.RepositoryType;
import org.dependencytrack.proto.repometaanalysis.v1.AnalysisResult;
import org.dependencytrack.proto.repometaanalysis.v1.IntegrityMeta;
-import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@@ -34,37 +24,20 @@
import static org.assertj.core.api.Assertions.assertThat;
-public class RepositoryMetaResultProcessorTest extends PersistenceCapableTest {
+public class RepositoryMetaResultProcessorTest extends AbstractProcessorTest {
@Rule
public EnvironmentVariables environmentVariables = new EnvironmentVariables();
- private TopologyTestDriver testDriver;
- private TestInputTopic<String, AnalysisResult> inputTopic;
-
@Before
- public void setUp() {
- environmentVariables.set("INTEGRITY_CHECK_ENABLED", "true");
- final var topology = new Topology();
- topology.addSource("sourceProcessor",
- new StringDeserializer(), new KafkaProtobufDeserializer<>(AnalysisResult.parser()), "input-topic");
- topology.addProcessor("metaResultProcessor",
- RepositoryMetaResultProcessor::new, "sourceProcessor");
-
- testDriver = new TopologyTestDriver(topology);
- inputTopic = testDriver.createInputTopic("input-topic",
- new StringSerializer(), new KafkaProtobufSerializer<>());
- }
+ public void setUp() throws Exception {
+ super.setUp();
- @After
- public void tearDown() {
- if (testDriver != null) {
- testDriver.close();
- }
+ environmentVariables.set("INTEGRITY_CHECK_ENABLED", "true");
}
@Test
- public void processNewMetaModelTest() {
+ public void processNewMetaModelTest() throws Exception {
final var published = Instant.now().minus(5, ChronoUnit.MINUTES);
final var result = AnalysisResult.newBuilder()
@@ -75,10 +48,10 @@ public void processNewMetaModelTest() {
.setSeconds(published.getEpochSecond()))
.build();
- inputTopic.pipeInput("pkg:maven/foo/bar", result);
+ final var processor = new RepositoryMetaResultProcessor();
+ processor.process(aConsumerRecord("pkg:maven/foo/bar", result).build());
- final RepositoryMetaComponent metaComponent =
- qm.getRepositoryMetaComponent(RepositoryType.MAVEN, "foo", "bar");
+ final RepositoryMetaComponent metaComponent = qm.getRepositoryMetaComponent(RepositoryType.MAVEN, "foo", "bar");
assertThat(metaComponent).isNotNull();
assertThat(metaComponent.getRepositoryType()).isEqualTo(RepositoryType.MAVEN);
assertThat(metaComponent.getNamespace()).isEqualTo("foo");
@@ -88,14 +61,15 @@ public void processNewMetaModelTest() {
}
@Test
- public void processWithoutComponentDetailsTest() {
+ public void processWithoutComponentDetailsTest() throws Exception {
final var result = AnalysisResult.newBuilder()
.setLatestVersion("1.2.4")
.setPublished(Timestamp.newBuilder()
.setSeconds(Instant.now().getEpochSecond()))
.build();
- inputTopic.pipeInput("foo", result);
+ final var processor = new RepositoryMetaResultProcessor();
+ processor.process(aConsumerRecord("pkg:maven/foo/bar", result).build());
final Query query = qm.getPersistenceManager().newQuery(RepositoryMetaComponent.class);
query.setResult("count(this)");
@@ -104,7 +78,7 @@ public void processWithoutComponentDetailsTest() {
}
@Test
- public void processUpdateExistingMetaModelTest() {
+ public void processUpdateExistingMetaModelTest() throws Exception {
final var metaComponent = new RepositoryMetaComponent();
metaComponent.setRepositoryType(RepositoryType.MAVEN);
metaComponent.setNamespace("foo");
@@ -124,7 +98,8 @@ public void processUpdateExistingMetaModelTest() {
.setSeconds(published.getEpochSecond()))
.build();
- inputTopic.pipeInput("pkg:maven/foo/bar", result);
+ final var processor = new RepositoryMetaResultProcessor();
+ processor.process(aConsumerRecord("pkg:maven/foo/bar", result).build());
qm.getPersistenceManager().refresh(metaComponent);
assertThat(metaComponent).isNotNull();
@@ -136,7 +111,7 @@ public void processUpdateExistingMetaModelTest() {
}
@Test
- public void processUpdateOutOfOrderMetaModelTest() {
+ public void processUpdateOutOfOrderMetaModelTest() throws Exception {
final var testStartTime = new Date();
final var metaComponent = new RepositoryMetaComponent();
@@ -159,7 +134,8 @@ public void processUpdateOutOfOrderMetaModelTest() {
.build();
// Pipe in a record that was produced 10 seconds ago, 5 seconds before metaComponent's lastCheck.
- inputTopic.pipeInput(new TestRecord<>("pkg:maven/foo/bar@1.2.3", result, Instant.now().minusSeconds(10)));
+ final var processor = new RepositoryMetaResultProcessor();
+ processor.process(aConsumerRecord("pkg:maven/foo/bar@1.2.3", result).withTimestamp(Instant.now().minusSeconds(10)).build());
qm.getPersistenceManager().refresh(metaComponent);
assertThat(metaComponent).isNotNull();
@@ -172,7 +148,7 @@ public void processUpdateOutOfOrderMetaModelTest() {
}
@Test
- public void processUpdateIntegrityResultTest() {
+ public void processUpdateIntegrityResultTest() throws Exception {
// Create an active project with one component.
final var projectA = qm.createProject("acme-app-a", null, "1.0.0", null, null, null, true, false);
final var componentProjectA = new Component();
@@ -204,7 +180,8 @@ public void processUpdateIntegrityResultTest() {
.setMetaSourceUrl("test").build())
.build();
- inputTopic.pipeInput(new TestRecord<>("pkg:maven/foo/bar@1.2.3", result, Instant.now()));
+ final var processor = new RepositoryMetaResultProcessor();
+ processor.process(aConsumerRecord("pkg:maven/foo/bar@1.2.3", result).build());
qm.getPersistenceManager().refresh(integrityMetaComponent);
integrityMetaComponent = qm.getIntegrityMetaComponent("pkg:maven/foo/bar@1.2.3");
assertThat(integrityMetaComponent).isNotNull();
@@ -225,7 +202,7 @@ public void processUpdateIntegrityResultTest() {
}
@Test
- public void testIntegrityCheckWhenComponentHashIsMissing() {
+ public void testIntegrityCheckWhenComponentHashIsMissing() throws Exception {
// Create an active project with one component.
final var projectA = qm.createProject("acme-app-a", null, "1.0.0", null, null, null, true, false);
final var componentProjectA = new Component();
@@ -256,7 +233,8 @@ public void testIntegrityCheckWhenComponentHashIsMissing() {
.setMetaSourceUrl("test").build())
.build();
- inputTopic.pipeInput(new TestRecord<>("pkg:maven/foo/bar@1.2.3", result, Instant.now()));
+ final var processor = new RepositoryMetaResultProcessor();
+ processor.process(aConsumerRecord("pkg:maven/foo/bar@1.2.3", result).build());
qm.getPersistenceManager().refresh(integrityMetaComponent);
integrityMetaComponent = qm.getIntegrityMetaComponent("pkg:maven/foo/bar@1.2.3");
assertThat(integrityMetaComponent).isNotNull();
@@ -275,7 +253,7 @@ public void testIntegrityCheckWhenComponentHashIsMissing() {
}
@Test
- public void testIntegrityAnalysisWillNotBePerformedIfNoIntegrityDataInResult() {
+ public void testIntegrityAnalysisWillNotBePerformedIfNoIntegrityDataInResult() throws Exception {
// Create an active project with one component.
final var projectA = qm.createProject("acme-app-a", null, "1.0.0", null, null, null, true, false);
final var componentProjectA = new Component();
@@ -306,14 +284,15 @@ public void testIntegrityAnalysisWillNotBePerformedIfNoIntegrityDataInResult() {
.setPurl("pkg:maven/foo/bar@1.2.3"))
.build();
- inputTopic.pipeInput(new TestRecord<>("pkg:maven/foo/bar@1.2.3", result, Instant.now()));
+ final var processor = new RepositoryMetaResultProcessor();
+ processor.process(aConsumerRecord("pkg:maven/foo/bar@1.2.3", result).build());
IntegrityAnalysis analysis = qm.getIntegrityAnalysisByComponentUuid(c.getUuid());
assertThat(analysis).isNull();
}
@Test
- public void testIntegrityCheckWillNotBeDoneIfComponentUuidAndIntegrityDataIsMissing() {
+ public void testIntegrityCheckWillNotBeDoneIfComponentUuidAndIntegrityDataIsMissing() throws Exception {
// Create an active project with one component.
final var projectA = qm.createProject("acme-app-a", null, "1.0.0", null, null, null, true, false);
final var componentProjectA = new Component();
@@ -344,14 +323,15 @@ public void testIntegrityCheckWillNotBeDoneIfComponentUuidAndIntegrityDataIsMiss
//component uuid has not been set
.build();
- inputTopic.pipeInput(new TestRecord<>("pkg:maven/foo/bar@1.2.3", result, Instant.now()));
+ final var processor = new RepositoryMetaResultProcessor();
+ processor.process(aConsumerRecord("pkg:maven/foo/bar@1.2.3", result).build());
IntegrityAnalysis analysis = qm.getIntegrityAnalysisByComponentUuid(c.getUuid());
assertThat(analysis).isNull();
}
@Test
- public void testIntegrityIfResultHasIntegrityDataAndComponentUuidIsMissing() {
+ public void testIntegrityIfResultHasIntegrityDataAndComponentUuidIsMissing() throws Exception {
// Create an active project with one component.
final var projectA = qm.createProject("acme-app-a", null, "1.0.0", null, null, null, true, false);
final var componentProjectA = new Component();
@@ -383,7 +363,8 @@ public void testIntegrityIfResultHasIntegrityDataAndComponentUuidIsMissing() {
//component uuid has not been set
.build();
- inputTopic.pipeInput(new TestRecord<>("pkg:maven/foo/bar@1.2.3", result, Instant.now()));
+ final var processor = new RepositoryMetaResultProcessor();
+ processor.process(aConsumerRecord("pkg:maven/foo/bar@1.2.3", result).build());
IntegrityAnalysis analysis = qm.getIntegrityAnalysisByComponentUuid(c.getUuid());
assertThat(analysis).isNotNull();
@@ -393,8 +374,7 @@ public void testIntegrityIfResultHasIntegrityDataAndComponentUuidIsMissing() {
@Test
- public void testIntegrityCheckWillNotBeDoneIfComponentIsNotInDb() {
-
+ public void testIntegrityCheckWillNotBeDoneIfComponentIsNotInDb() throws Exception {
UUID uuid = UUID.randomUUID();
var integrityMetaComponent = new IntegrityMetaComponent();
@@ -413,14 +393,15 @@ public void testIntegrityCheckWillNotBeDoneIfComponentIsNotInDb() {
.build();
- inputTopic.pipeInput(new TestRecord<>("pkg:maven/foo/bar@1.2.3", result, Instant.now()));
+ final var processor = new RepositoryMetaResultProcessor();
+ processor.process(aConsumerRecord("pkg:maven/foo/bar@1.2.3", result).build());
IntegrityAnalysis analysis = qm.getIntegrityAnalysisByComponentUuid(uuid);
assertThat(analysis).isNull();
}
@Test
- public void testIntegrityCheckShouldReturnComponentHashMissing() {
+ public void testIntegrityCheckShouldReturnComponentHashMissing() throws Exception {
// Create an active project with one component.
final var projectA = qm.createProject("acme-app-a", null, "1.0.0", null, null, null, true, false);
final var componentProjectA = new Component();
@@ -450,7 +431,8 @@ public void testIntegrityCheckShouldReturnComponentHashMissing() {
.setMetaSourceUrl("test").build())
.build();
- inputTopic.pipeInput(new TestRecord<>("pkg:maven/foo/bar@1.2.3", result, Instant.now()));
+ final var processor = new RepositoryMetaResultProcessor();
+ processor.process(aConsumerRecord("pkg:maven/foo/bar@1.2.3", result).build());
qm.getPersistenceManager().refresh(integrityMetaComponent);
integrityMetaComponent = qm.getIntegrityMetaComponent("pkg:maven/foo/bar@1.2.3");
assertThat(integrityMetaComponent).isNotNull();
@@ -469,7 +451,7 @@ public void testIntegrityCheckShouldReturnComponentHashMissing() {
}
@Test
- public void testIntegrityCheckShouldReturnComponentHashMissingAndMatchUnknown() {
+ public void testIntegrityCheckShouldReturnComponentHashMissingAndMatchUnknown() throws Exception {
// Create an active project with one component.
final var projectA = qm.createProject("acme-app-a", null, "1.0.0", null, null, null, true, false);
final var componentProjectA = new Component();
@@ -498,7 +480,8 @@ public void testIntegrityCheckShouldReturnComponentHashMissingAndMatchUnknown()
.setMetaSourceUrl("test").build())
.build();
- inputTopic.pipeInput(new TestRecord<>("pkg:maven/foo/bar@1.2.3", result, Instant.now()));
+ final var processor = new RepositoryMetaResultProcessor();
+ processor.process(aConsumerRecord("pkg:maven/foo/bar@1.2.3", result).build());
qm.getPersistenceManager().refresh(integrityMetaComponent);
integrityMetaComponent = qm.getIntegrityMetaComponent("pkg:maven/foo/bar@1.2.3");
assertThat(integrityMetaComponent).isNotNull();
@@ -515,7 +498,7 @@ public void testIntegrityCheckShouldReturnComponentHashMissingAndMatchUnknown()
}
@Test
- public void testIntegrityCheckShouldFailIfNoHashMatch() {
+ public void testIntegrityCheckShouldFailIfNoHashMatch() throws Exception {
// Create an active project with one component.
final var projectA = qm.createProject("acme-app-a", null, "1.0.0", null, null, null, true, false);
final var componentProjectA = new Component();
@@ -548,7 +531,8 @@ public void testIntegrityCheckShouldFailIfNoHashMatch() {
.setMetaSourceUrl("test").build())
.build();
- inputTopic.pipeInput(new TestRecord<>("pkg:maven/foo/bar@1.2.3", result, Instant.now()));
+ final var processor = new RepositoryMetaResultProcessor();
+ processor.process(aConsumerRecord("pkg:maven/foo/bar@1.2.3", result).build());
qm.getPersistenceManager().refresh(integrityMetaComponent);
integrityMetaComponent = qm.getIntegrityMetaComponent("pkg:maven/foo/bar@1.2.3");
assertThat(integrityMetaComponent).isNotNull();
@@ -567,7 +551,7 @@ public void testIntegrityCheckShouldFailIfNoHashMatch() {
}
@Test
- public void processUpdateIntegrityResultNotAvailableTest() {
+ public void processUpdateIntegrityResultNotAvailableTest() throws Exception {
var integrityMetaComponent = new IntegrityMetaComponent();
integrityMetaComponent.setPurl("pkg:maven/foo/bar@1.2.3");
integrityMetaComponent.setStatus(FetchStatus.IN_PROGRESS);
@@ -582,7 +566,8 @@ public void processUpdateIntegrityResultNotAvailableTest() {
.setIntegrityMeta(IntegrityMeta.newBuilder().setMetaSourceUrl("test").build())
.build();
- inputTopic.pipeInput(new TestRecord<>("pkg:maven/foo/bar@1.2.3", result, Instant.now()));
+ final var processor = new RepositoryMetaResultProcessor();
+ processor.process(aConsumerRecord("pkg:maven/foo/bar@1.2.3", result).build());
qm.getPersistenceManager().refresh(integrityMetaComponent);
integrityMetaComponent = qm.getIntegrityMetaComponent("pkg:maven/foo/bar@1.2.3");
assertThat(integrityMetaComponent).isNotNull();
@@ -594,8 +579,7 @@ public void processUpdateIntegrityResultNotAvailableTest() {
}
@Test
- public void processUpdateOldIntegrityResultSent() {
-
+ public void processUpdateOldIntegrityResultSent() throws Exception {
Date date = Date.from(Instant.now().minus(15, ChronoUnit.MINUTES));
var integrityMetaComponent = new IntegrityMetaComponent();
integrityMetaComponent.setPurl("pkg:maven/foo/bar@1.2.3");
@@ -613,7 +597,8 @@ public void processUpdateOldIntegrityResultSent() {
.setSha1("a94a8fe5ccb19ba61c4c0873d391e587982fbbd3").setMetaSourceUrl("test2").build())
.build();
- inputTopic.pipeInput(new TestRecord<>("pkg:maven/foo/bar@1.2.3", result, Instant.now()));
+ final var processor = new RepositoryMetaResultProcessor();
+ processor.process(aConsumerRecord("pkg:maven/foo/bar@1.2.3", result).build());
qm.getPersistenceManager().refresh(integrityMetaComponent);
integrityMetaComponent = qm.getIntegrityMetaComponent("pkg:maven/foo/bar@1.2.3");
assertThat(integrityMetaComponent).isNotNull();
@@ -626,7 +611,7 @@ public void processUpdateOldIntegrityResultSent() {
@Test
- public void processBothMetaModelAndIntegrityMeta() {
+ public void processBothMetaModelAndIntegrityMeta() throws Exception {
final var published = Instant.now().minus(5, ChronoUnit.MINUTES);
var integrityMetaComponent = new IntegrityMetaComponent();
integrityMetaComponent.setPurl("pkg:maven/foo/bar@1.2.3");
@@ -646,7 +631,8 @@ public void processBothMetaModelAndIntegrityMeta() {
.setMetaSourceUrl("test").build())
.build();
- inputTopic.pipeInput("pkg:maven/foo/bar", result);
+ final var processor = new RepositoryMetaResultProcessor();
+ processor.process(aConsumerRecord("pkg:maven/foo/bar", result).build());
qm.getPersistenceManager().refresh(integrityMetaComponent);
final RepositoryMetaComponent metaComponent =
qm.getRepositoryMetaComponent(RepositoryType.MAVEN, "foo", "bar");
@@ -666,7 +652,7 @@ public void processBothMetaModelAndIntegrityMeta() {
}
@Test
- public void processUpdateIntegrityResultNotSentTest() {
+ public void processUpdateIntegrityResultNotSentTest() throws Exception {
var integrityMetaComponent = new IntegrityMetaComponent();
integrityMetaComponent.setPurl("pkg:maven/foo/bar@1.2.3");
integrityMetaComponent.setStatus(FetchStatus.IN_PROGRESS);
@@ -680,7 +666,8 @@ public void processUpdateIntegrityResultNotSentTest() {
.setPurl("pkg:maven/foo/bar@1.2.3"))
.build();
- inputTopic.pipeInput(new TestRecord<>("pkg:maven/foo/bar@1.2.3", result, Instant.now()));
+ final var processor = new RepositoryMetaResultProcessor();
+ processor.process(aConsumerRecord("pkg:maven/foo/bar@1.2.3", result).build());
qm.getPersistenceManager().refresh(integrityMetaComponent);
integrityMetaComponent = qm.getIntegrityMetaComponent("pkg:maven/foo/bar@1.2.3");
assertThat(integrityMetaComponent).isNotNull();
@@ -690,4 +677,5 @@ public void processUpdateIntegrityResultNotSentTest() {
assertThat(integrityMetaComponent.getLastFetch()).isEqualTo(date);
assertThat(integrityMetaComponent.getStatus()).isEqualTo(FetchStatus.IN_PROGRESS);
}
+
}
\ No newline at end of file
diff --git a/src/test/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessorTest.java b/src/test/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessorTest.java
index 4945ca0b4..e6a30780e 100644
--- a/src/test/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessorTest.java
+++ b/src/test/java/org/dependencytrack/event/kafka/processor/VulnerabilityScanResultProcessorTest.java
@@ -5,26 +5,16 @@
import junitparams.Parameters;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeaders;
-import org.apache.kafka.streams.StreamsBuilder;
-import org.apache.kafka.streams.TestInputTopic;
-import org.apache.kafka.streams.TestOutputTopic;
-import org.apache.kafka.streams.TopologyTestDriver;
-import org.apache.kafka.streams.kstream.Consumed;
-import org.apache.kafka.streams.kstream.Produced;
-import org.apache.kafka.streams.test.TestRecord;
import org.cyclonedx.proto.v1_4.Advisory;
import org.cyclonedx.proto.v1_4.Bom;
import org.cyclonedx.proto.v1_4.Property;
import org.cyclonedx.proto.v1_4.Source;
import org.cyclonedx.proto.v1_4.VulnerabilityRating;
import org.cyclonedx.proto.v1_4.VulnerabilityReference;
-import org.dependencytrack.AbstractPostgresEnabledTest;
import org.dependencytrack.TestCacheManager;
+import org.dependencytrack.event.kafka.KafkaEventDispatcher;
import org.dependencytrack.event.kafka.KafkaEventHeaders;
import org.dependencytrack.event.kafka.KafkaTopics;
-import org.dependencytrack.event.kafka.serialization.KafkaProtobufDeserializer;
-import org.dependencytrack.event.kafka.serialization.KafkaProtobufSerde;
-import org.dependencytrack.event.kafka.serialization.KafkaProtobufSerializer;
import org.dependencytrack.model.Analysis;
import org.dependencytrack.model.AnalysisComment;
import org.dependencytrack.model.AnalysisJustification;
@@ -56,7 +46,6 @@
import org.dependencytrack.proto.vulnanalysis.v1.ScanResult;
import org.dependencytrack.proto.vulnanalysis.v1.Scanner;
import org.dependencytrack.proto.vulnanalysis.v1.ScannerResult;
-import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -88,11 +77,9 @@
import static org.dependencytrack.util.KafkaTestUtil.deserializeValue;
@RunWith(JUnitParamsRunner.class)
-public class VulnerabilityScanResultProcessorTest extends AbstractPostgresEnabledTest {
+public class VulnerabilityScanResultProcessorTest extends AbstractProcessorTest {
- private TopologyTestDriver testDriver;
- private TestInputTopic inputTopic;
- private TestOutputTopic outputTopic;
+ private VulnerabilityScanResultProcessor processor;
@Before
public void setUp() throws Exception {
@@ -102,35 +89,13 @@ public void setUp() throws Exception {
final var scriptHost = new CelPolicyScriptHost(cacheManager, CelPolicyType.VULNERABILITY);
final var policyProvider = new DatabaseVulnerabilityPolicyProvider();
final var policyEvaluator = new CelVulnerabilityPolicyEvaluator(policyProvider, scriptHost, cacheManager);
-
- final var streamsBuilder = new StreamsBuilder();
- streamsBuilder
- .stream("input-topic", Consumed
- .with(new KafkaProtobufSerde<>(ScanKey.parser()), new KafkaProtobufSerde<>(ScanResult.parser())))
- .processValues(() -> new VulnerabilityScanResultProcessor(policyEvaluator))
- .to("output-topic", Produced
- .with(new KafkaProtobufSerde<>(ScanKey.parser()), new KafkaProtobufSerde<>(ScanResult.parser())));
-
- testDriver = new TopologyTestDriver(streamsBuilder.build());
- inputTopic = testDriver.createInputTopic("input-topic",
- new KafkaProtobufSerializer<>(), new KafkaProtobufSerializer<>());
- outputTopic = testDriver.createOutputTopic("output-topic",
- new KafkaProtobufDeserializer<>(ScanKey.parser()), new KafkaProtobufDeserializer<>(ScanResult.parser()));
+ processor = new VulnerabilityScanResultProcessor(new KafkaEventDispatcher(), policyEvaluator);
new CweImporter().processCweDefinitions(); // Required for CWE mapping
}
- @After
- public void tearDown() {
- if (testDriver != null) {
- testDriver.close();
- }
-
- super.tearDown();
- }
-
@Test
- public void dropFailedScanResultTest() {
+ public void dropFailedScanResultTest() throws Exception {
final var project = new Project();
project.setName("acme-app");
project.setVersion("1.0.0");
@@ -152,11 +117,14 @@ public void dropFailedScanResultTest() {
.setFailureReason("just because"))
.build();
- inputTopic.pipeInput(scanKey, scanResult);
-
- assertThat(outputTopic.readValuesToList()).containsOnly(scanResult);
+ processor.process(aConsumerRecord(scanKey, scanResult).build());
assertThat(kafkaMockProducer.history()).satisfiesExactly(
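+ // A copy of the scan result, stripped of scanner BOMs, must be emitted to the processed-results topic first.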
+ record -> {
+ assertThat(record.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED.name());
+ final ScanResult strippedResult = deserializeValue(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+ assertThat(strippedResult.getScannerResultsList()).noneMatch(ScannerResult::hasBom);
+ },
record -> {
assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_ANALYZER.name());
final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_ANALYZER, record);
@@ -172,7 +140,7 @@ record -> {
}
@Test
- public void dropPendingScanResultTest() {
+ public void dropPendingScanResultTest() throws Exception {
final var project = new Project();
project.setName("acme-app");
project.setVersion("1.0.0");
@@ -193,15 +161,17 @@ public void dropPendingScanResultTest() {
.setStatus(SCAN_STATUS_PENDING))
.build();
- inputTopic.pipeInput(scanKey, scanResult);
-
- assertThat(outputTopic.readValuesToList()).containsOnly(scanResult);
+ processor.process(aConsumerRecord(scanKey, scanResult).build());
- assertThat(kafkaMockProducer.history()).isEmpty();
+ assertThat(kafkaMockProducer.history()).satisfiesExactly(record -> {
+ assertThat(record.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED.name());
+ final ScanResult strippedResult = deserializeValue(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+ assertThat(strippedResult.getScannerResultsList()).noneMatch(ScannerResult::hasBom);
+ });
}
@Test
- public void processSuccessfulScanResultWhenComponentDoesNotExistTest() {
+ public void processSuccessfulScanResultWhenComponentDoesNotExistTest() throws Exception {
final var componentUuid = UUID.randomUUID();
final var scanToken = UUID.randomUUID().toString();
final var scanKey = ScanKey.newBuilder().setScanToken(scanToken).setComponentUuid(componentUuid.toString()).build();
@@ -216,15 +186,17 @@ public void processSuccessfulScanResultWhenComponentDoesNotExistTest() {
.setBom(Bom.newBuilder().addVulnerabilities(createVuln("INT-001", "INTERNAL"))))
.build();
- inputTopic.pipeInput(scanKey, scanResult);
+ processor.process(aConsumerRecord(scanKey, scanResult).build());
- assertThat(outputTopic.readValuesToList()).containsOnly(scanResult);
-
- assertThat(kafkaMockProducer.history()).isEmpty();
+ assertThat(kafkaMockProducer.history()).satisfiesExactly(record -> {
+ assertThat(record.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED.name());
+ final ScanResult strippedResult = deserializeValue(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+ assertThat(strippedResult.getScannerResultsList()).noneMatch(ScannerResult::hasBom);
+ });
}
@Test
- public void processSuccessfulScanResult() {
+ public void processSuccessfulScanResult() throws Exception {
final var project = new Project();
project.setName("acme-app");
project.setVersion("1.0.0");
@@ -264,9 +236,7 @@ public void processSuccessfulScanResult() {
headers.add(KafkaEventHeaders.VULN_ANALYSIS_LEVEL, VulnerabilityAnalysisLevel.BOM_UPLOAD_ANALYSIS.name().getBytes());
headers.add(KafkaEventHeaders.IS_NEW_COMPONENT, "true".getBytes());
- inputTopic.pipeInput(new TestRecord<>(scanKey, scanResult, headers));
-
- assertThat(outputTopic.readValuesToList()).containsOnly(scanResult);
+ processor.process(aConsumerRecord(scanKey, scanResult).withHeaders(headers).build());
qm.getPersistenceManager().refresh(component);
assertThat(component.getVulnerabilities()).satisfiesExactlyInAnyOrder(
@@ -295,6 +265,11 @@ public void processSuccessfulScanResult() {
);
assertThat(kafkaMockProducer.history()).satisfiesExactly(
+ record -> {
+ assertThat(record.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED.name());
+ final ScanResult strippedResult = deserializeValue(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+ assertThat(strippedResult.getScannerResultsList()).noneMatch(ScannerResult::hasBom);
+ },
record -> {
assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_NEW_VULNERABLE_DEPENDENCY.name());
final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_NEW_VULNERABLE_DEPENDENCY, record);
@@ -334,7 +309,7 @@ record -> {
}
@Test
- public void processSuccessfulScanResultWithExistingFindingTest() {
+ public void processSuccessfulScanResultWithExistingFindingTest() throws Exception {
final var project = new Project();
project.setName("acme-app");
project.setVersion("1.0.0");
@@ -362,7 +337,7 @@ public void processSuccessfulScanResultWithExistingFindingTest() {
.setBom(Bom.newBuilder().addVulnerabilities(createVuln("CVE-001", "NVD"))))
.build();
- inputTopic.pipeInput(scanKey, scanResult);
+ processor.process(aConsumerRecord(scanKey, scanResult).build());
qm.getPersistenceManager().refreshAll(component, vulnerability);
assertThat(component.getVulnerabilities()).satisfiesExactly(
@@ -378,7 +353,11 @@ public void processSuccessfulScanResultWithExistingFindingTest() {
assertThat(attribution.getAnalyzerIdentity()).isEqualTo(AnalyzerIdentity.OSSINDEX_ANALYZER);
// Because the vulnerability was reported already, no notification must be sent.
- assertThat(kafkaMockProducer.history()).isEmpty();
+ assertThat(kafkaMockProducer.history()).satisfiesExactly(record -> {
+ assertThat(record.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED.name());
+ final ScanResult strippedResult = deserializeValue(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+ assertThat(strippedResult.getScannerResultsList()).noneMatch(ScannerResult::hasBom);
+ });
}
private Object[] canUpdateExistingVulnerabilityTestParams() {
@@ -422,7 +401,7 @@ private Object[] canUpdateExistingVulnerabilityTestParams() {
@Parameters(method = "canUpdateExistingVulnerabilityTestParams")
public void canUpdateExistingVulnerabilityTest(final String vulnId, final String vulnSource, final Scanner scanner,
final ConfigPropertyConstants mirrorSourceConfigProperty,
- final String mirrorSourceConfigPropertyValue, final boolean expectModified) {
+ final String mirrorSourceConfigPropertyValue, final boolean expectModified) throws Exception {
if (mirrorSourceConfigProperty != null && mirrorSourceConfigPropertyValue != null) {
qm.createConfigProperty(
mirrorSourceConfigProperty.getGroupName(),
@@ -462,7 +441,7 @@ public void canUpdateExistingVulnerabilityTest(final String vulnId, final String
.build())))
.build();
- inputTopic.pipeInput(scanKey, scanResult);
+ processor.process(aConsumerRecord(scanKey, scanResult).build());
qm.getPersistenceManager().refreshAll(component, vulnerability);
assertThat(component.getVulnerabilities()).satisfiesExactly(
@@ -479,7 +458,7 @@ public void canUpdateExistingVulnerabilityTest(final String vulnId, final String
}
@Test
- public void updateExistingVulnerabilityTest() {
+ public void updateExistingVulnerabilityTest() throws Exception {
final var project = new Project();
project.setName("acme-app");
project.setVersion("1.0.0");
@@ -564,7 +543,7 @@ public void updateExistingVulnerabilityTest() {
.build())))
.build();
- inputTopic.pipeInput(scanKey, scanResult);
+ processor.process(aConsumerRecord(scanKey, scanResult).build());
qm.getPersistenceManager().refreshAll(component, vulnerability);
assertThat(component.getVulnerabilities()).hasSize(1);
@@ -601,7 +580,7 @@ public void updateExistingVulnerabilityTest() {
}
@Test
- public void analysisThroughPolicyNewAnalysisTest() {
+ public void analysisThroughPolicyNewAnalysisTest() throws Exception {
final var project = new Project();
project.setName("acme-app");
project.setVersion("1.0.0");
@@ -651,8 +630,7 @@ public void analysisThroughPolicyNewAnalysisTest() {
createVuln(newVuln.getVulnId(), newVuln.getSource())
))))
.build();
- inputTopic.pipeInput(new TestRecord<>(scanKey, scanResult));
- assertThat(outputTopic.readValuesToList()).containsOnly(scanResult);
+ processor.process(aConsumerRecord(scanKey, scanResult).build());
qm.getPersistenceManager().evictAll();
assertThat(component.getVulnerabilities()).satisfiesExactly(
@@ -691,6 +669,11 @@ public void analysisThroughPolicyNewAnalysisTest() {
// TODO: There should be PROJECT_AUDIT_CHANGE notifications.
assertThat(kafkaMockProducer.history()).satisfiesExactly(
+ record -> {
+ assertThat(record.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED.name());
+ final ScanResult strippedResult = deserializeValue(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+ assertThat(strippedResult.getScannerResultsList()).noneMatch(ScannerResult::hasBom);
+ },
record -> {
assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_NEW_VULNERABILITY.name());
final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_NEW_VULNERABILITY, record);
@@ -708,7 +691,7 @@ record -> {
}
@Test
- public void analysisThroughPolicyNewAnalysisSuppressionTest() {
+ public void analysisThroughPolicyNewAnalysisSuppressionTest() throws Exception {
final var project = new Project();
project.setName("acme-app");
project.setVersion("1.0.0");
@@ -750,8 +733,7 @@ public void analysisThroughPolicyNewAnalysisSuppressionTest() {
createVuln(newVuln.getVulnId(), newVuln.getSource())
))))
.build();
- inputTopic.pipeInput(new TestRecord<>(scanKey, scanResult));
- assertThat(outputTopic.readValuesToList()).containsOnly(scanResult);
+ processor.process(aConsumerRecord(scanKey, scanResult).build());
qm.getPersistenceManager().evictAll();
assertThat(component.getVulnerabilities()).satisfiesExactly(
@@ -785,11 +767,15 @@ public void analysisThroughPolicyNewAnalysisSuppressionTest() {
// The vulnerability was suppressed, so no notifications to be expected.
// TODO: There should be PROJECT_AUDIT_CHANGE notifications.
- assertThat(kafkaMockProducer.history()).isEmpty();
+ assertThat(kafkaMockProducer.history()).satisfiesExactly(record -> {
+ assertThat(record.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED.name());
+ final ScanResult strippedResult = deserializeValue(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+ assertThat(strippedResult.getScannerResultsList()).noneMatch(ScannerResult::hasBom);
+ });
}
@Test
- public void analysisThroughPolicyExistingDifferentAnalysisTest() {
+ public void analysisThroughPolicyExistingDifferentAnalysisTest() throws Exception {
final var project = new Project();
project.setName("acme-app");
project.setVersion("1.0.0");
@@ -851,8 +837,7 @@ public void analysisThroughPolicyExistingDifferentAnalysisTest() {
createVuln(vuln.getVulnId(), vuln.getSource())
))))
.build();
- inputTopic.pipeInput(new TestRecord<>(scanKey, scanResult));
- assertThat(outputTopic.readValuesToList()).containsOnly(scanResult);
+ processor.process(aConsumerRecord(scanKey, scanResult).build());
qm.getPersistenceManager().evictAll();
assertThat(component.getVulnerabilities()).satisfiesExactly(
@@ -896,11 +881,15 @@ public void analysisThroughPolicyExistingDifferentAnalysisTest() {
// The vulnerability already existed, so no notifications to be expected.
// TODO: There should be PROJECT_AUDIT_CHANGE notifications.
- assertThat(kafkaMockProducer.history()).isEmpty();
+ assertThat(kafkaMockProducer.history()).satisfiesExactly(record -> {
+ assertThat(record.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED.name());
+ final ScanResult strippedResult = deserializeValue(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+ assertThat(strippedResult.getScannerResultsList()).noneMatch(ScannerResult::hasBom);
+ });
}
@Test
- public void analysisThroughPolicyExistingEqualAnalysisTest() {
+ public void analysisThroughPolicyExistingEqualAnalysisTest() throws Exception {
final var project = new Project();
project.setName("acme-app");
project.setVersion("1.0.0");
@@ -957,8 +946,7 @@ public void analysisThroughPolicyExistingEqualAnalysisTest() {
createVuln(vuln.getVulnId(), vuln.getSource())
))))
.build();
- inputTopic.pipeInput(new TestRecord<>(scanKey, scanResult));
- assertThat(outputTopic.readValuesToList()).containsOnly(scanResult);
+ processor.process(aConsumerRecord(scanKey, scanResult).build());
qm.getPersistenceManager().evictAll();
assertThat(component.getVulnerabilities()).satisfiesExactly(
@@ -984,11 +972,15 @@ public void analysisThroughPolicyExistingEqualAnalysisTest() {
});
// The vulnerability already existed, so no notifications to be expected.
- assertThat(kafkaMockProducer.history()).isEmpty();
+ assertThat(kafkaMockProducer.history()).satisfiesExactly(record -> {
+ assertThat(record.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED.name());
+ final ScanResult strippedResult = deserializeValue(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+ assertThat(strippedResult.getScannerResultsList()).noneMatch(ScannerResult::hasBom);
+ });
}
@Test
- public void analysisThroughPolicyWithAliasesTest() {
+ public void analysisThroughPolicyWithAliasesTest() throws Exception {
final var project = new Project();
project.setName("acme-app");
project.setVersion("1.0.0");
@@ -1061,8 +1053,7 @@ public void analysisThroughPolicyWithAliasesTest() {
.build()
))))
.build();
- inputTopic.pipeInput(new TestRecord<>(scanKey, scanResult));
- assertThat(outputTopic.readValuesToList()).containsOnly(scanResult);
+ processor.process(aConsumerRecord(scanKey, scanResult).build());
qm.getPersistenceManager().evictAll();
assertThat(component.getVulnerabilities()).satisfiesExactlyInAnyOrder(
@@ -1085,6 +1076,23 @@ public void analysisThroughPolicyWithAliasesTest() {
assertThat(qm.getAnalysis(component, v)).isNull();
}
);
+
+ assertThat(kafkaMockProducer.history()).satisfiesExactly(
+ record -> {
+ assertThat(record.topic()).isEqualTo(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED.name());
+ final ScanResult strippedResult = deserializeValue(KafkaTopics.VULN_ANALYSIS_RESULT_PROCESSED, record);
+ assertThat(strippedResult.getScannerResultsList()).noneMatch(ScannerResult::hasBom);
+ },
+ record -> {
+ assertThat(record.topic()).isEqualTo(KafkaTopics.NOTIFICATION_NEW_VULNERABILITY.name());
+ final Notification notification = deserializeValue(KafkaTopics.NOTIFICATION_NEW_VULNERABILITY, record);
+ assertThat(notification.getScope()).isEqualTo(SCOPE_PORTFOLIO);
+ assertThat(notification.getLevel()).isEqualTo(LEVEL_INFORMATIONAL);
+ assertThat(notification.getGroup()).isEqualTo(GROUP_NEW_VULNERABILITY);
+ assertThat(notification.getSubject().is(NewVulnerabilitySubject.class)).isTrue();
+ final var subject = notification.getSubject().unpack(NewVulnerabilitySubject.class);
+ assertThat(subject.getVulnerabilityAnalysisLevel()).isEqualTo("PERIODIC_ANALYSIS");
+ });
}
private org.cyclonedx.proto.v1_4.Vulnerability createVuln(final String id, final String source) {
diff --git a/src/test/java/org/dependencytrack/event/kafka/processor/api/ProcessorManagerTest.java b/src/test/java/org/dependencytrack/event/kafka/processor/api/ProcessorManagerTest.java
new file mode 100644
index 000000000..0602fd4f6
--- /dev/null
+++ b/src/test/java/org/dependencytrack/event/kafka/processor/api/ProcessorManagerTest.java
@@ -0,0 +1,173 @@
+package org.dependencytrack.event.kafka.processor.api;
+
+import alpine.Config;
+import net.mguenther.kafka.junit.ExternalKafkaCluster;
+import net.mguenther.kafka.junit.KeyValue;
+import net.mguenther.kafka.junit.SendKeyValues;
+import net.mguenther.kafka.junit.TopicConfig;
+import org.apache.kafka.common.serialization.Serdes;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.dependencytrack.common.ConfigKey;
+import org.dependencytrack.event.kafka.KafkaTopics.Topic;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.testcontainers.redpanda.RedpandaContainer;
+import org.testcontainers.utility.DockerImageName;
+
+import java.time.Duration;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG;
+import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.awaitility.Awaitility.await;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+public class ProcessorManagerTest {
+
+ @Rule
+ public RedpandaContainer kafkaContainer = new RedpandaContainer(DockerImageName
+ .parse("docker.redpanda.com/vectorized/redpanda:v23.2.13"));
+
+ private ExternalKafkaCluster kafka;
+ private Config configMock;
+
+ @Before
+ public void setUp() {
+ kafka = ExternalKafkaCluster.at(kafkaContainer.getBootstrapServers());
+
+ configMock = mock(Config.class);
+ when(configMock.getProperty(eq(ConfigKey.KAFKA_BOOTSTRAP_SERVERS)))
+ .thenReturn(kafkaContainer.getBootstrapServers());
+ }
+
+ @Test
+ public void test() throws Exception {
+ final var inputTopic = new Topic<>("input", Serdes.String(), Serdes.String());
+ kafka.createTopic(TopicConfig.withName(inputTopic.name()).withNumberOfPartitions(3));
+
+ final var recordsProcessed = new AtomicInteger(0);
+
+ when(configMock.getPassThroughProperties(eq("kafka.processor.foo")))
+ .thenReturn(Map.of(
+ "kafka.processor.foo.processing.order", "key",
+ "kafka.processor.foo.max.concurrency", "5"
+ ));
+ when(configMock.getPassThroughProperties(eq("kafka.processor.foo.consumer")))
+ .thenReturn(Map.of(
+ "kafka.processor.foo.consumer.auto.offset.reset", "earliest"
+ ));
+
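+ // A trivial processor that merely counts the records it receives.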
+ final Processor<String, String> processor =
+ record -> recordsProcessed.incrementAndGet();
+
+ try (final var processorManager = new ProcessorManager(configMock)) {
+ processorManager.registerProcessor("foo", processor, inputTopic);
+
+ for (int i = 0; i < 100; i++) {
+ kafka.send(SendKeyValues.to("input", List.of(new KeyValue<>("foo" + i, "bar" + i)))
+ .with(KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName())
+ .with(VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()));
+ }
+
+ processorManager.startAll();
+
+ await("Record Processing")
+ .atMost(Duration.ofSeconds(5))
+ .untilAsserted(() -> assertThat(recordsProcessed).hasValue(100));
+ }
+ }
+
+ @Test
+ public void testSingleRecordProcessorRetry() throws Exception {
+ final var inputTopic = new Topic<>("input", Serdes.String(), Serdes.String());
+ kafka.createTopic(TopicConfig.withName(inputTopic.name()).withNumberOfPartitions(3));
+
+ final var attemptsCounter = new AtomicInteger(0);
+
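+ // Simulate three transient failures followed by a success, so four processing attempts are expected.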
+ final var objectSpy = spy(new Object());
+ when(objectSpy.toString())
+ .thenThrow(new RuntimeException(new TimeoutException()))
+ .thenThrow(new RuntimeException(new TimeoutException()))
+ .thenThrow(new RuntimeException(new TimeoutException()))
+ .thenReturn("done");
+
+ final Processor<String, String> processor = record -> {
+ attemptsCounter.incrementAndGet();
+ objectSpy.toString();
+ };
+
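+ // Use short, constant retry delays so all retries complete well within the await timeout below.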
+ when(configMock.getPassThroughProperties(eq("kafka.processor.foo")))
+ .thenReturn(Map.of(
+ "kafka.processor.foo.retry.initial.delay.ms", "5",
+ "kafka.processor.foo.retry.multiplier", "1",
+ "kafka.processor.foo.retry.max.delay.ms", "10"
+ ));
+ when(configMock.getPassThroughProperties(eq("kafka.processor.foo.consumer")))
+ .thenReturn(Map.of(
+ "kafka.processor.foo.consumer.auto.offset.reset", "earliest"
+ ));
+
+ try (final var processorManager = new ProcessorManager(configMock)) {
+ processorManager.registerProcessor("foo", processor, inputTopic);
+
+ kafka.send(SendKeyValues.to("input", List.of(new KeyValue<>("foo", "bar")))
+ .with(KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName())
+ .with(VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()));
+
+ processorManager.startAll();
+
+ await("Record Processing")
+ .atMost(Duration.ofSeconds(5))
+ .untilAsserted(() -> assertThat(attemptsCounter).hasValue(4));
+ }
+ }
+
+ @Test
+ public void testBatchProcessor() throws Exception {
+ final var inputTopic = new Topic<>("input", Serdes.String(), Serdes.String());
+ kafka.createTopic(TopicConfig.withName(inputTopic.name()).withNumberOfPartitions(3));
+
+ final var recordsProcessed = new AtomicInteger(0);
+ final var actualBatchSizes = new ConcurrentLinkedQueue<Integer>();
+
+ when(configMock.getPassThroughProperties(eq("kafka.processor.foo")))
+ .thenReturn(Map.of(
+ "kafka.processor.foo.processing.order", "key",
+ "kafka.processor.foo.max.batch.size", "100"
+ ));
+ when(configMock.getPassThroughProperties(eq("kafka.processor.foo.consumer")))
+ .thenReturn(Map.of(
+ "kafka.processor.foo.consumer.auto.offset.reset", "earliest"
+ ));
+
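+ // Track the size of every batch handed to the processor.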
+ final BatchProcessor<String, String> recordProcessor = records -> {
+ recordsProcessed.addAndGet(records.size());
+ actualBatchSizes.add(records.size());
+ };
+
+ try (final var processorManager = new ProcessorManager(configMock)) {
+ processorManager.registerBatchProcessor("foo", recordProcessor, inputTopic);
+
+ for (int i = 0; i < 1_000; i++) {
+ kafka.send(SendKeyValues.to("input", List.of(new KeyValue<>("foo" + i, "bar" + i)))
+ .with(KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName())
+ .with(VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()));
+ }
+
+ processorManager.startAll();
+
+ await("Record Processing")
+ .atMost(Duration.ofSeconds(5))
+ .untilAsserted(() -> assertThat(recordsProcessed).hasValue(1_000));
+
+ assertThat(actualBatchSizes).containsOnly(100);
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/src/test/java/org/dependencytrack/event/kafka/KafkaStreamsDelayedBomProcessedNotificationTest.java b/src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsDelayedBomProcessedNotificationTest.java
similarity index 99%
rename from src/test/java/org/dependencytrack/event/kafka/KafkaStreamsDelayedBomProcessedNotificationTest.java
rename to src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsDelayedBomProcessedNotificationTest.java
index 758e4b47d..c84db7b3c 100644
--- a/src/test/java/org/dependencytrack/event/kafka/KafkaStreamsDelayedBomProcessedNotificationTest.java
+++ b/src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsDelayedBomProcessedNotificationTest.java
@@ -1,4 +1,4 @@
-package org.dependencytrack.event.kafka;
+package org.dependencytrack.event.kafka.streams;
import net.mguenther.kafka.junit.KeyValue;
import net.mguenther.kafka.junit.ReadKeyValues;
@@ -6,6 +6,7 @@
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
+import org.dependencytrack.event.kafka.KafkaTopics;
import org.dependencytrack.event.kafka.serialization.KafkaProtobufSerializer;
import org.dependencytrack.model.Project;
import org.dependencytrack.model.VulnerabilityScan;
diff --git a/src/test/java/org/dependencytrack/event/kafka/KafkaStreamsPostgresTest.java b/src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsPostgresTest.java
similarity index 97%
rename from src/test/java/org/dependencytrack/event/kafka/KafkaStreamsPostgresTest.java
rename to src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsPostgresTest.java
index 1122a3325..b0dc20657 100644
--- a/src/test/java/org/dependencytrack/event/kafka/KafkaStreamsPostgresTest.java
+++ b/src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsPostgresTest.java
@@ -1,4 +1,4 @@
-package org.dependencytrack.event.kafka;
+package org.dependencytrack.event.kafka.streams;
import net.mguenther.kafka.junit.ExternalKafkaCluster;
import net.mguenther.kafka.junit.TopicConfig;
@@ -6,6 +6,7 @@
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.dependencytrack.AbstractPostgresEnabledTest;
+import org.dependencytrack.event.kafka.KafkaTopics;
import org.dependencytrack.event.kafka.serialization.KafkaProtobufDeserializer;
import org.dependencytrack.proto.notification.v1.Notification;
import org.junit.After;
diff --git a/src/test/java/org/dependencytrack/event/kafka/KafkaStreamsTopologyTest.java b/src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsTopologyTest.java
similarity index 99%
rename from src/test/java/org/dependencytrack/event/kafka/KafkaStreamsTopologyTest.java
rename to src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsTopologyTest.java
index 79bccb001..e3c632f93 100644
--- a/src/test/java/org/dependencytrack/event/kafka/KafkaStreamsTopologyTest.java
+++ b/src/test/java/org/dependencytrack/event/kafka/streams/KafkaStreamsTopologyTest.java
@@ -1,4 +1,4 @@
-package org.dependencytrack.event.kafka;
+package org.dependencytrack.event.kafka.streams;
import alpine.event.framework.Event;
import alpine.event.framework.EventService;
@@ -19,6 +19,7 @@
import org.dependencytrack.event.PortfolioVulnerabilityAnalysisEvent;
import org.dependencytrack.event.ProjectMetricsUpdateEvent;
import org.dependencytrack.event.ProjectPolicyEvaluationEvent;
+import org.dependencytrack.event.kafka.KafkaTopics;
import org.dependencytrack.event.kafka.serialization.KafkaProtobufSerializer;
import org.dependencytrack.model.Policy;
import org.dependencytrack.model.PolicyCondition;
diff --git a/src/test/java/org/dependencytrack/event/kafka/exception/KafkaStreamsDeserializationExceptionHandlerTest.java b/src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsDeserializationExceptionHandlerTest.java
similarity index 97%
rename from src/test/java/org/dependencytrack/event/kafka/exception/KafkaStreamsDeserializationExceptionHandlerTest.java
rename to src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsDeserializationExceptionHandlerTest.java
index 4ce0df779..43aac8de9 100644
--- a/src/test/java/org/dependencytrack/event/kafka/exception/KafkaStreamsDeserializationExceptionHandlerTest.java
+++ b/src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsDeserializationExceptionHandlerTest.java
@@ -1,4 +1,4 @@
-package org.dependencytrack.event.kafka.exception;
+package org.dependencytrack.event.kafka.streams.exception;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.errors.SerializationException;
diff --git a/src/test/java/org/dependencytrack/event/kafka/exception/KafkaStreamsProductionExceptionHandlerTest.java b/src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsProductionExceptionHandlerTest.java
similarity index 97%
rename from src/test/java/org/dependencytrack/event/kafka/exception/KafkaStreamsProductionExceptionHandlerTest.java
rename to src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsProductionExceptionHandlerTest.java
index 0e2c8e6b4..741933f92 100644
--- a/src/test/java/org/dependencytrack/event/kafka/exception/KafkaStreamsProductionExceptionHandlerTest.java
+++ b/src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsProductionExceptionHandlerTest.java
@@ -1,4 +1,4 @@
-package org.dependencytrack.event.kafka.exception;
+package org.dependencytrack.event.kafka.streams.exception;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.errors.RecordTooLargeException;
diff --git a/src/test/java/org/dependencytrack/event/kafka/exception/KafkaStreamsUncaughtExceptionHandlerTest.java b/src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsUncaughtExceptionHandlerTest.java
similarity index 97%
rename from src/test/java/org/dependencytrack/event/kafka/exception/KafkaStreamsUncaughtExceptionHandlerTest.java
rename to src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsUncaughtExceptionHandlerTest.java
index 992330edf..4ee06fe3e 100644
--- a/src/test/java/org/dependencytrack/event/kafka/exception/KafkaStreamsUncaughtExceptionHandlerTest.java
+++ b/src/test/java/org/dependencytrack/event/kafka/streams/exception/KafkaStreamsUncaughtExceptionHandlerTest.java
@@ -1,4 +1,4 @@
-package org.dependencytrack.event.kafka.exception;
+package org.dependencytrack.event.kafka.streams.exception;
import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse;
import org.junit.Test;