-
-
Notifications
You must be signed in to change notification settings - Fork 14
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Implement foundational API for parallel-consumer based Kafka processors
Decoupled from #509 This merely adds an API on top of which processors can be implemented. We can migrate processors one-by-one from Kafka Streams to this API. Majority of this work was already done in #509, but got out of date due to changed priorities. At the very least said PR is good to take inspiration from. Relates to DependencyTrack/hyades#346 Relates to DependencyTrack/hyades#901 Relates to DependencyTrack/hyades#907 Signed-off-by: nscuro <[email protected]>
- Loading branch information
Showing
18 changed files
with
1,159 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,16 @@ | ||
package org.dependencytrack.common; | ||
|
||
/**
 * Constant keys for use with SLF4J's {@link org.slf4j.MDC} (Mapped Diagnostic Context),
 * shared across Kafka processor implementations so log context fields stay uniform.
 */
public final class MdcKeys {

    /** Topic of the Kafka record currently being handled. */
    public static final String MDC_KAFKA_RECORD_TOPIC = "kafkaRecordTopic";

    /** Partition of the Kafka record currently being handled. */
    public static final String MDC_KAFKA_RECORD_PARTITION = "kafkaRecordPartition";

    /** Offset of the Kafka record currently being handled. */
    public static final String MDC_KAFKA_RECORD_OFFSET = "kafkaRecordOffset";

    /** Key of the Kafka record currently being handled. */
    public static final String MDC_KAFKA_RECORD_KEY = "kafkaRecordKey";

    private MdcKeys() {
        // Constants holder; not meant to be instantiated.
    }

}
30 changes: 30 additions & 0 deletions
30
src/main/java/org/dependencytrack/event/kafka/processor/ProcessorInitializer.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,30 @@ | ||
package org.dependencytrack.event.kafka.processor; | ||
|
||
import alpine.common.logging.Logger; | ||
import org.dependencytrack.event.kafka.processor.api.ProcessorManager; | ||
|
||
import javax.servlet.ServletContextEvent; | ||
import javax.servlet.ServletContextListener; | ||
|
||
/**
 * {@link ServletContextListener} tying the lifecycle of all Kafka processors
 * to the lifecycle of the servlet context.
 */
public class ProcessorInitializer implements ServletContextListener {

    private static final Logger LOGGER = Logger.getLogger(ProcessorInitializer.class);

    // Shared manager instance; package-private so ProcessorsHealthCheck can probe it.
    static final ProcessorManager PROCESSOR_MANAGER = new ProcessorManager();

    /**
     * Registers and starts all processors when the servlet context comes up.
     */
    @Override
    public void contextInitialized(final ServletContextEvent event) {
        LOGGER.info("Initializing processors");

        // TODO: Register processor here!

        PROCESSOR_MANAGER.startAll();
    }

    /**
     * Stops all managed processors when the servlet context shuts down.
     */
    @Override
    public void contextDestroyed(final ServletContextEvent event) {
        LOGGER.info("Stopping processors");
        PROCESSOR_MANAGER.close();
    }

}
17 changes: 17 additions & 0 deletions
17
src/main/java/org/dependencytrack/event/kafka/processor/ProcessorsHealthCheck.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,17 @@ | ||
package org.dependencytrack.event.kafka.processor; | ||
|
||
import org.eclipse.microprofile.health.HealthCheck; | ||
import org.eclipse.microprofile.health.HealthCheckResponse; | ||
import org.eclipse.microprofile.health.Liveness; | ||
|
||
import static org.dependencytrack.event.kafka.processor.ProcessorInitializer.PROCESSOR_MANAGER; | ||
|
||
/**
 * MicroProfile Health liveness check for Kafka processors.
 * <p>
 * Delegates probing to the shared {@code ProcessorManager} created by
 * {@link ProcessorInitializer}.
 */
@Liveness
public class ProcessorsHealthCheck implements HealthCheck {

    @Override
    public HealthCheckResponse call() {
        return PROCESSOR_MANAGER.probeHealth();
    }

}
87 changes: 87 additions & 0 deletions
87
src/main/java/org/dependencytrack/event/kafka/processor/api/AbstractProcessingStrategy.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,87 @@ | ||
package org.dependencytrack.event.kafka.processor.api; | ||
|
||
import org.apache.commons.lang3.exception.ExceptionUtils; | ||
import org.apache.http.conn.ConnectTimeoutException; | ||
import org.apache.kafka.clients.consumer.ConsumerRecord; | ||
import org.apache.kafka.common.errors.SerializationException; | ||
import org.apache.kafka.common.serialization.Serde; | ||
import org.datanucleus.api.jdo.exceptions.ConnectionInUseException; | ||
import org.datanucleus.store.query.QueryInterruptedException; | ||
import org.dependencytrack.event.kafka.processor.exception.RetryableProcessingException; | ||
import org.postgresql.util.PSQLState; | ||
|
||
import javax.jdo.JDOOptimisticVerificationException; | ||
import java.net.SocketTimeoutException; | ||
import java.sql.SQLException; | ||
import java.sql.SQLTransientConnectionException; | ||
import java.sql.SQLTransientException; | ||
import java.util.List; | ||
import java.util.concurrent.TimeoutException; | ||
|
||
/** | ||
* An abstract {@link ProcessingStrategy} that provides various shared functionality. | ||
* | ||
* @param <K> Type of the {@link ConsumerRecord} key | ||
* @param <V> Type of the {@link ConsumerRecord} value | ||
*/ | ||
abstract class AbstractProcessingStrategy<K, V> implements ProcessingStrategy { | ||
|
||
private final Serde<K> keySerde; | ||
private final Serde<V> valueSerde; | ||
|
||
AbstractProcessingStrategy(final Serde<K> keySerde, final Serde<V> valueSerde) { | ||
this.keySerde = keySerde; | ||
this.valueSerde = valueSerde; | ||
} | ||
|
||
/** | ||
* @param record The {@link ConsumerRecord} to deserialize key and value of | ||
* @return A {@link ConsumerRecord} with deserialized key and value | ||
* @throws SerializationException When deserializing the {@link ConsumerRecord} failed | ||
*/ | ||
ConsumerRecord<K, V> deserialize(final ConsumerRecord<byte[], byte[]> record) { | ||
final K deserializedKey; | ||
final V deserializedValue; | ||
try { | ||
deserializedKey = keySerde.deserializer().deserialize(record.topic(), record.key()); | ||
deserializedValue = valueSerde.deserializer().deserialize(record.topic(), record.value()); | ||
} catch (RuntimeException e) { | ||
if (e instanceof SerializationException) { | ||
throw e; | ||
} | ||
|
||
throw new SerializationException(e); | ||
} | ||
|
||
return new ConsumerRecord<>(record.topic(), record.partition(), record.offset(), | ||
record.timestamp(), record.timestampType(), record.serializedKeySize(), record.serializedValueSize(), | ||
deserializedKey, deserializedValue, record.headers(), record.leaderEpoch()); | ||
} | ||
|
||
private static final List<Class<? extends Exception>> KNOWN_TRANSIENT_EXCEPTIONS = List.of( | ||
ConnectTimeoutException.class, | ||
ConnectionInUseException.class, | ||
JDOOptimisticVerificationException.class, | ||
QueryInterruptedException.class, | ||
SocketTimeoutException.class, | ||
SQLTransientException.class, | ||
SQLTransientConnectionException.class, | ||
TimeoutException.class | ||
); | ||
|
||
boolean isRetryableException(final Throwable throwable) { | ||
if (throwable instanceof RetryableProcessingException) { | ||
return true; | ||
} | ||
|
||
final boolean isKnownTransientException = ExceptionUtils.getThrowableList(throwable).stream() | ||
.anyMatch(cause -> KNOWN_TRANSIENT_EXCEPTIONS.contains(cause.getClass())); | ||
if (isKnownTransientException) { | ||
return true; | ||
} | ||
|
||
return ExceptionUtils.getRootCause(throwable) instanceof final SQLException se | ||
&& PSQLState.isConnectionError(se.getSQLState()); | ||
} | ||
|
||
} |
69 changes: 69 additions & 0 deletions
69
src/main/java/org/dependencytrack/event/kafka/processor/api/BatchProcessingStrategy.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,69 @@ | ||
package org.dependencytrack.event.kafka.processor.api; | ||
|
||
import alpine.common.logging.Logger; | ||
import io.confluent.parallelconsumer.PCRetriableException; | ||
import org.apache.kafka.clients.consumer.ConsumerRecord; | ||
import org.apache.kafka.common.errors.SerializationException; | ||
import org.apache.kafka.common.serialization.Serde; | ||
import org.dependencytrack.common.MdcKeys; | ||
import org.dependencytrack.event.kafka.processor.exception.ProcessingException; | ||
import org.slf4j.MDC; | ||
|
||
import java.util.ArrayList; | ||
import java.util.List; | ||
|
||
/** | ||
* A {@link ProcessingStrategy} that processes records in batches. | ||
* | ||
* @param <K> Type of the {@link ConsumerRecord} key | ||
* @param <V> Type of the {@link ConsumerRecord} value | ||
*/ | ||
class BatchProcessingStrategy<K, V> extends AbstractProcessingStrategy<K, V> { | ||
|
||
private static final Logger LOGGER = Logger.getLogger(BatchProcessingStrategy.class); | ||
|
||
private final BatchProcessor<K, V> batchProcessor; | ||
|
||
BatchProcessingStrategy(final BatchProcessor<K, V> batchProcessor, | ||
final Serde<K> keySerde, final Serde<V> valueSerde) { | ||
super(keySerde, valueSerde); | ||
this.batchProcessor = batchProcessor; | ||
} | ||
|
||
/** | ||
* {@inheritDoc} | ||
*/ | ||
@Override | ||
public void processRecords(final List<ConsumerRecord<byte[], byte[]>> records) { | ||
final var deserializedRecords = new ArrayList<ConsumerRecord<K, V>>(records.size()); | ||
for (final ConsumerRecord<byte[], byte[]> record : records) { | ||
try (var ignoredMdcKafkaRecordTopic = MDC.putCloseable(MdcKeys.MDC_KAFKA_RECORD_TOPIC, record.topic()); | ||
var ignoredMdcKafkaRecordPartition = MDC.putCloseable(MdcKeys.MDC_KAFKA_RECORD_PARTITION, String.valueOf(record.partition())); | ||
var ignoredMdcKafkaRecordOffset = MDC.putCloseable(MdcKeys.MDC_KAFKA_RECORD_OFFSET, String.valueOf(record.offset()))) { | ||
deserializedRecords.add(deserialize(record)); | ||
} catch (SerializationException e) { | ||
// TODO: Consider supporting error handlers, e.g. to send record to DLT. | ||
LOGGER.error("Failed to deserialize record; Skipping", e); | ||
} | ||
} | ||
|
||
if (deserializedRecords.isEmpty()) { | ||
LOGGER.warn("All of the %d records in this batch failed to be deserialized".formatted(records.size())); | ||
return; | ||
} | ||
|
||
try { | ||
batchProcessor.process(deserializedRecords); | ||
} catch (ProcessingException | RuntimeException e) { | ||
if (isRetryableException(e)) { | ||
LOGGER.warn("Encountered retryable exception while processing %d records".formatted(deserializedRecords.size()), e); | ||
throw new PCRetriableException(e); | ||
} | ||
|
||
LOGGER.error("Encountered non-retryable exception while processing %d records; Skipping".formatted(deserializedRecords.size()), e); | ||
// TODO: Consider supporting error handlers, e.g. to send records to DLT. | ||
// Skip records to avoid poison-pill scenario. | ||
} | ||
} | ||
|
||
} |
26 changes: 26 additions & 0 deletions
26
src/main/java/org/dependencytrack/event/kafka/processor/api/BatchProcessor.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,26 @@ | ||
package org.dependencytrack.event.kafka.processor.api; | ||
|
||
import org.apache.kafka.clients.consumer.ConsumerRecord; | ||
import org.dependencytrack.event.kafka.processor.exception.ProcessingException; | ||
|
||
import java.util.List; | ||
|
||
/**
 * A processor of {@link ConsumerRecord} batches.
 *
 * @param <K> Type of the {@link ConsumerRecord} key
 * @param <V> Type of the {@link ConsumerRecord} value
 */
public interface BatchProcessor<K, V> {

    /**
     * Process a batch of {@link ConsumerRecord}s.
     * <p>
     * This method may be called by multiple threads concurrently and thus MUST be thread safe!
     *
     * @param records Batch of {@link ConsumerRecord}s to process; never modified by callers after handoff
     * @throws ProcessingException When consuming the batch of {@link ConsumerRecord}s failed;
     *                             NOTE(review): retry behavior on failure is decided by the
     *                             processing strategy, not by implementations of this interface
     */
    void process(final List<ConsumerRecord<K, V>> records) throws ProcessingException;

}
16 changes: 16 additions & 0 deletions
16
src/main/java/org/dependencytrack/event/kafka/processor/api/ProcessingStrategy.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,16 @@ | ||
package org.dependencytrack.event.kafka.processor.api; | ||
|
||
import org.apache.kafka.clients.consumer.ConsumerRecord; | ||
|
||
import java.util.List; | ||
|
||
/**
 * Internal strategy interface describing HOW raw {@link ConsumerRecord}s are
 * handed to user-provided processors (e.g. one-by-one or in batches).
 */
interface ProcessingStrategy {

    /**
     * Process zero or more {@link ConsumerRecord}s.
     *
     * @param records The {@link ConsumerRecord}s to process, with keys and values
     *                still in their serialized (byte array) form
     */
    void processRecords(final List<ConsumerRecord<byte[], byte[]>> records);

}
24 changes: 24 additions & 0 deletions
24
src/main/java/org/dependencytrack/event/kafka/processor/api/Processor.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,24 @@ | ||
package org.dependencytrack.event.kafka.processor.api; | ||
|
||
import org.apache.kafka.clients.consumer.ConsumerRecord; | ||
import org.dependencytrack.event.kafka.processor.exception.ProcessingException; | ||
|
||
/**
 * A processor of individual {@link ConsumerRecord}s.
 *
 * @param <K> Type of the {@link ConsumerRecord} key
 * @param <V> Type of the {@link ConsumerRecord} value
 */
public interface Processor<K, V> {

    /**
     * Process a {@link ConsumerRecord}.
     * <p>
     * This method may be called by multiple threads concurrently and thus MUST be thread safe!
     *
     * @param record The {@link ConsumerRecord} to process, with key and value already deserialized
     * @throws ProcessingException When processing the {@link ConsumerRecord} failed
     */
    void process(final ConsumerRecord<K, V> record) throws ProcessingException;

}
Oops, something went wrong.