From 4be828a868fdc8c18217a79e708b20ee51fe7169 Mon Sep 17 00:00:00 2001 From: Robert Karlsson Date: Mon, 2 Dec 2024 14:26:26 +0900 Subject: [PATCH] Added readme --- kafka-event-queue/README.md | 76 ++++++++++++++++++++++++++++++++++--- 1 file changed, 70 insertions(+), 6 deletions(-) diff --git a/kafka-event-queue/README.md b/kafka-event-queue/README.md index bb2ea5390..f776be837 100644 --- a/kafka-event-queue/README.md +++ b/kafka-event-queue/README.md @@ -1,4 +1,5 @@ # Event Queue + ## Published Artifacts Group: `com.netflix.conductor` @@ -8,17 +9,80 @@ Group: `com.netflix.conductor` | conductor-kafka-event-queue | Support for integration with Kafka and consume events from it. | ## Modules + ### Kafka + https://kafka.apache.org/ -Provides ability to publish and consume messages from Kafka -#### Configuration -(Default values shown below) +## kafka-event-queue + +Provides ability to consume messages from Kafka + +## Configuration + +To enable the queue, set the following property to true. + ```properties conductor.event-queues.kafka.enabled=true +``` + +There is a set of shared properties; these are: + +```properties +# If Kafka should be used with event queues like SQS or AMQP +conductor.default-event-queue.type=kafka + +# The bootstrap servers to use. conductor.event-queues.kafka.bootstrap-servers=kafka:29092 -conductor.event-queues.kafka.groupId=conductor-consumers + +# The topic to listen to conductor.event-queues.kafka.topic=conductor-event -conductor.event-queues.kafka.dlqTopic=conductor-dlq -conductor.event-queues.kafka.pollTimeDuration=100 + +# The dead letter queue to use for events that had some error. +conductor.event-queues.kafka.dlq-topic=conductor-dlq + +# topic prefix combined with conductor.default-event-queue.type +conductor.event-queues.kafka.listener-queue-prefix=conductor_ + +# The polling duration. Start at 500ms and reduce based on how your environment behaves.
+conductor.event-queues.kafka.poll-time-duration=500ms +``` + +There are 3 clients that should be configured: the Consumer, responsible for consuming messages; the Producer, which publishes messages to Kafka; and the Admin, which handles admin operations. + +The supported properties for the 3 clients are the ones included in `org.apache.kafka:kafka-clients:3.5.1` for each client type. + +## Consumer properties + +Example of consumer settings. + +```properties +conductor.event-queues.kafka.consumer.client.id=consumer-client +conductor.event-queues.kafka.consumer.auto.offset.reset=earliest +conductor.event-queues.kafka.consumer.enable.auto.commit=false +conductor.event-queues.kafka.consumer.fetch.min.bytes=1 +conductor.event-queues.kafka.consumer.max.poll.records=500 +conductor.event-queues.kafka.consumer.group-id=conductor-group +``` + +## Producer properties + +Example of producer settings. + +```properties +conductor.event-queues.kafka.producer.client.id=producer-client +conductor.event-queues.kafka.producer.acks=all +conductor.event-queues.kafka.producer.retries=5 +conductor.event-queues.kafka.producer.batch.size=16384 +conductor.event-queues.kafka.producer.linger.ms=10 +conductor.event-queues.kafka.producer.compression.type=gzip +``` + +## Admin properties + +Example of admin settings. + +```properties +conductor.event-queues.kafka.admin.client.id=admin-client +conductor.event-queues.kafka.admin.connections.max.idle.ms=10000 ```