
collect messages from Kafka #19572

Closed · wants to merge 1 commit
@@ -87,6 +87,9 @@ def get_config(self, config_option):
    def get_version(self):
        return '0.0.0'

    def get_remote_config(self, key):
        return {'public_key': 'key', 'topic': 'test', 'offset': 0, 'partition': 0}

    def log(self, *args, **kwargs):
        pass

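The stubbed get_remote_config above returns a fixed mapping so the check can be exercised without a running Agent. A minimal usage sketch, assuming this hunk belongs to the shared datadog_agent test stub exposed at datadog_checks.base.stubs (the file name is not shown in this excerpt):

from datadog_checks.base.stubs import datadog_agent  # assumed stub location

# The stub ignores the key and always returns the same mapping.
cfg = datadog_agent.get_remote_config('any-key')
assert cfg['topic'] == 'test'
assert cfg['partition'] == 0 and cfg['offset'] == 0
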
9 changes: 9 additions & 0 deletions kafka_consumer/datadog_checks/kafka_consumer/client.py
@@ -254,6 +254,15 @@ def _get_consumer_groups(self):
        else:
            return self.config._consumer_groups

    def get_message(self, topic, partition, offset):
        consumer = self.__create_consumer('datadog')
        consumer.assign([TopicPartition(topic, partition, offset)])
        message = consumer.poll(timeout=1)
        consumer.close()
        if message is None:
            return None
        return message.value()

    def _list_consumer_group_offsets(self, cg_tp):
        return self.kafka_client.list_consumer_group_offsets([cg_tp])

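For context, here is a self-contained sketch of the single-record fetch pattern that get_message wraps, written directly against confluent_kafka. The broker address and consumer settings are placeholder assumptions; the PR's client builds its consumer through its own __create_consumer helper instead.

from confluent_kafka import Consumer, TopicPartition


def fetch_one(bootstrap_servers, topic, partition, offset, timeout=1.0):
    # assign() (rather than subscribe()) pins the consumer to an exact
    # topic/partition/offset, so no consumer-group rebalance is involved.
    consumer = Consumer(
        {
            'bootstrap.servers': bootstrap_servers,  # assumption: plaintext broker
            'group.id': 'datadog',
            'enable.auto.commit': False,
        }
    )
    consumer.assign([TopicPartition(topic, partition, offset)])
    msg = consumer.poll(timeout=timeout)
    consumer.close()
    if msg is None or msg.error():
        return None
    return msg.value()  # raw bytes of the record at that offset


# e.g. fetch_one('localhost:9092', 'test', 0, 0)
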
36 changes: 36 additions & 0 deletions kafka_consumer/datadog_checks/kafka_consumer/kafka_consumer.py
@@ -4,11 +4,17 @@
import json
from collections import defaultdict
from time import time
import yaml

from datadog_checks.base import AgentCheck, is_affirmative
from datadog_checks.kafka_consumer.client import KafkaClient
from datadog_checks.kafka_consumer.config import KafkaConfig

try:
    import datadog_agent
except ImportError:
    from ..stubs import datadog_agent

MAX_TIMESTAMPS = 1000


@@ -24,6 +30,29 @@ def __init__(self, name, init_config, instances):
        self.client = KafkaClient(self.config, self.log)
        self.check_initializations.insert(0, self.config.validate_config)

    def log_message(self):
        print("logging message")
        raw_config = datadog_agent.get_remote_config("test changed")
        print("yaml config ", raw_config, type(raw_config))
        parsed_config = yaml.safe_load(str(raw_config))
        print("parsed config is ", parsed_config)
        for cfg in parsed_config.get("configs", []):
            print("config is ", cfg)
            topic = cfg.get("topic", None)
            partition = cfg.get("partition", None)
            offset = cfg.get("offset", None)
            print("topic is ", topic, "partition is ", partition, "offset is ", offset)
            if topic is None or partition is None or offset is None:
                continue
            message = self.client.get_message(topic, partition, offset)
            self.send_event(
                "Kafka message",
                message,
                ["topic:{}".format(topic), "partition:{}".format(partition), "offset:{}".format(offset)],
                'kafka',
                "",
                severity="info",
            )
            print("message is ", message)
        # print("now the last message")
        # message = self.client.get_message('marvel', 0, 75)
        # self.send_event("Kafka message", message, ["topic:marvel", "partition:0", "offset:75"], 'kafka', "", severity="info")
        # print("message is ", message)

    def check(self, _):
        """The main entrypoint of the check."""
        # Fetch Kafka consumer offsets
@@ -91,6 +120,13 @@ def check(self, _):
            broker_timestamps,
            cluster_id,
        )

        try:
            self.log_message()
        except Exception as e:
            print("oops", e)
            self.log.exception("Error retrieving payload from Kafka for Data Streams %s", str(e))

        if self.config._close_admin_client:
            self.client.close_admin_client()
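
The payload format behind the remote-config key is not defined in this diff, so the shape below is a hypothetical example of what log_message expects: a YAML document with a top-level "configs" list naming each record to fetch and forward as an event. The marvel/0/75 values come from the commented-out debug lines above.

import yaml

raw = """
configs:
  - topic: marvel
    partition: 0
    offset: 75
"""

parsed = yaml.safe_load(raw)
for cfg in parsed.get("configs", []):
    print(cfg["topic"], cfg["partition"], cfg["offset"])  # marvel 0 75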
