# Create environments for the new kafka client (#14022)

Changes from 1 commit.
**`confluent_kafka_client.py`** — the stub client now extends the shared `KafkaClient` base class, and its methods raise `NotImplementedError` instead of silently passing:

```diff
@@ -1,14 +1,36 @@
 # (C) Datadog, Inc. 2023-present
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
+from datadog_checks.kafka_consumer.client.kafka_client import KafkaClient
 
 
-class ConfluentKafkaClient:
-    def __init__(self) -> None:
-        pass
+class ConfluentKafkaClient(KafkaClient):
+    def create_kafka_admin_client(self):
+        raise NotImplementedError
+
+    def get_consumer_offsets_dict(self):
+        raise NotImplementedError
+
+    def get_highwater_offsets(self):
+        raise NotImplementedError
+
+    def get_highwater_offsets_dict(self):
+        raise NotImplementedError
+
+    def reset_offsets(self):
+        raise NotImplementedError
+
+    def get_partitions_for_topic(self, topic):
+        raise NotImplementedError
+
+    def request_metadata_update(self):
+        raise NotImplementedError
+
+    def collect_broker_version(self):
+        raise NotImplementedError
 
     def get_consumer_offsets(self):
-        pass
+        raise NotImplementedError
 
     def get_broker_offset(self):
-        pass
+        raise NotImplementedError
```
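For context, here is a minimal standalone sketch, not part of this PR, of how one of these stubs might eventually be filled in with the confluent-kafka library. `AdminClient` and `list_topics()` are real confluent-kafka APIs; the `bootstrap_servers` parameter and the class name are illustrative stand-ins:

```python
# Hypothetical sketch only: the PR intentionally leaves these methods
# unimplemented in ConfluentKafkaClient.
from confluent_kafka.admin import AdminClient


class ConfluentKafkaClientSketch:
    def __init__(self, bootstrap_servers):
        self.bootstrap_servers = bootstrap_servers

    def create_kafka_admin_client(self):
        # AdminClient takes a librdkafka-style configuration dict
        return AdminClient({"bootstrap.servers": self.bootstrap_servers})

    def get_partitions_for_topic(self, topic):
        # list_topics() returns ClusterMetadata; .topics maps topic name to
        # TopicMetadata, whose .partitions dict is keyed by partition id
        admin_client = self.create_kafka_admin_client()
        cluster_metadata = admin_client.list_topics(topic=topic, timeout=5)
        return list(cluster_metadata.topics[topic].partitions.keys())
```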
**`generic_kafka_client.py`** (new file) — a facade that owns both client implementations and, for now, routes every call to the legacy kafka-python backend:

```python
# (C) Datadog, Inc. 2023-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)

from datadog_checks.kafka_consumer.client.confluent_kafka_client import ConfluentKafkaClient
from datadog_checks.kafka_consumer.client.kafka_client import KafkaClient
from datadog_checks.kafka_consumer.client.kafka_python_client import KafkaPythonClient


class GenericKafkaClient(KafkaClient):
    def __init__(self, config, tls_context, log) -> None:
        super().__init__(config, tls_context, log)
        self.use_legacy_client = config.use_legacy_client
        self.confluent_kafka_client = (
            ConfluentKafkaClient(config, tls_context, log) if not self.use_legacy_client else None
        )
        self.python_kafka_client = KafkaPythonClient(config, tls_context, log)

    def get_consumer_offsets(self):
        # TODO when this method is implemented in ConfluentKafkaClient, replace this with:
        # if self.use_legacy_client:
        #     return self.python_kafka_client.get_consumer_offsets()
        # return self.confluent_kafka_client.get_consumer_offsets()
        return self.python_kafka_client.get_consumer_offsets()

    def get_highwater_offsets(self):
        # TODO when this method is implemented in ConfluentKafkaClient, replace this with:
        # if self.use_legacy_client:
        #     return self.python_kafka_client.get_highwater_offsets()
        # return self.confluent_kafka_client.get_highwater_offsets()
        return self.python_kafka_client.get_highwater_offsets()

    def get_highwater_offsets_dict(self):
        # TODO when this method is implemented in ConfluentKafkaClient, replace this with:
        # if self.use_legacy_client:
        #     return self.python_kafka_client.get_highwater_offsets_dict()
        # return self.confluent_kafka_client.get_highwater_offsets_dict()
        return self.python_kafka_client.get_highwater_offsets_dict()

    def reset_offsets(self):
        # TODO when this method is implemented in ConfluentKafkaClient, replace this with:
        # if self.use_legacy_client:
        #     return self.python_kafka_client.reset_offsets()
        # return self.confluent_kafka_client.reset_offsets()
        return self.python_kafka_client.reset_offsets()

    def get_partitions_for_topic(self, topic):
        # TODO when this method is implemented in ConfluentKafkaClient, replace this with:
        # if self.use_legacy_client:
        #     return self.python_kafka_client.get_partitions_for_topic(topic)
        # return self.confluent_kafka_client.get_partitions_for_topic(topic)
        return self.python_kafka_client.get_partitions_for_topic(topic)

    def request_metadata_update(self):
        # TODO when this method is implemented in ConfluentKafkaClient, replace this with:
        # if self.use_legacy_client:
        #     return self.python_kafka_client.request_metadata_update()
        # return self.confluent_kafka_client.request_metadata_update()
        return self.python_kafka_client.request_metadata_update()

    def collect_broker_version(self):
        # TODO when this method is implemented in ConfluentKafkaClient, replace this with:
        # if self.use_legacy_client:
        #     return self.python_kafka_client.collect_broker_version()
        # return self.confluent_kafka_client.collect_broker_version()
        return self.python_kafka_client.collect_broker_version()

    def get_consumer_offsets_dict(self):
        # TODO when this method is implemented in ConfluentKafkaClient, replace this with:
        # if self.use_legacy_client:
        #     return self.python_kafka_client.get_consumer_offsets_dict()
        # return self.confluent_kafka_client.get_consumer_offsets_dict()
        return self.python_kafka_client.get_consumer_offsets_dict()

    def create_kafka_admin_client(self):
        # TODO when this method is implemented in ConfluentKafkaClient, replace this with:
        # if self.use_legacy_client:
        #     return self.python_kafka_client.create_kafka_admin_client()
        # return self.confluent_kafka_client.create_kafka_admin_client()
        return self.python_kafka_client.create_kafka_admin_client()
```
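The dispatch the TODOs describe is a strangler-fig style migration behind a facade: the legacy backend answers every call until the new one is ready, then each method flips individually. A self-contained toy of that shape (the backend classes here are stand-ins, not code from this PR):

```python
# Toy illustration of the facade/strangler pattern used above.
class LegacyBackend:
    def get_consumer_offsets(self):
        return {"my-group": {("my-topic", 0): 42}}


class NewBackend:
    def get_consumer_offsets(self):
        raise NotImplementedError  # not ported yet


class GenericClient:
    def __init__(self, use_legacy_client: bool) -> None:
        self.use_legacy_client = use_legacy_client
        self.legacy_backend = LegacyBackend()
        self.new_backend = NewBackend() if not use_legacy_client else None

    def get_consumer_offsets(self):
        # Mirrors the TODOs above: always fall back to the legacy backend
        # until the new one implements the method.
        return self.legacy_backend.get_consumer_offsets()


print(GenericClient(use_legacy_client=False).get_consumer_offsets())
# {'my-group': {('my-topic', 0): 42}}
```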
One file in the PR was deleted outright; its contents are collapsed in the diff view.
**`kafka_python_client.py`** — the duplicated initializer is removed now that the shared base class handles it:

```diff
@@ -29,14 +29,6 @@ def token(self):
 
 
 class KafkaPythonClient(KafkaClient):
-    def __init__(self, config, tls_context, log) -> None:
-        self.config = config
-        self.log = log
-        self._kafka_client = None
-        self._highwater_offsets = {}
-        self._consumer_offsets = {}
-        self._tls_context = tls_context
-
     def get_consumer_offsets(self):
         """Fetch Consumer Group offsets from Kafka.
```

Review thread on the deleted initializer:

> **Reviewer:** How come we wouldn't need this part anymore?
>
> **Author:** Because I moved it to the superclass: https://github.com/DataDog/integrations-core/pull/14022/files#diff-414e25ef3b0e9d3114346cba1b8500545ef67e2bf3598b76b1c6d9b6f9dbcd7eR8-R14
>
> **Reviewer:** Ah gotcha, missed that!
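Based on the attributes deleted above and the superclass diff the author links to, the shared initializer on the `KafkaClient` base class plausibly looks like the following reconstruction; the real `kafka_client.py` may differ (for instance, it may also declare abstract methods):

```python
# Reconstructed from the deleted lines above; not copied from the PR.
class KafkaClient:
    def __init__(self, config, tls_context, log) -> None:
        self.config = config
        self.log = log
        self._kafka_client = None
        self._highwater_offsets = {}
        self._consumer_offsets = {}
        self._tls_context = tls_context
```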
A new setup script (added in this PR) builds librdkafka from source and installs the confluent-kafka binding for the test environment:

```bash
#!/bin/bash

# TODO Remove this script once the library is installed at the agent level

apt-get update
apt-get install -y --no-install-recommends gcc git libssl-dev g++ make build-essential libsasl2-modules-gssapi-mit krb5-user
cd /tmp && git clone https://github.com/edenhill/librdkafka.git
cd librdkafka && git checkout tags/v2.0.2
./configure && make && make install && ldconfig
cd ../ && rm -rf librdkafka
pip install --no-binary confluent-kafka confluent-kafka
```
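Since the script compiles the Python binding against the freshly built native library (`--no-binary`), a quick smoke test, not part of the PR, can confirm both layers installed:

```python
# Verify the binding imports and report both version tuples.
import confluent_kafka

print("confluent-kafka:", confluent_kafka.version())  # Python binding version
print("librdkafka:", confluent_kafka.libversion())    # native library version
```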
Review thread on the PR:

> **Reviewer:** FYI, I think we may need to remove `create_kafka_admin_client()` in the `kafka_python_client.py` code eventually, since it was vestigial from the legacy implementation but couldn't be outright removed because it is used in the tests. I'm OK with keeping this for now, but I don't think we need to implement a similar function in `confluent_kafka_client`.
>
> **Author:** Yeah, I agree. I did not do it in this PR to focus on the global test structure. We can remove it in follow-up PRs :)