Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Drop source:kafka from tags. #4400

Merged
merged 1 commit into from
Aug 20, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 8 additions & 9 deletions kafka_consumer/datadog_checks/kafka_consumer/legacy_0_10_2.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,8 +117,10 @@ def check(self, instance):

        # Report the metrics
self._report_highwater_offsets()
self._report_consumer_offsets_and_lag(self._zk_consumer_offsets, 'zk')
self._report_consumer_offsets_and_lag(self._kafka_consumer_offsets, 'kafka')
self._report_consumer_offsets_and_lag(self._kafka_consumer_offsets)
        # If someone is in the middle of migrating their offset storage from ZooKeeper to Kafka,
        # they need to identify which source is reporting which offsets, so tag the ZooKeeper ones with 'source:zk'.
self._report_consumer_offsets_and_lag(self._zk_consumer_offsets, source='zk')

def _create_kafka_client(self):
kafka_conn_str = self.instance.get('kafka_connect_str')
Expand Down Expand Up @@ -265,15 +267,12 @@ def _report_highwater_offsets(self):
broker_tags.extend(self._custom_tags)
self.gauge('broker_offset', highwater_offset, tags=broker_tags)

def _report_consumer_offsets_and_lag(self, consumer_offsets, consumer_offsets_source):
def _report_consumer_offsets_and_lag(self, consumer_offsets, **kwargs):
"""Report the consumer group offsets and consumer lag."""
for (consumer_group, topic, partition), consumer_offset in consumer_offsets.items():
consumer_group_tags = [
'topic:%s' % topic,
'partition:%s' % partition,
'consumer_group:%s' % consumer_group,
'source:%s' % consumer_offsets_source,
]
consumer_group_tags = ['topic:%s' % topic, 'partition:%s' % partition, 'consumer_group:%s' % consumer_group]
if 'source' in kwargs:
consumer_group_tags.append('source:%s' % kwargs['source'])
consumer_group_tags.extend(self._custom_tags)
if partition in self._kafka_client.cluster.partitions_for_topic(topic):
# report consumer offset if the partition is valid because even if leaderless the consumer offset will
Expand Down
4 changes: 1 addition & 3 deletions kafka_consumer/tests/test_kafka_consumer.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,8 +32,6 @@ def test_check_kafka(aggregator, kafka_instance):
for mname in BROKER_METRICS:
aggregator.assert_metric(mname, tags=tags, at_least=1)
for mname in CONSUMER_METRICS:
aggregator.assert_metric(
mname, tags=tags + ["source:kafka", "consumer_group:{}".format(name)], at_least=1
)
aggregator.assert_metric(mname, tags=tags + ["consumer_group:{}".format(name)], at_least=1)

aggregator.assert_all_metrics_covered()