From 0071d6876624dc7074cc10742580de5e041cff93 Mon Sep 17 00:00:00 2001
From: Michael Grosser
Date: Wed, 19 Jan 2022 16:50:14 -0800
Subject: [PATCH] Reload bearer token from disk to work with kubernetes bound-service-account-tokens

---
 .../base/checks/openmetrics/base_check.py  |  2 ++
 .../base/checks/openmetrics/mixins.py      | 36 +++++++++----------
 .../kube_apiserver_metrics.py              |  6 ++++
 .../kube_controller_manager.py             |  4 +++
 kubelet/datadog_checks/kubelet/kubelet.py  |  4 +++
 5 files changed, 33 insertions(+), 19 deletions(-)

diff --git a/datadog_checks_base/datadog_checks/base/checks/openmetrics/base_check.py b/datadog_checks_base/datadog_checks/base/checks/openmetrics/base_check.py
index 2214d01ced394..b7041160d8d62 100644
--- a/datadog_checks_base/datadog_checks/base/checks/openmetrics/base_check.py
+++ b/datadog_checks_base/datadog_checks/base/checks/openmetrics/base_check.py
@@ -139,6 +139,8 @@ def check(self, instance):
                 "You have to collect at least one metric from the endpoint: {}".format(scraper_config['prometheus_url'])
             )
 
+        scraper_config['_bearer_token'] = self._get_bearer_token(scraper_config['bearer_token_auth'], scraper_config['bearer_token_path'])
+
         self.process(scraper_config)
 
     def get_scraper_config(self, instance):
diff --git a/datadog_checks_base/datadog_checks/base/checks/openmetrics/mixins.py b/datadog_checks_base/datadog_checks/base/checks/openmetrics/mixins.py
index b4089e7cf4eb9..14b4c430e2e7d 100644
--- a/datadog_checks_base/datadog_checks/base/checks/openmetrics/mixins.py
+++ b/datadog_checks_base/datadog_checks/base/checks/openmetrics/mixins.py
@@ -398,32 +398,30 @@ def get_http_handler(self, scraper_config):
         The http handler is cached using `prometheus_url` as key.
         """
         prometheus_url = scraper_config['prometheus_url']
-        if prometheus_url in self._http_handlers:
-            return self._http_handlers[prometheus_url]
+        http_handler = self._http_handlers.get(prometheus_url)
+        if http_handler is None:
+            # TODO: Deprecate this behavior in Agent 8
+            if scraper_config['ssl_ca_cert'] is False:
+                scraper_config['ssl_verify'] = False
 
-        # TODO: Deprecate this behavior in Agent 8
-        if scraper_config['ssl_ca_cert'] is False:
-            scraper_config['ssl_verify'] = False
+            # TODO: Deprecate this behavior in Agent 8
+            if scraper_config['ssl_verify'] is False:
+                scraper_config.setdefault('tls_ignore_warning', True)
 
-        # TODO: Deprecate this behavior in Agent 8
-        if scraper_config['ssl_verify'] is False:
-            scraper_config.setdefault('tls_ignore_warning', True)
+            http_handler = self._http_handlers[prometheus_url] = RequestsWrapper(
+                scraper_config, self.init_config, self.HTTP_CONFIG_REMAPPER, self.log
+            )
 
-        http_handler = self._http_handlers[prometheus_url] = RequestsWrapper(
-            scraper_config, self.init_config, self.HTTP_CONFIG_REMAPPER, self.log
-        )
+            # TODO: Determine if we really need this
+            http_handler.options['headers'].setdefault('accept-encoding', 'gzip')
 
-        headers = http_handler.options['headers']
+            # Explicitly set the content type we accept
+            http_handler.options['headers'].setdefault('accept', 'text/plain')
 
+        # Even when using the cached handler, we need to re-set this token since it could have changed
         bearer_token = scraper_config['_bearer_token']
         if bearer_token is not None:
-            headers['Authorization'] = 'Bearer {}'.format(bearer_token)
-
-        # TODO: Determine if we really need this
-        headers.setdefault('accept-encoding', 'gzip')
-
-        # Explicitly set the content type we accept
-        headers.setdefault('accept', 'text/plain')
+            http_handler.options['headers']['Authorization'] = 'Bearer {}'.format(bearer_token)
 
         return http_handler
 
diff --git a/kube_apiserver_metrics/datadog_checks/kube_apiserver_metrics/kube_apiserver_metrics.py b/kube_apiserver_metrics/datadog_checks/kube_apiserver_metrics/kube_apiserver_metrics.py
index 9f9ec4ea629bd..75441e15225ec 100644
--- a/kube_apiserver_metrics/datadog_checks/kube_apiserver_metrics/kube_apiserver_metrics.py
+++ b/kube_apiserver_metrics/datadog_checks/kube_apiserver_metrics/kube_apiserver_metrics.py
@@ -110,6 +110,12 @@ def check(self, instance):
         if not self.kube_apiserver_config['metrics_mapper']:
             url = self.kube_apiserver_config['prometheus_url']
             raise CheckException("You have to collect at least one metric from the endpoint: {}".format(url))
+
+        # Reload from path in case the token was refreshed
+        # see https://github.com/kubernetes/enhancements/blob/master/keps/sig-auth/1205-bound-service-account-tokens/README.md
+        # same as datadog_checks_base/datadog_checks/base/checks/openmetrics/mixins.py
+        self.kube_apiserver_config['_bearer_token'] = self._get_bearer_token(self.kube_apiserver_config['bearer_token_auth'], self.kube_apiserver_config['bearer_token_path'])
+
         self.process(self.kube_apiserver_config, metric_transformers=self.metric_transformers)
 
     def get_scraper_config(self, instance):
diff --git a/kube_controller_manager/datadog_checks/kube_controller_manager/kube_controller_manager.py b/kube_controller_manager/datadog_checks/kube_controller_manager/kube_controller_manager.py
index 33a3ef950a3f3..0a169d367aabd 100644
--- a/kube_controller_manager/datadog_checks/kube_controller_manager/kube_controller_manager.py
+++ b/kube_controller_manager/datadog_checks/kube_controller_manager/kube_controller_manager.py
@@ -142,6 +142,10 @@ def check(self, instance):
         # Get the configuration for this specific instance
         scraper_config = self.get_scraper_config(instance)
 
+        # TODO: load the token from the configured token path when it is set
+        token = open('/var/run/secrets/kubernetes.io/serviceaccount/token', 'r').read().rstrip()
+        scraper_config['_bearer_token'] = token
+
         # Populate the metric transformers dict
         transformers = {}
         limiters = self.DEFAUT_RATE_LIMITERS + instance.get("extra_limiters", [])
diff --git a/kubelet/datadog_checks/kubelet/kubelet.py b/kubelet/datadog_checks/kubelet/kubelet.py
index bc9a68ce4f565..bc2e2278997bb 100644
--- a/kubelet/datadog_checks/kubelet/kubelet.py
+++ b/kubelet/datadog_checks/kubelet/kubelet.py
@@ -344,6 +344,10 @@ def check(self, instance):
         self.process_stats_summary(
             self.pod_list_utils, self.stats, self.instance_tags, self.use_stats_summary_as_source
         )
+        # TODO: load the token from the configured token path when it is set
+        token = open('/var/run/secrets/kubernetes.io/serviceaccount/token', 'r').read().rstrip()
+        self.cadvisor_scraper_config['_bearer_token'] = token
+        self.kubelet_scraper_config['_bearer_token'] = token
 
         if self.cadvisor_legacy_url:  # Legacy cAdvisor
             self.log.debug('processing legacy cadvisor metrics')
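
Note on the hardcoded token path in the kube_controller_manager and kubelet hunks: both read /var/run/secrets/kubernetes.io/serviceaccount/token directly and leave a TODO for honoring a configured path. As a rough illustration only, the sketch below re-reads a bound service account token on every call and falls back to that default path when no custom path is given; the helper name `read_bearer_token` and its error handling are hypothetical and are not part of this patch or of the existing `_get_bearer_token` helper.

# Illustrative sketch, not part of the patch: re-read the (possibly rotated)
# bound service account token from disk on every check run.
import logging

DEFAULT_TOKEN_PATH = '/var/run/secrets/kubernetes.io/serviceaccount/token'

log = logging.getLogger(__name__)


def read_bearer_token(bearer_token_auth, bearer_token_path=None):
    # Bound tokens are rotated by the kubelet, so the value must be re-read
    # from disk instead of being cached for the lifetime of the process.
    if not bearer_token_auth:
        return None
    path = bearer_token_path or DEFAULT_TOKEN_PATH
    try:
        with open(path, 'r') as f:
            return f.read().rstrip()
    except IOError:
        log.exception('Cannot read bearer token from %s', path)
        return None

A check would then refresh the value once per run, e.g. scraper_config['_bearer_token'] = read_bearer_token(scraper_config['bearer_token_auth'], scraper_config['bearer_token_path']), mirroring what the base_check.py hunk already does with the existing `_get_bearer_token`.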