Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Reload bearer token from disk to work with kubernetes bound-service-account-tokens #11176

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -139,6 +139,8 @@ def check(self, instance):
"You have to collect at least one metric from the endpoint: {}".format(scraper_config['prometheus_url'])
)

scraper_config['_bearer_token'] = self._get_bearer_token(scraper_config['bearer_token_auth'], scraper_config['bearer_token_path'])

self.process(scraper_config)

def get_scraper_config(self, instance):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -398,32 +398,30 @@ def get_http_handler(self, scraper_config):
The http handler is cached using `prometheus_url` as key.
"""
prometheus_url = scraper_config['prometheus_url']
if prometheus_url in self._http_handlers:
return self._http_handlers[prometheus_url]
http_handler = self._http_handlers[prometheus_url]
if http_handler is None
# TODO: Deprecate this behavior in Agent 8
if scraper_config['ssl_ca_cert'] is False:
scraper_config['ssl_verify'] = False

# TODO: Deprecate this behavior in Agent 8
if scraper_config['ssl_ca_cert'] is False:
scraper_config['ssl_verify'] = False
# TODO: Deprecate this behavior in Agent 8
if scraper_config['ssl_verify'] is False:
scraper_config.setdefault('tls_ignore_warning', True)

# TODO: Deprecate this behavior in Agent 8
if scraper_config['ssl_verify'] is False:
scraper_config.setdefault('tls_ignore_warning', True)
http_handler = self._http_handlers[prometheus_url] = RequestsWrapper(
scraper_config, self.init_config, self.HTTP_CONFIG_REMAPPER, self.log
)

http_handler = self._http_handlers[prometheus_url] = RequestsWrapper(
scraper_config, self.init_config, self.HTTP_CONFIG_REMAPPER, self.log
)
# TODO: Determine if we really need this
http_handler.options['headers'].setdefault('accept-encoding', 'gzip')

headers = http_handler.options['headers']
# Explicitly set the content type we accept
http_handler.options['headers'].setdefault('accept', 'text/plain')

# even when using the cached handler we need to re-set this token since it could have changed
bearer_token = scraper_config['_bearer_token']
if bearer_token is not None:
headers['Authorization'] = 'Bearer {}'.format(bearer_token)

# TODO: Determine if we really need this
headers.setdefault('accept-encoding', 'gzip')

# Explicitly set the content type we accept
headers.setdefault('accept', 'text/plain')
http_handler.options['headers']['Authorization'] = 'Bearer {}'.format(bearer_token)

return http_handler

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,12 @@ def check(self, instance):
if not self.kube_apiserver_config['metrics_mapper']:
url = self.kube_apiserver_config['prometheus_url']
raise CheckException("You have to collect at least one metric from the endpoint: {}".format(url))

# reload from path in case the token was refreshed
# see https://github.com/kubernetes/enhancements/blob/master/keps/sig-auth/1205-bound-service-account-tokens/README.md
# same as datadog_checks_base/datadog_checks/base/checks/openmetrics/mixins.py
self.kube_apiserver_config['_bearer_token'] = self._get_bearer_token(self.kube_apiserver_config['bearer_token_auth'], self.kube_apiserver_config['bearer_token_path'])

self.process(self.kube_apiserver_config, metric_transformers=self.metric_transformers)

def get_scraper_config(self, instance):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,10 @@ def check(self, instance):
# Get the configuration for this specific instance
scraper_config = self.get_scraper_config(instance)

# TODO: load given token path when set
token = open('/var/run/secrets/kubernetes.io/serviceaccount/token', 'r').read().rstrip()
scraper_config['_bearer_token'] = token

# Populate the metric transformers dict
transformers = {}
limiters = self.DEFAUT_RATE_LIMITERS + instance.get("extra_limiters", [])
Expand Down
4 changes: 4 additions & 0 deletions kubelet/datadog_checks/kubelet/kubelet.py
Original file line number Diff line number Diff line change
Expand Up @@ -344,6 +344,10 @@ def check(self, instance):
self.process_stats_summary(
self.pod_list_utils, self.stats, self.instance_tags, self.use_stats_summary_as_source
)
# TODO: load given token path when set
token = open('/var/run/secrets/kubernetes.io/serviceaccount/token', 'r').read().rstrip()
self.cadvisor_scraper_config['_bearer_token'] = token
self.kubelet_scraper_config['_bearer_token'] = token

if self.cadvisor_legacy_url: # Legacy cAdvisor
self.log.debug('processing legacy cadvisor metrics')
Expand Down