Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Import dependencies of base checks on demand #19781

Merged
merged 12 commits into the base branch on
Mar 7, 2025
13 changes: 13 additions & 0 deletions datadog_checks_base/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,19 @@ install the toolkit locally and play with it:
pip install datadog-checks-base
```

## Performance Optimizations

We strive to balance lean resource usage with a "batteries included" user experience.
This is why we import some of our dependencies inside functions that use them instead of the more conventional import section at the top of the file.

Below are some examples of how much we shave off the Python heap for a given dependency:

- `requests==2.32.3`: 3.6MB
- `RequestWrapper` class (`datadog_checks_base==37.7.0`): 2.9MB
- `prometheus-client==0.21.1`: around 1MB

This translates into even bigger savings when running in the Agent — close to 50MB in total.

## Troubleshooting

Need help? Contact [Datadog support][8].
Expand Down
1 change: 1 addition & 0 deletions datadog_checks_base/changelog.d/19781.fixed
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Import dependencies in base check classes lazily. With only the disk and network checks running, the Agent uses almost 50MB less memory after this change.
4 changes: 3 additions & 1 deletion datadog_checks_base/datadog_checks/base/checks/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,6 @@
from ..utils.common import ensure_bytes, to_native_string
from ..utils.diagnose import Diagnosis
from ..utils.fips import enable_fips
from ..utils.http import RequestsWrapper
from ..utils.limiter import Limiter
from ..utils.metadata import MetadataManager
from ..utils.secrets import SecretsSanitizer
Expand Down Expand Up @@ -396,6 +395,9 @@ def http(self):

Only new checks or checks on Agent 6.13+ can and should use this for HTTP requests.
"""
# See Performance Optimizations in this package's README.md.
from ..utils.http import RequestsWrapper

if not hasattr(self, '_http'):
self._http = RequestsWrapper(self.instance or {}, self.init_config, self.HTTP_CONFIG_REMAPPER, self.log)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,6 @@

from itertools import tee

from prometheus_client.metrics_core import Metric
from prometheus_client.parser import _parse_sample, _replace_help_escaping


def text_fd_to_metric_families(fd):
raw_lines, input_lines = tee(fd, 2)
Expand All @@ -32,6 +29,10 @@ def _parse_payload(fd):
Yields Metric's.
"""
# See Performance Optimizations in this package's README.md.
from prometheus_client.metrics_core import Metric
from prometheus_client.parser import _parse_sample, _replace_help_escaping

name = ''
documentation = ''
typ = 'untyped'
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,6 @@
# Licensed under a 3-clause BSD style license (see LICENSE)
from copy import deepcopy

import requests

from ...errors import CheckException
from ...utils.tracing import traced_class
from .. import AgentCheck
Expand Down Expand Up @@ -74,6 +72,9 @@ def __init__(self, *args, **kwargs):
"""
The base class for any Prometheus-based integration.
"""
# See Performance Optimizations in this package's README.md.
import requests

args = list(args)
default_instances = kwargs.pop('default_instances', None) or {}
default_namespace = kwargs.pop('default_namespace', None)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,15 +10,11 @@
from os.path import isfile
from re import compile

import requests
from prometheus_client.samples import Sample

from datadog_checks.base.agent import datadog_agent

from ...config import is_affirmative
from ...errors import CheckException
from ...utils.common import to_native_string
from ...utils.http import RequestsWrapper
from .. import AgentCheck
from ..libs.prometheus import text_fd_to_metric_families

Expand Down Expand Up @@ -380,6 +376,10 @@ def get_http_handler(self, scraper_config):
The http handler is cached using `prometheus_url` as key.
The http handler doesn't use the cache if a bearer token is used to allow refreshing it.
"""

# See Performance Optimizations in this package's README.md.
from ...utils.http import RequestsWrapper

prometheus_url = scraper_config['prometheus_url']
bearer_token = scraper_config['_bearer_token']
if prometheus_url in self._http_handlers and bearer_token is None:
Expand Down Expand Up @@ -826,6 +826,9 @@ def poll(self, scraper_config, headers=None):
Custom headers can be added to the default headers.
"""
# See Performance Optimizations in this package's README.md.
import requests

endpoint = scraper_config.get('prometheus_url')

# Should we send a service check for when we make a request
Expand Down Expand Up @@ -1060,6 +1063,9 @@ def _decumulate_histogram_buckets(self, metric):
"""
Decumulate buckets in a given histogram metric and adds the lower_bound label (le being upper_bound)
"""
# See Performance Optimizations in this package's README.md.
from prometheus_client.samples import Sample

bucket_values_by_context_upper_bound = {}
for sample in metric.samples:
if sample[self.SAMPLE_NAME].endswith("_bucket"):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,6 @@
from collections import ChainMap
from contextlib import contextmanager

from requests.exceptions import RequestException

from ....errors import ConfigurationError
from ....utils.tracing import traced_class
from ... import AgentCheck
Expand Down Expand Up @@ -62,6 +60,9 @@ def check(self, _):
Another thing to note is that this check ignores its instance argument completely.
We take care of instance-level customization at initialization time.
"""
# See Performance Optimizations in this package's README.md.
from requests.exceptions import RequestException

self.refresh_scrapers()

for endpoint, scraper in self.scrapers.items():
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,17 +9,12 @@
from math import isinf, isnan
from typing import List # noqa: F401

from prometheus_client.openmetrics.parser import text_fd_to_metric_families as parse_openmetrics
from prometheus_client.parser import text_fd_to_metric_families as parse_prometheus
from requests.exceptions import ConnectionError

from datadog_checks.base.agent import datadog_agent

from ....config import is_affirmative
from ....constants import ServiceCheck
from ....errors import ConfigurationError
from ....utils.functions import no_op, return_true
from ....utils.http import RequestsWrapper
from .first_scrape_handler import first_scrape_handler
from .labels import LabelAggregator, get_label_normalizer
from .transform import MetricTransformer
Expand Down Expand Up @@ -50,6 +45,8 @@ def __init__(self, check, config):
"""
The base class for any scraper overrides.
"""
# See Performance Optimizations in this package's README.md.
from ....utils.http import RequestsWrapper

self.config = config

Expand Down Expand Up @@ -333,6 +330,10 @@ def parse_metrics(self):

@property
def parse_metric_families(self):
# See Performance Optimizations in this package's README.md.
from prometheus_client.openmetrics.parser import text_fd_to_metric_families as parse_openmetrics
from prometheus_client.parser import text_fd_to_metric_families as parse_prometheus

media_type = self._content_type.split(';')[0]
# Setting `use_latest_spec` forces the use of the OpenMetrics format, otherwise
# the format will be chosen based on the media type specified in the response's content-header.
Expand Down Expand Up @@ -393,6 +394,8 @@ def stream_connection_lines(self):
"""
Yield the connection line.
"""
# See Performance Optimizations in this package's README.md.
from requests.exceptions import ConnectionError

try:
with self.get_connection() as connection:
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
# (C) Datadog, Inc. 2020-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from prometheus_client.samples import Sample

NEGATIVE_INFINITY = float('-inf')

Expand All @@ -10,6 +9,9 @@ def decumulate_histogram_buckets(sample_data):
"""
Decumulate buckets in a given histogram metric and adds the lower_bound label (le being upper_bound)
"""
# See Performance Optimizations in this package's README.md.
from prometheus_client.samples import Sample

# TODO: investigate performance optimizations
new_sample_data = []
bucket_values_by_context_upper_bound = {}
Expand Down
Loading