diff --git a/.azure-pipelines/templates/test-all-checks.yml b/.azure-pipelines/templates/test-all-checks.yml index 57668b80d2449..5e6c9a0b73e82 100644 --- a/.azure-pipelines/templates/test-all-checks.yml +++ b/.azure-pipelines/templates/test-all-checks.yml @@ -318,6 +318,9 @@ jobs: - checkName: redisdb displayName: Redis os: linux + - checkName: rethinkdb + displayName: RethinkDB + os: linux - checkName: riak displayName: Riak os: linux diff --git a/datadog_checks_base/datadog_checks/base/data/agent_requirements.in b/datadog_checks_base/datadog_checks/base/data/agent_requirements.in index 8a42f1c144b4a..f5e685ca7e979 100644 --- a/datadog_checks_base/datadog_checks/base/data/agent_requirements.in +++ b/datadog_checks_base/datadog_checks/base/data/agent_requirements.in @@ -61,6 +61,7 @@ redis==3.3.11 requests==2.22.0 requests-kerberos==0.12.0 requests_ntlm==1.1.0 +rethinkdb==2.4.4 scandir==1.8 securesystemslib[crypto,pynacl]==0.14.2 selectors34==1.2.0; sys_platform == 'win32' and python_version < '3.4' diff --git a/datadog_checks_dev/datadog_checks/dev/docker.py b/datadog_checks_dev/datadog_checks/dev/docker.py index 22f261d2ae280..46e375c952f8f 100644 --- a/datadog_checks_dev/datadog_checks/dev/docker.py +++ b/datadog_checks_dev/datadog_checks/dev/docker.py @@ -3,6 +3,7 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import os from contextlib import contextmanager +from typing import Iterator from six import string_types from six.moves.urllib.parse import urlparse @@ -246,3 +247,11 @@ def _read_example_logs_config(check_root): return option['example'] raise ValueError('No logs example found') + + +@contextmanager +def temporarily_stop_service(service, compose_file, check=True): + # type: (str, str, bool) -> Iterator[None] + run_command(['docker-compose', '-f', compose_file, 'stop', service], capture=False, check=check) + yield + run_command(['docker-compose', '-f', compose_file, 'start', service], capture=False, check=check) diff --git 
a/rethinkdb/CHANGELOG.md b/rethinkdb/CHANGELOG.md new file mode 100644 index 0000000000000..77e25dcd26c5c --- /dev/null +++ b/rethinkdb/CHANGELOG.md @@ -0,0 +1,2 @@ +# CHANGELOG - RethinkDB + diff --git a/rethinkdb/MANIFEST.in b/rethinkdb/MANIFEST.in new file mode 100644 index 0000000000000..6fa1c2388a4eb --- /dev/null +++ b/rethinkdb/MANIFEST.in @@ -0,0 +1,10 @@ +graft datadog_checks +graft tests + +include MANIFEST.in +include README.md +include requirements.in +include requirements-dev.txt +include manifest.json + +global-exclude *.py[cod] __pycache__ diff --git a/rethinkdb/README.md b/rethinkdb/README.md new file mode 100644 index 0000000000000..e9f382fff8292 --- /dev/null +++ b/rethinkdb/README.md @@ -0,0 +1,82 @@ +# Agent Check: RethinkDB + +## Overview + +[RethinkDB][1] is a distributed document-oriented NoSQL database, with first class support for realtime change feeds. + +This check monitors a RethinkDB cluster through the Datadog Agent and collects metrics about performance, data availability, cluster configuration, and more. + +**Note**: this integration is compatible with RethinkDB **version 2.3.6 and above**. + +## Setup + +Follow the instructions below to install and configure this check for an Agent running on a host. For containerized environments, see the [Autodiscovery Integration Templates][2] for guidance on applying these instructions. + +### Installation + +The RethinkDB check is included in the [Datadog Agent][3] package. No additional installation is needed on your server. + +### Configuration + +1. If using RethinkDB 2.4+, add a `datadog-agent` user with read-only permissions on the `rethinkdb` database. 
You can use the following ReQL commands, and refer to [Permissions and user accounts][4] for details: + + ```python + r.db('rethinkdb').table('users').insert({'id': 'datadog-agent', 'password': ''}) + r.db('rethinkdb').grant('datadog-agent', {'read': True}) + ``` + + **Note**: on RethinkDB 2.3.x, granting permissions on the `rethinkdb` database is not supported. Skip this step and use your [admin account][5] below instead. + +2. Edit the `rethinkdb.d/conf.yaml` file in the `conf.d/` folder at the root of your [Agent's configuration directory][6]. See the [sample rethinkdb.d/conf.yaml][7] for all available configuration options. + + ```yaml + init_config: + + instances: + - host: localhost + port: 28015 + username: "" + password: "" + ``` + +3. [Restart the Agent][8]. + +**Note**: this integration collects metrics from all servers in the cluster, so you only need a single Agent. + +### Validation + +[Run the Agent's status subcommand][9] and look for `rethinkdb` under the Checks section. + +## Data Collected + +### Metrics + +See [metadata.csv][10] for a list of metrics provided by this check. + +### Service Checks + +- `rethinkdb.can_connect`: Returns `CRITICAL` if the Agent cannot reach the configured RethinkDB server, `OK` otherwise. +- `rethinkdb.table_status.status.ready_for_outdated_reads`: Returns `OK` if all shards of a table are ready to accept outdated read queries, `WARNING` otherwise. +- `rethinkdb.table_status.status.ready_for_reads`: Returns `OK` if all shards of a table are ready to accept read queries, `WARNING` otherwise. +- `rethinkdb.table_status.status.ready_for_writes`: Returns `OK` if all shards of a table are ready to accept write queries, `WARNING` otherwise. +- `rethinkdb.table_status.status.all_replicas_ready`: Returns `OK` if all replicas are ready for reads and writes, `WARNING` otherwise (e.g. if backfills are in progress). + +### Events + +RethinkDB does not include any events. + +## Troubleshooting + +Need help? 
Contact [Datadog support][11]. + +[1]: https://rethinkdb.com/ +[2]: https://docs.datadoghq.com/agent/autodiscovery/integrations +[3]: https://docs.datadoghq.com/agent +[4]: https://rethinkdb.com/docs/permissions-and-accounts/ +[5]: https://rethinkdb.com/docs/security/#the-admin-account +[6]: https://docs.datadoghq.com/agent/guide/agent-configuration-files/#agent-configuration-directory +[7]: https://github.com/DataDog/integrations-core/blob/master/rethinkdb/datadog_checks/rethinkdb/data/conf.yaml.example +[8]: https://docs.datadoghq.com/agent/guide/agent-commands/#start-stop-and-restart-the-agent +[9]: https://docs.datadoghq.com/agent/guide/agent-commands/#agent-status-and-information +[10]: https://github.com/DataDog/integrations-core/blob/master/rethinkdb/metadata.csv +[11]: https://docs.datadoghq.com/help diff --git a/rethinkdb/assets/configuration/spec.yaml b/rethinkdb/assets/configuration/spec.yaml new file mode 100644 index 0000000000000..ecc79d4629ccb --- /dev/null +++ b/rethinkdb/assets/configuration/spec.yaml @@ -0,0 +1,44 @@ +name: RethinkDB + +files: + - name: rethinkdb.yaml + + options: + - template: init_config + options: + - template: init_config/default + + - template: instances + options: + - name: host + required: true + description: Host of the RethinkDB server. + value: + example: localhost + type: string + + - name: port + required: true + description: Driver port of the RethinkDB server. + value: + example: 28015 + type: integer + + - name: username + description: The user account to connect as. + value: + type: string + + - name: password + description: The password for the user account to connect as. + value: + type: string + + - name: tls_ca_cert + description: | + Path to a TLS CA certificate to use when connecting to the RethinkDB server. 
+ See also: https://rethinkdb.com/docs/security/#using-tls + value: + type: string + + - template: instances/default diff --git a/rethinkdb/assets/service_checks.json b/rethinkdb/assets/service_checks.json new file mode 100644 index 0000000000000..0c23cb2e530ba --- /dev/null +++ b/rethinkdb/assets/service_checks.json @@ -0,0 +1,77 @@ +[ + { + "agent_version": "6.19.0", + "integration": "RethinkDB", + "groups": [ + "host", + "port" + ], + "check": "rethinkdb.can_connect", + "statuses": [ + "ok", + "critical" + ], + "name": "Can Connect", + "description": "Returns `CRITICAL` if the Agent is unable to reach the configured RethinkDB server, `OK` otherwise." + }, + { + "agent_version": "6.19.0", + "integration": "RethinkDB", + "groups": [ + "database", + "table" + ], + "check": "rethinkdb.table_status.status.ready_for_outdated_reads", + "statuses": [ + "ok", + "warning" + ], + "name": "Table Ready For Outdated Reads", + "description": "Returns `OK` if all shards of a table are ready to accept outdated read queries, `WARNING` otherwise." + }, + { + "agent_version": "6.19.0", + "integration": "RethinkDB", + "groups": [ + "database", + "table" + ], + "check": "rethinkdb.table_status.status.ready_for_reads", + "statuses": [ + "ok", + "warning" + ], + "name": "Table Ready For Reads", + "description": "Returns `OK` if all shards of a table are ready to accept read queries, `WARNING` otherwise." + }, + { + "agent_version": "6.19.0", + "integration": "RethinkDB", + "groups": [ + "database", + "table" + ], + "check": "rethinkdb.table_status.status.ready_for_writes", + "statuses": [ + "ok", + "warning" + ], + "name": "Table Ready For Writes", + "description": "Returns `OK` if all shards of a table are ready to accept write queries, `WARNING` otherwise." 
+ }, + { + "agent_version": "6.19.0", + "integration": "RethinkDB", + "groups": [ + "database", + "table" + ], + "check": "rethinkdb.table_status.status.all_replicas_ready", + "statuses": [ + "ok", + "warning" + ], + "name": "Table All Replicas Ready", + "description": "Returns `OK` if all replicas are ready for reads and writes, `WARNING` otherwise (e.g. if backfills are in progress)." + } +] diff --git a/rethinkdb/datadog_checks/__init__.py b/rethinkdb/datadog_checks/__init__.py new file mode 100644 index 0000000000000..cdddf032324d5 --- /dev/null +++ b/rethinkdb/datadog_checks/__init__.py @@ -0,0 +1,4 @@ +# (C) Datadog, Inc. 2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore diff --git a/rethinkdb/datadog_checks/rethinkdb/__about__.py b/rethinkdb/datadog_checks/rethinkdb/__about__.py new file mode 100644 index 0000000000000..e675c84da2568 --- /dev/null +++ b/rethinkdb/datadog_checks/rethinkdb/__about__.py @@ -0,0 +1,4 @@ +# (C) Datadog, Inc. 2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +__version__ = '0.0.1' diff --git a/rethinkdb/datadog_checks/rethinkdb/__init__.py b/rethinkdb/datadog_checks/rethinkdb/__init__.py new file mode 100644 index 0000000000000..1f5edac2932e8 --- /dev/null +++ b/rethinkdb/datadog_checks/rethinkdb/__init__.py @@ -0,0 +1,7 @@ +# (C) Datadog, Inc. 2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +from .__about__ import __version__ +from .check import RethinkDBCheck + +__all__ = ['__version__', 'RethinkDBCheck'] diff --git a/rethinkdb/datadog_checks/rethinkdb/check.py b/rethinkdb/datadog_checks/rethinkdb/check.py new file mode 100644 index 0000000000000..5656857539625 --- /dev/null +++ b/rethinkdb/datadog_checks/rethinkdb/check.py @@ -0,0 +1,115 @@ +# (C) Datadog, Inc. 
2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +from contextlib import contextmanager +from typing import Any, Callable, Iterator, Sequence, cast + +import rethinkdb + +from datadog_checks.base import AgentCheck + +from . import operations, queries +from .config import Config +from .document_db import DocumentQuery +from .document_db.types import Metric +from .types import Instance +from .version import parse_version + + +class RethinkDBCheck(AgentCheck): + """ + Collect metrics from a RethinkDB cluster. + """ + + SERVICE_CHECK_CONNECT = 'rethinkdb.can_connect' + + def __init__(self, *args, **kwargs): + # type: (*Any, **Any) -> None + super(RethinkDBCheck, self).__init__(*args, **kwargs) + + self.config = Config(cast(Instance, self.instance)) + + if self.config.password: + self.register_secret(self.config.password) + + self.queries = ( + queries.config_summary, + queries.cluster_statistics, + queries.server_statistics, + queries.table_statistics, + queries.replica_statistics, + queries.table_statuses, + queries.server_statuses, + queries.jobs_summary, + queries.current_issues_summary, + ) # type: Sequence[DocumentQuery] + + @contextmanager + def connect_submitting_service_checks(self): + # type: () -> Iterator[rethinkdb.net.Connection] + config = self.config + tags = config.service_check_tags + + try: + with rethinkdb.r.connect( + host=config.host, + port=config.port, + user=config.user, + password=config.password, + ssl={'ca_certs': config.tls_ca_cert} if config.tls_ca_cert is not None else {}, + ) as conn: + yield conn + except rethinkdb.errors.ReqlDriverError as exc: + message = 'Could not connect to RethinkDB server: {!r}'.format(exc) + self.log.error(message) + self.service_check(self.SERVICE_CHECK_CONNECT, self.CRITICAL, tags=tags, message=message) + raise + except Exception as exc: + message = 'Unexpected error while executing RethinkDB check: {!r}'.format(exc) + self.log.error(message) + 
self.service_check(self.SERVICE_CHECK_CONNECT, self.CRITICAL, tags=tags, message=message) + raise + else: + self.service_check(self.SERVICE_CHECK_CONNECT, self.OK, tags=tags) + + def collect_metrics(self, conn): + # type: (rethinkdb.net.Connection) -> Iterator[Metric] + """ + Collect metrics from the RethinkDB cluster we are connected to. + """ + for query in self.queries: + for metric in query.run(logger=self.log, conn=conn, config=self.config): + yield metric + + def submit_metric(self, metric): + # type: (Metric) -> None + submit = getattr(self, metric['type']) # type: Callable + submit(metric['name'], metric['value'], tags=self.config.tags + metric['tags']) + + def submit_version_metadata(self, conn): + # type: (rethinkdb.net.Connection) -> None + try: + raw_version = operations.get_connected_server_raw_version(conn) + except Exception as exc: + self.log.error('Error collecting version metadata: %s', exc) + return + + if raw_version is None: + return + + try: + version = parse_version(raw_version) + except ValueError as exc: + self.log.error('Failed to parse version: %s', exc) + return + + self.set_metadata('version', version) + + def check(self, instance): + # type: (Any) -> None + with self.connect_submitting_service_checks() as conn: + for metric in self.collect_metrics(conn): + self.submit_metric(metric) + + if self.is_metadata_collection_enabled(): + self.submit_version_metadata(conn) diff --git a/rethinkdb/datadog_checks/rethinkdb/config.py b/rethinkdb/datadog_checks/rethinkdb/config.py new file mode 100644 index 0000000000000..e2b51ab160716 --- /dev/null +++ b/rethinkdb/datadog_checks/rethinkdb/config.py @@ -0,0 +1,50 @@ +# (C) Datadog, Inc. 2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +from typing import List, Optional + +from datadog_checks.base import ConfigurationError + +from .types import Instance + + +class Config(object): + """ + Hold instance configuration for a RethinkDB check. 
+ + Encapsulates the validation of an `instance` dictionary while improving type information. + """ + + def __init__(self, instance=None): + # type: (Instance) -> None + if instance is None: + instance = {} + + host = instance.get('host', 'localhost') + port = instance.get('port', 28015) + user = instance.get('username') + password = instance.get('password') + tls_ca_cert = instance.get('tls_ca_cert') + tags = instance.get('tags', []) + + if not isinstance(host, str): + raise ConfigurationError('host {!r} must be a string (got {!r})'.format(host, type(host))) + + try: + port = int(port) + except (ValueError, TypeError): + raise ConfigurationError('port {!r} must be convertible to an integer (got {!r})'.format(port, type(port))) + + if port < 0: + raise ConfigurationError('port must be positive (got {!r})'.format(port)) + + if not isinstance(tags, list): + raise ConfigurationError('tags {!r} must be a list (got {!r})'.format(tags, type(tags))) + + self.host = host # type: str + self.port = port # type: int + self.user = user # type: Optional[str] + self.password = password # type: Optional[str] + self.tls_ca_cert = tls_ca_cert # type: Optional[str] + self.tags = tags # type: List[str] + self.service_check_tags = ('host:{}'.format(self.host), 'port:{}'.format(self.port)) + tuple(self.tags) diff --git a/rethinkdb/datadog_checks/rethinkdb/data/conf.yaml.example b/rethinkdb/datadog_checks/rethinkdb/data/conf.yaml.example new file mode 100644 index 0000000000000..bfa0b51a582bd --- /dev/null +++ b/rethinkdb/datadog_checks/rethinkdb/data/conf.yaml.example @@ -0,0 +1,69 @@ +## All options defined here are available to all instances. +# +init_config: + + ## @param service - string - optional + ## Attach the tag `service:` to every metric, event, and service check emitted by this integration. + ## + ## Additionally, this sets the default `service` for every log source. + # + # service: + +## Every instance is scheduled independent of the others. 
+# +instances: + + ## @param host - string - required + ## Host of the RethinkDB server. + # + - host: localhost + + ## @param port - integer - required + ## Driver port of the RethinkDB server. + # + port: 28015 + + ## @param username - string - optional + ## The user account to connect as. + # + # username: + + ## @param password - string - optional + ## The password for the user account to connect as. + # + # password: + + ## @param tls_ca_cert - string - optional + ## Path to a TLS CA certificate to use when connecting to the RethinkDB server. + ## See also: https://rethinkdb.com/docs/security/#using-tls + # + # tls_ca_cert: + + ## @param tags - list of strings - optional + ## A list of tags to attach to every metric and service check emitted by this instance. + ## + ## Learn more about tagging at https://docs.datadoghq.com/tagging + # + # tags: + # - : + # - : + + ## @param service - string - optional + ## Attach the tag `service:` to every metric, event, and service check emitted by this integration. + ## + ## Overrides any `service` defined in the `init_config` section. + # + # service: + + ## @param min_collection_interval - number - optional - default: 15 + ## This changes the collection interval of the check. For more information, see: + ## https://docs.datadoghq.com/developers/write_agent_check/#collection-interval + # + # min_collection_interval: 15 + + ## @param empty_default_hostname - boolean - optional - default: false + ## This forces the check to send metrics with no hostname. + ## + ## This is useful for cluster-level checks. + # + # empty_default_hostname: false diff --git a/rethinkdb/datadog_checks/rethinkdb/document_db/__init__.py b/rethinkdb/datadog_checks/rethinkdb/document_db/__init__.py new file mode 100644 index 0000000000000..19f6ee93f9a16 --- /dev/null +++ b/rethinkdb/datadog_checks/rethinkdb/document_db/__init__.py @@ -0,0 +1,6 @@ +# (C) Datadog, Inc. 
2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +from .query import DocumentQuery + +__all__ = ['DocumentQuery'] diff --git a/rethinkdb/datadog_checks/rethinkdb/document_db/_example.py b/rethinkdb/datadog_checks/rethinkdb/document_db/_example.py new file mode 100644 index 0000000000000..cce4088b981e7 --- /dev/null +++ b/rethinkdb/datadog_checks/rethinkdb/document_db/_example.py @@ -0,0 +1,47 @@ +# (C) Datadog, Inc. 2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +from datadog_checks.rethinkdb.document_db import DocumentQuery, transformers + + +def make_fake_query(): + # type: () -> list + # These documents would typically come from calls to a database client library. + document = { + 'memory': {'total_mb': 1000}, + 'disk_used_bytes_mb': 2500, + 'cpus': [{'usage': 50}, {'usage': 10}], + 'threads_per_process': {'server': 12, 'worker': 4}, + } + + # You may construct these tags from data retrieved from the database. + tags = ['db:main'] + + # Return any number of document/tags pairs. + # Note: yield syntax is supported too, eg `yield (document, tags)`. 
+ return [(document, tags)] + + +query = DocumentQuery( + source=make_fake_query, + name='system_usage', + prefix='system', + metrics=[ + {'type': 'gauge', 'path': 'memory.total_mb'}, + {'type': 'gauge', 'path': 'disk_used_bytes_mb'}, + {'type': 'gauge', 'path': 'cpus', 'name': 'cpus.total', 'transformer': transformers.length}, + ], + enumerations=[{'path': 'cpus', 'index_tag': 'cpu', 'metrics': [{'type': 'gauge', 'path': 'usage'}]}], + groups=[{'type': 'gauge', 'path': 'threads_per_process', 'key_tag': 'process'}], +) + + +assert list(query.run()) == [ + {'type': 'gauge', 'name': 'system.memory.total_mb', 'value': 1000, 'tags': ['db:main']}, + {'type': 'gauge', 'name': 'system.disk_used_bytes_mb', 'value': 2500, 'tags': ['db:main']}, + {'type': 'gauge', 'name': 'system.cpus.total', 'value': 2, 'tags': ['db:main']}, + {'type': 'gauge', 'name': 'system.cpus.usage', 'value': 50, 'tags': ['db:main', 'cpu:0']}, + {'type': 'gauge', 'name': 'system.cpus.usage', 'value': 10, 'tags': ['db:main', 'cpu:1']}, + {'type': 'gauge', 'name': 'system.threads_per_process', 'value': 12, 'tags': ['db:main', 'process:server']}, + {'type': 'gauge', 'name': 'system.threads_per_process', 'value': 4, 'tags': ['db:main', 'process:worker']}, +] diff --git a/rethinkdb/datadog_checks/rethinkdb/document_db/query.py b/rethinkdb/datadog_checks/rethinkdb/document_db/query.py new file mode 100644 index 0000000000000..dc76b932f3580 --- /dev/null +++ b/rethinkdb/datadog_checks/rethinkdb/document_db/query.py @@ -0,0 +1,131 @@ +# (C) Datadog, Inc. 2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +import logging +from typing import Any, Callable, Iterable, Iterator, List, Mapping, Sequence, Tuple, Union + +from .types import Enumeration, Group, Metric, MetricSpec +from .utils import dotted_join, lookup_dotted, null_logger + + +class DocumentQuery(object): + """ + A generic helper for retrieving metrics from document-oriented ("JSON") databases. 
+ + Example + ------- + See: + https://github.com/DataDog/integrations-core/blob/master/rethinkdb/datadog_checks/rethinkdb/document_db/_example.py + + Parameters + ---------- + source: + A callable that returns an iterable of `(document, tags)` pairs. + * Should accept the same `**kwargs` than what will be passed to `.run()`. + * `tags` will be applied to all metrics built from the corresponding `document`. + * All documents should have the same structure. + name: + A verbose name for the query, for logging purposes. Example: `'memory_usage'`. + prefix: + Will be prepended to all metric names. Example: `'my_integration.memory'`. + metrics: + Each item in this list corresponds to a metric that will be submitted to Datadog. + * `type` (required): metric type. Example: `'gauge'`. + * `path` (required): dotted path to the value of interest in a `document`. Example: `'memory_usage.memory_mb'`. + * `name`: an explicit metric name. If not set, the `path` is used. Example: `'memory_consumption'`. + * `transformer`: a callable applied to metric values before submission. See `document_db.transformers` for + built-in transformers. + enumerations: + Each item in this list corresponds to a set of metrics built from items in a JSON array. + The name comes from the `enumerate()` Python built-in, as enumerations allow tagging by index in the array. + * `path` (required): dotted path to the array of interest in a `document`. + * `index_tag` (required): indexes will be attached as this tag. Example: `'cpu_position'`. + * `metrics` (required): a list of metrics -- same structure as the `metrics` parameter. One copy will be + submitted for each item in the array. The enumeration `path` is automatically prepended to each metric `path`. + groups: + Each item in this list corresponds to a metric built from a JSON object (mapping) that represents aggregated + results, such as those returned by a GROUP BY operation. 
One copy of the metric will be submitted for each + key/value item in the mapping. + Keys: + * `path` (required): dotted path to the mapping of interest in a `document`. + * `key_tag` (required): keys of the mapping will be submitted as this tag. Example: `'country'`. + * `value_type` (required): metric type of values in the mapping. Example: `'gauge'`. + """ + + def __init__( + self, + source, # type: Callable[..., Iterable[Tuple[Any, List[str]]]] + name, # type: str + prefix, # type: str + metrics=None, # type: List[MetricSpec] + enumerations=None, # type: List[Enumeration] + groups=None, # type: List[Group] + ): + self.source = source + self.name = name + self.prefix = prefix + self.metrics = [] if metrics is None else metrics + self.enumerations = [] if enumerations is None else enumerations + self.groups = [] if groups is None else groups + + def _make_metric_from_spec(self, document, spec, tags): + # type: (Any, MetricSpec, List[str]) -> Metric + path = spec['path'] + name = spec.get('name', path) + value = lookup_dotted(document, path=path) + + if 'transformer' in spec and spec['transformer'] is not None: + value = spec['transformer'](value) + + if not isinstance(value, (int, float)): # pragma: no cover + raise RuntimeError('Expected float or int, got {!r} of type {}', value, type(value)) + + name = dotted_join((self.prefix, name)) + + return {'type': spec['type'], 'name': name, 'value': value, 'tags': tags} + + def _make_metrics_from_enumeration(self, document, enumeration, tags): + # type: (Any, Enumeration, List[str]) -> Iterator[Metric] + values = lookup_dotted(document, path=enumeration['path']) # type: Sequence + + for index, value in enumerate(values): + item_tags = tags + ['{}:{}'.format(enumeration['index_tag'], index)] + + for spec in enumeration['metrics']: + spec = spec.copy() + spec['name'] = dotted_join((enumeration['path'], spec['path'])) + yield self._make_metric_from_spec(value, spec, tags=item_tags) + + def _make_metrics_from_group(self, 
document, group, tags): + # type: (Any, Group, List[str]) -> Iterator[Metric] + mapping = lookup_dotted(document, path=group['path']) # type: Mapping + + for key in mapping: + item_tags = tags + ['{}:{}'.format(group['key_tag'], key)] + spec = { + 'type': group['type'], + 'name': group['path'], + 'path': key, + } # type: MetricSpec + yield self._make_metric_from_spec(mapping, spec, tags=item_tags) + + def run(self, logger=None, **kwargs): + # type: (Union[logging.Logger, logging.LoggerAdapter], **Any) -> Iterator[Metric] + if logger is None: + logger = null_logger # For convenience in unit tests and example scripts. + + logger.debug('document_query %s', self.name) + + for document, tags in self.source(**kwargs): + logger.debug('%s %r', self.name, document) + + for spec in self.metrics: + yield self._make_metric_from_spec(document, spec, tags=tags) + + for enumeration in self.enumerations: + for metric in self._make_metrics_from_enumeration(document, enumeration, tags=tags): + yield metric + + for group in self.groups: + for metric in self._make_metrics_from_group(document, group, tags=tags): + yield metric diff --git a/rethinkdb/datadog_checks/rethinkdb/document_db/transformers.py b/rethinkdb/datadog_checks/rethinkdb/document_db/transformers.py new file mode 100644 index 0000000000000..791485dbe17fd --- /dev/null +++ b/rethinkdb/datadog_checks/rethinkdb/document_db/transformers.py @@ -0,0 +1,29 @@ +# (C) Datadog, Inc. 2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +""" +Built-in value transformers. 
+""" +import datetime as dt +from typing import Any, Sequence + +from datadog_checks.base import AgentCheck +from datadog_checks.base.types import ServiceCheckStatus +from datadog_checks.base.utils.db.utils import normalize_datetime + + +def length(value): + # type: (Sequence) -> int + return len(value) + + +def to_time_elapsed(datetime): + # type: (dt.datetime) -> float + datetime = normalize_datetime(datetime) + elapsed = dt.datetime.now(datetime.tzinfo) - datetime + return elapsed.total_seconds() + + +def ok_warning(value): + # type: (Any) -> ServiceCheckStatus + return AgentCheck.OK if value else AgentCheck.WARNING diff --git a/rethinkdb/datadog_checks/rethinkdb/document_db/types.py b/rethinkdb/datadog_checks/rethinkdb/document_db/types.py new file mode 100644 index 0000000000000..f617911a62bdd --- /dev/null +++ b/rethinkdb/datadog_checks/rethinkdb/document_db/types.py @@ -0,0 +1,22 @@ +# (C) Datadog, Inc. 2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +from typing import Any, Callable, List, Literal, Optional, TypedDict, Union + +MetricType = Literal['gauge', 'count', 'monotonic_count', 'rate', 'service_check'] +Metric = TypedDict('Metric', {'type': MetricType, 'name': str, 'value': float, 'tags': List[str]}) + +MetricSpec = TypedDict( + 'MetricSpec', + { + 'type': MetricType, + 'path': str, # Used as the default name. + 'name': str, # An explicit name for the metric. 
+ 'transformer': Optional[Callable[[Any], Union[int, float]]], + }, + total=False, +) + +Enumeration = TypedDict('Enumeration', {'path': str, 'index_tag': str, 'metrics': List[MetricSpec]}) + +Group = TypedDict('Group', {'type': MetricType, 'path': str, 'key_tag': str}) diff --git a/rethinkdb/datadog_checks/rethinkdb/document_db/utils.py b/rethinkdb/datadog_checks/rethinkdb/document_db/utils.py new file mode 100644 index 0000000000000..0c35c4d7b069b --- /dev/null +++ b/rethinkdb/datadog_checks/rethinkdb/document_db/utils.py @@ -0,0 +1,50 @@ +# (C) Datadog, Inc. 2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +""" +Miscellaneous utilities. +""" +import logging +from typing import Any, Mapping, Sequence + +null_logger = logging.getLogger('null') +null_logger.addHandler(logging.NullHandler()) + + +def lookup_dotted(dct, path): + # type: (Mapping, str) -> Any + """ + Given a mapping and a dotted path `key1.key2...keyN`, return the item at `dct[key1][key2]...[keyN]`. + + Raises `ValueError` if an issue is encountered while traversing `path`. 
+ """ + if not path: + return dct + + keys = [key for key in reversed(path.split('.'))] + value = dct + + while keys: + if not isinstance(value, Mapping): + raise ValueError( + 'followed path {!r} with remaining keys {!r}, but value {!r} is not a mapping'.format(path, keys, value) + ) + + key = keys.pop() + + try: + value = value[key] + except KeyError as exc: + raise ValueError('Failed to retrieve key {!r} on value {!r}: {!r}'.format(key, value, exc)) + + return value + + +def dotted_join(values): + # type: (Sequence[str]) -> str + return '.'.join(filter(None, values)) + + +def no_op(*args, **kwargs): + # type: (*Any, **Any) -> None + pass diff --git a/rethinkdb/datadog_checks/rethinkdb/operations.py b/rethinkdb/datadog_checks/rethinkdb/operations.py new file mode 100644 index 0000000000000..cccb21a2cc840 --- /dev/null +++ b/rethinkdb/datadog_checks/rethinkdb/operations.py @@ -0,0 +1,259 @@ +# (C) Datadog, Inc. 2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +""" +Definition of high-level RethinkDB operations used by the RethinkDB check. + +Python ReQL reference documentation: https://rethinkdb.com/api/python/ +""" + +from typing import Any, Iterator, List, Mapping, Optional, Tuple + +import rethinkdb + +from .config import Config +from .types import ( + ClusterStats, + ConfigSummary, + ConnectionServer, + CurrentIssuesSummary, + JobSummary, + JoinRow, + ReplicaStats, + Server, + ServerStats, + ServerStatus, + ShardReplica, + Table, + TableStats, + TableStatus, +) + +# The usual entrypoint for building ReQL queries. +r = rethinkdb.r + +# All system tables are located in this database. +# See: https://rethinkdb.com/docs/system-tables/ +system = r.db('rethinkdb') + + +def get_connected_server_raw_version(conn, **kwargs): + # type: (rethinkdb.net.Connection, **Any) -> Optional[str] + """ + Return the RethinkDB version used by the server at the other end of the connection, in raw string format. 
+ """ + # See: https://rethinkdb.com/docs/system-tables/#server_status + server = conn.server() # type: ConnectionServer + server_status = system.table('server_status').get(server['id']).run(conn) # type: Optional[ServerStatus] + + if server_status is None: + if server['proxy']: + # Proxies don't have an entry in the `server_status` table. + return None + else: # pragma: no cover + raise RuntimeError('Expected a `server_status` entry for server {!r}, got none'.format(server)) + + return server_status['process']['version'] + + +def get_config_summary(conn, **kwargs): + # type: (rethinkdb.net.Connection, **Any) -> Iterator[Tuple[ConfigSummary, List[str]]] + """ + Return a summary of the cluster configuration. + """ + table_config = system.table('table_config') + server_config = system.table('server_config') + db_config = system.table('db_config') + + # Need to `.run()` these separately because ReQL does not support putting grouped data in raw expressions yet. + # See: https://github.com/rethinkdb/rethinkdb/issues/2067 + + tables_per_database = table_config.group('db').count().run(conn) # type: Mapping[str, int] + + secondary_indexes_per_table = ( + # NOTE: this is an example of a map-reduce query. + # See: https://rethinkdb.com/docs/map-reduce/#a-more-complex-example + table_config.pluck('name', 'indexes') + .concat_map(lambda row: row['indexes'].map(lambda _: {'table': row['name']})) + .group('table') + .count() + .run(conn) + ) # type: Mapping[str, int] + + summary = { + 'servers': server_config.count(), + 'databases': db_config.count(), + 'tables_per_database': tables_per_database, + 'secondary_indexes_per_table': secondary_indexes_per_table, + } # type: ConfigSummary # Enforce keys to match. + + yield r.expr(summary).run(conn), [] + + +def get_cluster_statistics(conn, **kwargs): + # type: (rethinkdb.net.Connection, **Any) -> Iterator[Tuple[ClusterStats, List[str]]] + """ + Retrieve statistics about the cluster. 
+ """ + yield system.table('stats').get(['cluster']).run(conn), [] + + +def get_servers_statistics(conn, **kwargs): + # type: (rethinkdb.net.Connection, **Any) -> Iterator[Tuple[ServerStats, List[str]]] + """ + Retrieve statistics about each server in the cluster. + """ + # For servers: stats['id'] = ['server', ''] + is_server_stats_row = r.row['id'].nth(0) == 'server' + server_id = r.row['id'].nth(1) + + stats = system.table('stats') + server_config = system.table('server_config') + + rows = stats.filter(is_server_stats_row).eq_join(server_id, server_config).run(conn) # type: Iterator[JoinRow] + + for row in rows: + server_stats = row['left'] # type: ServerStats + server = row['right'] # type: Server + tags = ['server:{}'.format(server['name'])] + tags.extend(server['tags']) + yield server_stats, tags + + +def get_tables_statistics(conn, **kwargs): + # type: (rethinkdb.net.Connection, **Any) -> Iterator[Tuple[TableStats, List[str]]] + """ + Retrieve statistics about each table in the cluster. + """ + # For tables: stats['id'] = ['table', ''] + is_table_stats_row = r.row['id'].nth(0) == 'table' + table_id = r.row['id'].nth(1) + + stats = system.table('stats') + table_config = system.table('table_config') + + rows = stats.filter(is_table_stats_row).eq_join(table_id, table_config).run(conn) # type: Iterator[JoinRow] + + for row in rows: + table_stats = row['left'] # type: TableStats + table = row['right'] # type: Table + tags = ['table:{}'.format(table['name']), 'database:{}'.format(table['db'])] + yield table_stats, tags + + +def get_replicas_statistics(conn, **kwargs): + # type: (rethinkdb.net.Connection, **Any) -> Iterator[Tuple[ReplicaStats, List[str]]] + """ + Retrieve statistics about each replica (table/server pair) in the cluster. + """ + # NOTE: To reduce bandwidth usage, we make heavy use of the `.pluck()` operation, i.e. 
ask RethinkDB + # for a specific set of fields, instead of sending entire objects, which can be expensive when joining + # data as we do here. + # See: https://rethinkdb.com/api/python/pluck/ + + stats = system.table('stats') + server_config = system.table('server_config') + table_config = system.table('table_config') + table_status = system.table( + 'table_status', + # Required so that we can join on 'server_config' below without having to look up UUIDs from names. + # See: https://rethinkdb.com/api/python/table/#description + identifier_format='uuid', + ) + + query = ( + # Start from table statuses, as they contain the list of replicas for each shard of the table. + # See: https://rethinkdb.com/docs/system-tables/#table_status + table_status.pluck('id', {'shards': ['replicas']}) + .merge({'table': r.row['id']}) + .without('id') + # Flatten each table status entry into one entry per shard and replica. + .concat_map(lambda row: row['shards'].map(lambda shard: row.merge(shard.pluck('replicas')))) + .without('shards') + .concat_map( + lambda row: (row['replicas'].map(lambda replica: row.merge({'replica': replica.pluck('server', 'state')}))) + ) + .without('replicas') + # Grab table information for each replica. + # See: https://rethinkdb.com/docs/system-tables#table_config + .merge({'table': table_config.get(r.row['table']).pluck('id', 'db', 'name')}) + # Grab server information for each replica. + # See: https://rethinkdb.com/docs/system-tables#server_config + .merge({'server': server_config.get(r.row['replica']['server'])}) + .filter(r.row['server']) # Skip replicas stored on disconnected servers. + .merge({'server': r.row['server'].pluck('id', 'name', 'tags')}) + # Grab statistics for each replica. 
def get_table_statuses(conn, **kwargs):
    # type: (rethinkdb.net.Connection, **Any) -> Iterator[Tuple[TableStatus, List[str]]]
    """
    Yield one status document per table in the cluster, tagged with the table and database names.
    """
    for status in system.table('table_status').run(conn):  # type: TableStatus
        yield status, ['table:{}'.format(status['name']), 'database:{}'.format(status['db'])]


def get_server_statuses(conn, **kwargs):
    # type: (rethinkdb.net.Connection, **Any) -> Iterator[Tuple[ServerStatus, List[str]]]
    """
    Yield one status document per server in the cluster, tagged with the server name.
    """
    for status in system.table('server_status').run(conn):  # type: ServerStatus
        yield status, ['server:{}'.format(status['name'])]
+ """ + jobs_per_type = system.table('jobs').group('type').count().run(conn) + yield {'jobs': jobs_per_type}, [] + + +def get_current_issues_summary(conn, **kwargs): + # type: (rethinkdb.net.Connection, **Any) -> Iterator[Tuple[CurrentIssuesSummary, List[str]]] + """ + Retrieve a summary of problems detected within the cluster. + """ + current_issues = system.table('current_issues').pluck('type', 'critical') + + # NOTE: Need to `.run()` these separately because ReQL does not support putting grouped data in raw + # expressions yet. See: https://github.com/rethinkdb/rethinkdb/issues/2067 + + issues_by_type = current_issues.group('type').count().run(conn) # type: Mapping[str, int] + critical_issues_by_type = ( + current_issues.filter(r.row['critical']).group('type').count().run(conn) + ) # type: Mapping[str, int] + + yield {'issues': issues_by_type, 'critical_issues': critical_issues_by_type}, [] diff --git a/rethinkdb/datadog_checks/rethinkdb/py.typed b/rethinkdb/datadog_checks/rethinkdb/py.typed new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/rethinkdb/datadog_checks/rethinkdb/queries.py b/rethinkdb/datadog_checks/rethinkdb/queries.py new file mode 100644 index 0000000000000..d449cd8f40f95 --- /dev/null +++ b/rethinkdb/datadog_checks/rethinkdb/queries.py @@ -0,0 +1,148 @@ +# (C) Datadog, Inc. 2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +from . import operations +from .document_db import DocumentQuery, transformers + +# System configuration. 
+ +# See: https://rethinkdb.com/docs/system-tables/#configuration-tables +config_summary = DocumentQuery( + source=operations.get_config_summary, + name='config_summary', + prefix='rethinkdb.config', + metrics=[{'type': 'gauge', 'path': 'servers'}, {'type': 'gauge', 'path': 'databases'}], + groups=[ + {'type': 'gauge', 'path': 'tables_per_database', 'key_tag': 'database'}, + {'type': 'gauge', 'path': 'secondary_indexes_per_table', 'key_tag': 'table'}, + ], +) + + +# System statistics. + +# See: https://rethinkdb.com/docs/system-stats#cluster +cluster_statistics = DocumentQuery( + source=operations.get_cluster_statistics, + name='cluster_statistics', + prefix='rethinkdb.stats.cluster', + metrics=[ + {'type': 'gauge', 'path': 'query_engine.queries_per_sec'}, + {'type': 'gauge', 'path': 'query_engine.read_docs_per_sec'}, + {'type': 'gauge', 'path': 'query_engine.written_docs_per_sec'}, + ], +) + +# See: https://rethinkdb.com/docs/system-stats#server +server_statistics = DocumentQuery( + source=operations.get_servers_statistics, + name='server_statistics', + prefix='rethinkdb.stats.server', + metrics=[ + {'type': 'gauge', 'path': 'query_engine.client_connections'}, + {'type': 'gauge', 'path': 'query_engine.clients_active'}, + {'type': 'gauge', 'path': 'query_engine.queries_per_sec'}, + {'type': 'monotonic_count', 'path': 'query_engine.queries_total'}, + {'type': 'gauge', 'path': 'query_engine.read_docs_per_sec'}, + {'type': 'monotonic_count', 'path': 'query_engine.read_docs_total'}, + {'type': 'gauge', 'path': 'query_engine.written_docs_per_sec'}, + {'type': 'monotonic_count', 'path': 'query_engine.written_docs_total'}, + ], +) + +# See: https://rethinkdb.com/docs/system-stats#table +table_statistics = DocumentQuery( + source=operations.get_tables_statistics, + name='table_statistics', + prefix='rethinkdb.stats.table', + metrics=[ + {'type': 'gauge', 'path': 'query_engine.read_docs_per_sec'}, + {'type': 'gauge', 'path': 'query_engine.written_docs_per_sec'}, + ], +) + 
+# See: https://rethinkdb.com/docs/system-stats#replica +replica_statistics = DocumentQuery( + source=operations.get_replicas_statistics, + name='replica_statistics', + prefix='rethinkdb.stats.table_server', + metrics=[ + {'type': 'gauge', 'path': 'query_engine.read_docs_per_sec'}, + {'type': 'monotonic_count', 'path': 'query_engine.read_docs_total'}, + {'type': 'gauge', 'path': 'query_engine.written_docs_per_sec'}, + {'type': 'monotonic_count', 'path': 'query_engine.written_docs_total'}, + {'type': 'gauge', 'path': 'storage_engine.cache.in_use_bytes'}, + {'type': 'gauge', 'path': 'storage_engine.disk.read_bytes_per_sec'}, + {'type': 'monotonic_count', 'path': 'storage_engine.disk.read_bytes_total'}, + {'type': 'gauge', 'path': 'storage_engine.disk.written_bytes_per_sec'}, + {'type': 'monotonic_count', 'path': 'storage_engine.disk.written_bytes_total'}, + {'type': 'gauge', 'path': 'storage_engine.disk.space_usage.metadata_bytes'}, + {'type': 'gauge', 'path': 'storage_engine.disk.space_usage.data_bytes'}, + {'type': 'gauge', 'path': 'storage_engine.disk.space_usage.garbage_bytes'}, + {'type': 'gauge', 'path': 'storage_engine.disk.space_usage.preallocated_bytes'}, + ], +) + + +# System status. 
+ +# See: https://rethinkdb.com/docs/system-tables/#table_status +table_statuses = DocumentQuery( + source=operations.get_table_statuses, + name='table_status', + prefix='rethinkdb.table_status', + metrics=[ + {'type': 'service_check', 'path': 'status.ready_for_outdated_reads', 'transformer': transformers.ok_warning}, + {'type': 'service_check', 'path': 'status.ready_for_reads', 'transformer': transformers.ok_warning}, + {'type': 'service_check', 'path': 'status.ready_for_writes', 'transformer': transformers.ok_warning}, + {'type': 'service_check', 'path': 'status.all_replicas_ready', 'transformer': transformers.ok_warning}, + {'type': 'gauge', 'path': 'shards', 'transformer': transformers.length}, + ], + enumerations=[ + { + 'path': 'shards', + 'index_tag': 'shard', + 'metrics': [ + {'type': 'gauge', 'path': 'replicas', 'transformer': transformers.length}, + {'type': 'gauge', 'path': 'primary_replicas', 'transformer': transformers.length}, + ], + } + ], +) + +# See: https://rethinkdb.com/docs/system-tables/#server_status +server_statuses = DocumentQuery( + source=operations.get_server_statuses, + name='server_status', + prefix='rethinkdb.server_status', + metrics=[ + {'type': 'gauge', 'path': 'network.time_connected', 'transformer': transformers.to_time_elapsed}, + {'type': 'gauge', 'path': 'network.connected_to', 'transformer': transformers.length}, + {'type': 'gauge', 'path': 'process.time_started', 'transformer': transformers.to_time_elapsed}, + ], +) + + +# System jobs. + +# See: https://rethinkdb.com/docs/system-jobs/ +jobs_summary = DocumentQuery( + source=operations.get_jobs_summary, + name='jobs', + prefix='rethinkdb.system_jobs', + groups=[{'type': 'gauge', 'path': 'jobs', 'key_tag': 'job_type'}], +) + + +# System current issues. 
+ +# See: https://rethinkdb.com/docs/system-issues/ +current_issues_summary = DocumentQuery( + source=operations.get_current_issues_summary, + name='current_issues', + prefix='rethinkdb.current_issues', + groups=[ + {'type': 'gauge', 'path': 'issues', 'key_tag': 'issue_type'}, + {'type': 'gauge', 'path': 'critical_issues', 'key_tag': 'issue_type'}, + ], +) diff --git a/rethinkdb/datadog_checks/rethinkdb/types.py b/rethinkdb/datadog_checks/rethinkdb/types.py new file mode 100644 index 0000000000000..4e75cb0b42324 --- /dev/null +++ b/rethinkdb/datadog_checks/rethinkdb/types.py @@ -0,0 +1,159 @@ +# (C) Datadog, Inc. 2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +""" +Declarations used for type checking our code (e.g. manipulation of JSON documents returned by RethinkDB). +""" +import datetime as dt +from typing import Any, List, Literal, Mapping, Tuple, TypedDict + +# Check interfaces. + +Instance = TypedDict( + 'Instance', + {'host': str, 'port': int, 'username': str, 'password': str, 'tls_ca_cert': str, 'tags': List[str]}, + total=False, +) + + +# Configuration documents. +# See: https://rethinkdb.com/docs/system-tables/#configuration-tables + +Server = TypedDict('Server', {'id': str, 'name': str, 'cache_size_mb': str, 'tags': List[str]}) + +Table = TypedDict('Table', {'id': str, 'name': str, 'db': str}) + +ConfigSummary = TypedDict( + 'ConfigSummary', + { + 'servers': int, + 'databases': int, + 'tables_per_database': Mapping[str, int], + 'secondary_indexes_per_table': Mapping[str, int], + }, +) + + +# System statistics documents. 
+# See: https://rethinkdb.com/docs/system-stats/ + +ClusterQueryEngine = TypedDict( + 'ClusterQueryEngine', {'queries_per_sec': int, 'read_docs_per_sec': int, 'written_docs_per_sec': int}, +) + +ClusterStats = TypedDict('ClusterStats', {'id': Tuple[Literal['cluster']], 'query_engine': ClusterQueryEngine}) + +ServerQueryEngine = TypedDict( + 'ServerQueryEngine', + { + 'client_connections': int, + 'clients_active': int, + 'queries_per_sec': int, + 'queries_total': int, + 'read_docs_per_sec': int, + 'read_docs_total': int, + 'written_docs_per_sec': int, + 'written_docs_total': int, + }, +) + +ServerStats = TypedDict( + 'ServerStats', {'id': Tuple[Literal['server'], str], 'server': str, 'query_engine': ServerQueryEngine}, +) + +TableQueryEngine = TypedDict('TableQueryEngine', {'read_docs_per_sec': int, 'written_docs_per_sec': int}) + +TableStats = TypedDict( + 'TableStats', {'id': Tuple[Literal['table'], str], 'table': str, 'db': str, 'query_engine': TableQueryEngine}, +) + +ReplicaQueryEngine = TypedDict( + 'ReplicaQueryEngine', + {'read_docs_per_sec': int, 'read_docs_total': int, 'written_docs_per_sec': int, 'written_docs_total': int}, +) + +ReplicaCache = TypedDict('ReplicaCache', {'in_use_bytes': int}) + +ReplicaDiskSpaceUsage = TypedDict( + 'ReplicaDiskSpaceUsage', {'metadata_bytes': int, 'data_bytes': int, 'garbage_bytes': int, 'preallocated_bytes': int} +) + +ReplicaDisk = TypedDict( + 'ReplicaDisk', + { + 'read_bytes_per_sec': int, + 'read_bytes_total': int, + 'written_bytes_per_sec': int, + 'written_bytes_total': int, + 'space_usage': ReplicaDiskSpaceUsage, + }, +) + +ReplicaStorageEngine = TypedDict('ReplicaStorageEngine', {'cache': ReplicaCache, 'disk': ReplicaDisk}) + +ReplicaStats = TypedDict( + 'ReplicaStats', + { + 'id': Tuple[Literal['table_server'], str, str], + 'server': str, + 'table': str, + 'db': str, + 'query_engine': ReplicaQueryEngine, + 'storage_engine': ReplicaStorageEngine, + }, +) + + +# Status documents. 
+# See: https://rethinkdb.com/docs/system-tables/#status-tables + +ShardReplica = TypedDict('ShardReplica', {'server': str, 'state': str}) + +Shard = TypedDict('Shard', {'primary_replicas': List[str], 'replicas': List[ShardReplica]}) + +TableStatusFlags = TypedDict( + 'TableStatusFlags', + {'ready_for_outdated_reads': bool, 'ready_for_reads': bool, 'ready_for_writes': bool, 'all_replicas_ready': bool}, +) + +TableStatus = TypedDict( + 'TableStatus', {'id': str, 'name': str, 'db': str, 'status': TableStatusFlags, 'shards': List[Shard]} +) + +ServerNetwork = TypedDict( + 'ServerNetwork', + { + # NOTE: only fields of interest are listed here. + 'time_connected': dt.datetime, + 'connected_to': Mapping[str, bool], + }, +) + +ServerProcess = TypedDict( + 'ServerProcess', {'argv': List[str], 'cache_size_mb': int, 'pid': int, 'time_started': dt.datetime, 'version': str}, +) + +ServerStatus = TypedDict('ServerStatus', {'id': str, 'name': str, 'network': ServerNetwork, 'process': ServerProcess}) + + +# System jobs. + +JobSummary = TypedDict('JobSummary', {'jobs': Mapping[str, int]}) + + +# System current issues. + +CurrentIssuesSummary = TypedDict( + 'CurrentIssuesSummary', {'issues': Mapping[str, int], 'critical_issues': Mapping[str, int]}, +) + + +# Miscellaneous. + +# See: https://rethinkdb.com/api/python/eq_join +# NOTE: Ideally 'left' and 'right' would be generics here, but this isn't supported by 'TypedDict' yet. +# See: https://github.com/python/mypy/issues/3863 +JoinRow = TypedDict('JoinRow', {'left': Any, 'right': Any}) + +# See: https://rethinkdb.com/api/python/server +ConnectionServer = TypedDict('ConnectionServer', {'id': str, 'name': str, 'proxy': bool}) diff --git a/rethinkdb/datadog_checks/rethinkdb/version.py b/rethinkdb/datadog_checks/rethinkdb/version.py new file mode 100644 index 0000000000000..4265350411ed5 --- /dev/null +++ b/rethinkdb/datadog_checks/rethinkdb/version.py @@ -0,0 +1,28 @@ +# (C) Datadog, Inc. 
# Matches version strings such as 'rethinkdb 2.4.0~0bionic (...)' and captures the leading
# dotted-digits version. The named group was lost in the original (`(?P[\d\.]+)` is invalid
# regex syntax and would raise at compile time); restore `(?P<rethinkdb_version>...)`.
# See: https://github.com/rethinkdb/rethinkdb/blob/95cfed8a62f08e3198ac25417c9b6900be8b6877/src/utils.hpp#L117
_RETHINKDB_VERSION_STR_REGEX = re.compile(r'^rethinkdb\s+(?P<rethinkdb_version>[\d\.]+)')


def parse_version(rethinkdb_version_string):
    # type: (str) -> str
    """
    Given a RethinkDB version string, extract the SemVer version.

    Raises `ValueError` if the string does not look like a RethinkDB version string.

    Example
    -------
    >>> parse_version('rethinkdb 2.4.0~0bionic (CLANG 6.0.0 (tags/RELEASE_600/final))')
    '2.4.0'
    """
    match = _RETHINKDB_VERSION_STR_REGEX.match(rethinkdb_version_string)

    if match is None:
        message = 'Version string {!r} did not match pattern {!r}'.format(
            rethinkdb_version_string, _RETHINKDB_VERSION_STR_REGEX
        )
        raise ValueError(message)

    return match.group('rethinkdb_version')
b/rethinkdb/metadata.csv @@ -0,0 +1,40 @@ +metric_name,metric_type,interval,unit_name,per_unit_name,description,orientation,integration,short_name +rethinkdb.config.servers,gauge,,node,,Number of connected servers in the cluster.,0,rethinkdb,Servers +rethinkdb.config.databases,gauge,,,,Number of databases in the cluster.,0,rethinkdb,Databases +rethinkdb.config.tables_per_database,gauge,,table,,Number of tables in a given database.,0,rethinkdb,Tables per database +rethinkdb.config.secondary_indexes_per_table,gauge,,index,,Number of secondary indexes in a given table.,0,rethinkdb,Secondary indexes per table +rethinkdb.stats.cluster.query_engine.queries_per_sec,gauge,,query,second,Number of queries executed in a cluster per second.,0,rethinkdb,Cluster queries per sec +rethinkdb.stats.cluster.query_engine.read_docs_per_sec,gauge,,document,second,Number of documents read in a cluster per second.,0,rethinkdb,Cluster read docs per sec +rethinkdb.stats.cluster.query_engine.written_docs_per_sec,gauge,,document,second,Number of documents written in a cluster per second.,0,rethinkdb,Cluster written docs per sec +rethinkdb.stats.server.query_engine.queries_per_sec,gauge,,query,second,Number of queries executed on a server per second.,0,rethinkdb,Server queries per sec +rethinkdb.stats.server.query_engine.queries_total,count,,query,,Total number of queries executed on a server.,0,rethinkdb,Server queries total +rethinkdb.stats.server.query_engine.read_docs_per_sec,gauge,,document,second,Number of documents read from a server per second.,0,rethinkdb,Server read docs per sec +rethinkdb.stats.server.query_engine.read_docs_total,count,,document,,Total number of documents read from a server.,0,rethinkdb,Server read docs total +rethinkdb.stats.server.query_engine.written_docs_per_sec,gauge,,document,second,Number of documents written to a server per second.,0,rethinkdb,Server written docs per sec +rethinkdb.stats.server.query_engine.written_docs_total,count,,document,,Total number of 
documents written to a server.,0,rethinkdb,Server written docs total +rethinkdb.stats.server.query_engine.client_connections,gauge,,connection,,Current number of client connections to a server.,0,rethinkdb,Server client connections +rethinkdb.stats.server.query_engine.clients_active,gauge,,host,,Current number of clients actively connected to a server.,0,rethinkdb,Server clients active +rethinkdb.stats.table.query_engine.read_docs_per_sec,gauge,,document,second,Number of documents read from a table per second.,0,rethinkdb,Table read docs per sec +rethinkdb.stats.table.query_engine.written_docs_per_sec,gauge,,document,second,Number of documents written to a table per second.,0,rethinkdb,Table written docs per sec +rethinkdb.stats.table_server.query_engine.read_docs_per_sec,gauge,,document,second,Number of documents read from a replica per second.,0,rethinkdb,Replica read docs per sec +rethinkdb.stats.table_server.query_engine.read_docs_total,count,,document,,Total number of documents read from a replica.,0,rethinkdb,Replica read docs total +rethinkdb.stats.table_server.query_engine.written_docs_per_sec,gauge,,document,second,Number of documents written to a replica per second.,0,rethinkdb,Replica written docs per sec +rethinkdb.stats.table_server.query_engine.written_docs_total,count,,document,,Total number of documents written to a replica.,0,rethinkdb,Replica written docs total +rethinkdb.stats.table_server.storage_engine.cache.in_use_bytes,gauge,,byte,,Current amount of memory used by the cache on a replica.,0,rethinkdb,Replica cache bytes +rethinkdb.stats.table_server.storage_engine.disk.read_bytes_per_sec,gauge,,byte,second,Number of bytes read from the disk of a replica per second.,0,rethinkdb,Replica disk read bytes per sec +rethinkdb.stats.table_server.storage_engine.disk.read_bytes_total,count,,byte,,Total number of bytes read from the disk of a replica.,0,rethinkdb,Replica disk read bytes total 
+rethinkdb.stats.table_server.storage_engine.disk.written_bytes_per_sec,gauge,,byte,second,Number of bytes written to the disk of a replica per second.,0,rethinkdb,Replica disk written bytes per sec +rethinkdb.stats.table_server.storage_engine.disk.written_bytes_total,count,,byte,,Total number of bytes written to the disk of a replica.,0,rethinkdb,Replica disk written bytes total +rethinkdb.stats.table_server.storage_engine.disk.space_usage.metadata_bytes,gauge,,byte,,Current disk space used by metadata on a replica.,0,rethinkdb,Replica disk metadata bytes +rethinkdb.stats.table_server.storage_engine.disk.space_usage.data_bytes,gauge,,byte,,Current disk space used by data on a replica.,0,rethinkdb,Replica disk data bytes +rethinkdb.stats.table_server.storage_engine.disk.space_usage.garbage_bytes,gauge,,byte,,Current disk space used by the garbage collector on a replica.,0,rethinkdb,Replica disk garbage bytes +rethinkdb.stats.table_server.storage_engine.disk.space_usage.preallocated_bytes,gauge,,byte,,Current disk space preallocated on a replica.,0,rethinkdb,Replica disk preallocated bytes +rethinkdb.table_status.shards,gauge,,shard,,Total number of shards for a table.,0,rethinkdb,Table status shards +rethinkdb.table_status.shards.replicas,gauge,,node,,Total number of replicas for a table shard.,0,rethinkdb,Table shard replicas +rethinkdb.table_status.shards.primary_replicas,gauge,,node,,Total number of primary replicas for a table shard.,0,rethinkdb,Table shard primary replicas +rethinkdb.server_status.network.time_connected,gauge,,second,,Current total time a server has been connected to the network.,0,rethinkdb,Server time connected +rethinkdb.server_status.network.connected_to,gauge,,node,,Number of other RethinkDB servers a server is currently connected to.,0,rethinkdb,Server num connected to +rethinkdb.server_status.process.time_started,gauge,,second,,Time when the RethinkDB server process started.,0,rethinkdb,Server process time started 
+rethinkdb.system_jobs.jobs,gauge,,job,,"Total number of currently running system jobs, tagged by `job_type`.",0,rethinkdb,Jobs +rethinkdb.current_issues.issues,gauge,,,,Total number of current issues of a given issue_type.,0,rethinkdb,Issues +rethinkdb.current_issues.critical_issues,gauge,,,,Total number of critical current issues of a given issue_type.,0,rethinkdb,Critical issues diff --git a/rethinkdb/requirements-dev.txt b/rethinkdb/requirements-dev.txt new file mode 100644 index 0000000000000..98b5456bbd0e2 --- /dev/null +++ b/rethinkdb/requirements-dev.txt @@ -0,0 +1 @@ +-e ../datadog_checks_dev diff --git a/rethinkdb/requirements.in b/rethinkdb/requirements.in new file mode 100644 index 0000000000000..5b339f2f0d300 --- /dev/null +++ b/rethinkdb/requirements.in @@ -0,0 +1 @@ +rethinkdb==2.4.4 diff --git a/rethinkdb/setup.py b/rethinkdb/setup.py new file mode 100644 index 0000000000000..fe36d1ac3761a --- /dev/null +++ b/rethinkdb/setup.py @@ -0,0 +1,54 @@ +# (C) Datadog, Inc. 2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +from codecs import open # To use a consistent encoding +from os import path + +from setuptools import setup + +HERE = path.dirname(path.abspath(__file__)) + +# Get version info +ABOUT = {} +with open(path.join(HERE, 'datadog_checks', 'rethinkdb', '__about__.py')) as f: + exec(f.read(), ABOUT) + +# Get the long description from the README file +with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: + long_description = f.read() + + +CHECKS_BASE_REQ = 'datadog-checks-base>=11.2.0' + + +setup( + name='datadog-rethinkdb', + version=ABOUT['__version__'], + description='The RethinkDB check', + long_description=long_description, + long_description_content_type='text/markdown', + keywords='datadog agent rethinkdb check', + # The project's main homepage. 
+ url='https://github.com/DataDog/integrations-core', + # Author details + author='Datadog', + author_email='packages@datadoghq.com', + # License + license='BSD-3-Clause', + # See https://pypi.org/classifiers + classifiers=[ + 'Development Status :: 5 - Production/Stable', + 'Intended Audience :: Developers', + 'Intended Audience :: System Administrators', + 'Topic :: System :: Monitoring', + 'License :: OSI Approved :: BSD License', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3.8', + ], + # The package we're going to ship + packages=['datadog_checks.rethinkdb'], + # Run-time dependencies + install_requires=[CHECKS_BASE_REQ], + # Extra files to ship with the wheel package + include_package_data=True, +) diff --git a/rethinkdb/tests/__init__.py b/rethinkdb/tests/__init__.py new file mode 100644 index 0000000000000..46dd167dcde48 --- /dev/null +++ b/rethinkdb/tests/__init__.py @@ -0,0 +1,3 @@ +# (C) Datadog, Inc. 2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) diff --git a/rethinkdb/tests/assertions.py b/rethinkdb/tests/assertions.py new file mode 100644 index 0000000000000..2e1895d97c810 --- /dev/null +++ b/rethinkdb/tests/assertions.py @@ -0,0 +1,152 @@ +# (C) Datadog, Inc. 
def assert_service_checks(aggregator, instance, connect_status=AgentCheck.OK, disconnected_servers=None):
    # type: (AggregatorStub, Instance, ServiceCheckStatus, Set[ServerName]) -> None
    """Verify the connectivity and table status service checks submitted by the check."""
    host_port_tags = ['host:{}'.format(instance['host']), 'port:{}'.format(instance['port'])]
    aggregator.assert_service_check('rethinkdb.can_connect', connect_status, count=1, tags=TAGS + host_port_tags)

    # Table status checks are not submitted at all when the check could not connect.
    expected_count = 0 if connect_status == AgentCheck.CRITICAL else 1
    table_tags = TAGS + ['table:{}'.format(HEROES_TABLE), 'database:{}'.format(DATABASE)]

    for name in TABLE_STATUS_SERVICE_CHECKS:
        # With disconnected servers, every status flag degrades to WARNING except
        # `ready_for_outdated_reads`, which stays OK.
        if disconnected_servers and not name.endswith('ready_for_outdated_reads'):
            expected_status = AgentCheck.WARNING
        else:
            expected_status = AgentCheck.OK

        aggregator.assert_service_check(name, expected_status, count=expected_count, tags=table_tags)
disconnected_servers=disconnected_servers) + _assert_statistics_metrics(aggregator, disconnected_servers=disconnected_servers) + _assert_table_status_metrics(aggregator) + _assert_server_status_metrics(aggregator, disconnected_servers=disconnected_servers) + _assert_jobs_metrics(aggregator, is_proxy=is_proxy) + _assert_current_issues_metrics(aggregator, disconnected_servers=disconnected_servers) + + +def _assert_config_metrics(aggregator, disconnected_servers): + # type: (AggregatorStub, Set[ServerName]) -> None + for metric, typ, value, tags in CONFIG_METRICS: + if callable(value): + value = value(disconnected_servers) + aggregator.assert_metric(metric, metric_type=typ, count=1, tags=TAGS + tags, value=value) + + +def _assert_statistics_metrics(aggregator, disconnected_servers): + # type: (AggregatorStub, Set[ServerName]) -> None + for metric, typ in CLUSTER_STATISTICS_METRICS: + aggregator.assert_metric(metric, metric_type=typ, count=1, tags=TAGS) + + for server in SERVERS: + tags = TAGS + ['server:{}'.format(server)] + SERVER_TAGS[server] + for metric, typ in SERVER_STATISTICS_METRICS: + count = 0 if server in disconnected_servers else 1 + aggregator.assert_metric(metric, metric_type=typ, count=count, tags=tags) + + for metric, typ in TABLE_STATISTICS_METRICS: + tags = TAGS + ['table:{}'.format(HEROES_TABLE), 'database:{}'.format(DATABASE)] + aggregator.assert_metric(metric, metric_type=typ, count=1, tags=tags) + + for server in HEROES_TABLE_SERVERS: + tags = ( + TAGS + + ['table:{}'.format(HEROES_TABLE), 'database:{}'.format(DATABASE), 'server:{}'.format(server)] + + SERVER_TAGS[server] + ) + + for metric, typ in REPLICA_STATISTICS_METRICS: + if server in disconnected_servers: + aggregator.assert_metric(metric, count=0, tags=tags) + continue + + # Assumption: cluster is stable (not currently rebalancing), so only these two states can exist. 
+ state = 'waiting_for_primary' if HEROES_TABLE_PRIMARY_REPLICA in disconnected_servers else 'ready' + state_tag = 'state:{}'.format(state) + aggregator.assert_metric(metric, metric_type=typ, count=1, tags=tags + [state_tag]) + + +def _assert_table_status_metrics(aggregator): + # type: (AggregatorStub) -> None + for metric, typ in TABLE_STATUS_METRICS: + tags = TAGS + ['table:{}'.format(HEROES_TABLE), 'database:{}'.format(DATABASE)] + aggregator.assert_metric(metric, metric_type=typ, count=1, tags=tags) + + for shard in HEROES_TABLE_REPLICAS_BY_SHARD: + tags = TAGS + ['table:{}'.format(HEROES_TABLE), 'database:{}'.format(DATABASE), 'shard:{}'.format(shard)] + + for metric, typ in TABLE_STATUS_SHARDS_METRICS: + aggregator.assert_metric(metric, metric_type=typ, count=1, tags=tags) + + +def _assert_server_status_metrics(aggregator, disconnected_servers): + # type: (AggregatorStub, Set[ServerName]) -> None + for metric, typ in SERVER_STATUS_METRICS: + for server in SERVERS: + tags = TAGS + ['server:{}'.format(server)] + count = 0 if server in disconnected_servers else 1 + aggregator.assert_metric(metric, metric_type=typ, count=count, tags=tags) + + +def _assert_jobs_metrics(aggregator, is_proxy): + # type: (AggregatorStub, bool) -> None + for metric, typ, value, tags in JOBS_METRICS: + if 'job_type:query' in tags and is_proxy and IS_RETHINKDB_2_3: + # For some reason, queries issued to retrieve metrics via a proxy server are not included + # in system jobs under RethinkDB 2.3. 
+ count = 0 + else: + count = 1 + + aggregator.assert_metric(metric, metric_type=typ, value=value, count=count, tags=TAGS + tags) + + +def _assert_current_issues_metrics(aggregator, disconnected_servers): + # type: (AggregatorStub, Set[ServerName]) -> None + for metric, typ in CURRENT_ISSUES_METRICS: + if disconnected_servers: + for issue_type in CURRENT_ISSUE_TYPES_SUBMITTED_IF_DISCONNECTED_SERVERS: + tags = TAGS + ['issue_type:{}'.format(issue_type)] + aggregator.assert_metric(metric, metric_type=typ, count=1, tags=tags) + else: + aggregator.assert_metric(metric, metric_type=typ, count=0) diff --git a/rethinkdb/tests/cluster.py b/rethinkdb/tests/cluster.py new file mode 100644 index 0000000000000..12ef6c1615d40 --- /dev/null +++ b/rethinkdb/tests/cluster.py @@ -0,0 +1,129 @@ +# (C) Datadog, Inc. 2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +import logging +from contextlib import contextmanager +from typing import Iterator, List + +import rethinkdb +from rethinkdb import r + +from datadog_checks.dev.conditions import WaitFor +from datadog_checks.dev.docker import temporarily_stop_service + +from .common import ( + AGENT_PASSWORD, + AGENT_USER, + CLIENT_USER, + COMPOSE_FILE, + DATABASE, + HEROES_TABLE, + HEROES_TABLE_CONFIG, + HEROES_TABLE_DOCUMENTS, + HEROES_TABLE_INDEX_FIELD, + HOST, + SERVER_PORTS, +) + +logger = logging.getLogger(__name__) + + +def setup_cluster(): + # type: () -> None + """ + Configure the test cluster. + """ + logger.debug('setup_cluster') + + with r.connect(host=HOST, port=SERVER_PORTS['server0']) as conn: + # A test DB is automatically created, but we don't use it and it would skew our metrics. + response = r.db_drop('test').run(conn) + assert response['dbs_dropped'] == 1 + + # Cluster content. 
+ response = r.db_create(DATABASE).run(conn) + assert response['dbs_created'] == 1 + response = r.db(DATABASE).table_create(HEROES_TABLE, **HEROES_TABLE_CONFIG).run(conn) + assert response['tables_created'] == 1 + response = r.db(DATABASE).table(HEROES_TABLE).index_create(HEROES_TABLE_INDEX_FIELD).run(conn) + assert response['created'] == 1 + + response = r.db(DATABASE).table(HEROES_TABLE).wait(timeout=1).run(conn) + assert response['ready'] == 1 + + # Users. + # See: https://rethinkdb.com/docs/permissions-and-accounts/ + + if AGENT_USER != 'admin': + # Setup a dedicated Agent user. + response = r.db('rethinkdb').table('users').insert({'id': AGENT_USER, 'password': AGENT_PASSWORD}).run(conn) + assert response['inserted'] == 1 + response = r.db('rethinkdb').grant(AGENT_USER, {'read': True}).run(conn) + assert response['granted'] == 1 + + response = r.db('rethinkdb').table('users').insert({'id': CLIENT_USER, 'password': False}).run(conn) + assert response['inserted'] == 1 + response = r.db(DATABASE).grant(CLIENT_USER, {'read': True, 'write': True}).run(conn) + assert response['granted'] == 1 + + # Simulate client activity. + # NOTE: ensures that 'written_docs_*' and 'read_docs_*' metrics have non-zero values. + + with r.connect(host=HOST, port=SERVER_PORTS['server0'], user=CLIENT_USER) as conn: + response = r.db(DATABASE).table(HEROES_TABLE).insert(HEROES_TABLE_DOCUMENTS).run(conn) + assert response['inserted'] == len(HEROES_TABLE_DOCUMENTS) + + documents = list(r.db(DATABASE).table(HEROES_TABLE).run(conn)) + assert len(documents) == len(HEROES_TABLE_DOCUMENTS) + + +@contextmanager +def temporarily_disconnect_server(server): + # type: (str) -> Iterator[None] + """ + Gracefully disconnect a server from the cluster. + + Ensures that the cluster/replicas are in a stable state (not rebalancing) inside and after exiting the context. 
+ """ + service = 'rethinkdb-{}'.format(server) + logger.debug('temporarily_disconnect_server server=%r service=%r', server, service) + + def _server_exists(conn): + # type: (rethinkdb.net.Connection) -> bool + servers = r.db('rethinkdb').table('server_status').map(r.row['name']).run(conn) # type: List[str] + logger.debug('server_exists server=%r servers=%r', server, servers) + return server in servers + + def _leader_election_done(conn): + # type: (rethinkdb.net.Connection) -> bool + STABLE_REPLICA_STATES = {'ready', 'waiting_for_primary', 'disconnected'} + + replica_states = list( + r.db('rethinkdb') + .table('table_status') + .concat_map(r.row['shards'].default([])) # May be `None` on 2.3.x. + .concat_map(r.row['replicas']) + .map(r.row['state']) + .run(conn) + ) # type: List[str] + + logger.debug('replica_states %r', replica_states) + + return all(state in STABLE_REPLICA_STATES for state in replica_states) + + def _server_disconnected(conn): + # type: (rethinkdb.net.Connection) -> bool + return not _server_exists(conn) and _leader_election_done(conn) + + def _server_reconnected(conn): + # type: (rethinkdb.net.Connection) -> bool + return _server_exists(conn) and _leader_election_done(conn) + + with temporarily_stop_service(service, compose_file=COMPOSE_FILE): + with r.connect(host=HOST, port=SERVER_PORTS['server0']) as conn: + WaitFor(lambda: _server_disconnected(conn))() + + yield + + with r.connect(host=HOST, port=SERVER_PORTS['server0']) as conn: + WaitFor(lambda: _server_reconnected(conn))() diff --git a/rethinkdb/tests/common.py b/rethinkdb/tests/common.py new file mode 100644 index 0000000000000..48a950d72a6b2 --- /dev/null +++ b/rethinkdb/tests/common.py @@ -0,0 +1,214 @@ +# (C) Datadog, Inc. 
2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +import os +from typing import Callable, Dict, List, Set, Tuple, Union + +import pytest + +from datadog_checks.base.stubs.aggregator import AggregatorStub +from datadog_checks.dev import get_docker_hostname, get_here + +from .types import ServerName + +HERE = get_here() + +IMAGE = os.environ.get('RETHINKDB_IMAGE', '') +RAW_VERSION = os.environ.get('RETHINKDB_RAW_VERSION', '') +IS_RETHINKDB_2_3 = RAW_VERSION.startswith('2.3.') + +HOST = get_docker_hostname() + +TAGS = ['env:testing'] + +# Servers. +# NOTE: server information is tightly coupled to the Docker Compose setup. + +SERVERS = {'server0', 'server1', 'server2'} # type: Set[ServerName] +BOOTSTRAP_SERVER = 'server0' # type: ServerName +SERVER_PORTS = {'server0': 28015, 'server1': 28016, 'server2': 28017, 'proxy': 28018} # type: Dict[ServerName, int] +SERVER_TAGS = { + 'server0': ['default', 'us'], + 'server1': ['default', 'us', 'primary'], + 'server2': ['default', 'eu'], +} # type: Dict[ServerName, List[str]] + +# Users. + +if IS_RETHINKDB_2_3: + # In RethinkDB 2.3.x, granting permissions onto `rethinkdb` database to non-admin users is not supported. + # So we must use the admin account. + # See: https://github.com/rethinkdb/rethinkdb/issues/5692 + AGENT_USER = 'admin' + AGENT_PASSWORD = '' +else: + # Use a dedicated user for metric collection. + AGENT_USER = 'datadog-agent' + AGENT_PASSWORD = 'r3th1nK' + +CLIENT_USER = 'doggo' + +# TLS. + +TLS_SERVER = 'server1' # type: ServerName +TLS_DRIVER_KEY = os.path.join(HERE, 'data', 'tls', 'server.key') +TLS_DRIVER_CERT = os.path.join(HERE, 'data', 'tls', 'server.pem') +TLS_CLIENT_CERT = os.path.join(HERE, 'data', 'tls', 'client.pem') + +# Database content. 
+ +DATABASE = 'doghouse' + +HEROES_TABLE = 'heroes' +HEROES_TABLE_CONFIG = { + 'shards': 1, + 'replicas': {'primary': 1, 'eu': 1}, + 'primary_replica_tag': 'primary', +} +HEROES_TABLE_SERVERS = {'server1', 'server2'} # type: Set[ServerName] +HEROES_TABLE_PRIMARY_REPLICA = 'server1' # type: ServerName +HEROES_TABLE_REPLICAS_BY_SHARD = {0: HEROES_TABLE_SERVERS} +HEROES_TABLE_DOCUMENTS = [ + { + "hero": "Magneto", + "name": "Max Eisenhardt", + "aka": ["Magnus", "Erik Lehnsherr", "Lehnsherr"], + "magazine_titles": ["Alpha Flight", "Avengers", "Avengers West Coast"], + "appearances_count": 42, + }, + { + "hero": "Professor Xavier", + "name": "Charles Francis Xavier", + "magazine_titles": ["Alpha Flight", "Avengers", "Bishop", "Defenders"], + "appearances_count": 72, + }, + { + "hero": "Storm", + "name": "Ororo Monroe", + "magazine_titles": ["Amazing Spider-Man vs. Wolverine", "Excalibur", "Fantastic Four", "Iron Fist"], + "appearances_count": 72, + }, +] +HEROES_TABLE_INDEX_FIELD = 'appearances_count' + +# Metrics lists. +# NOTE: jobs metrics are not listed here as they're hard to trigger, so they're covered by unit tests instead. + +CONFIG_METRICS = ( + ( + 'rethinkdb.config.servers', + AggregatorStub.GAUGE, + lambda disconnected_servers: len(SERVERS) - len(disconnected_servers), + [], + ), + ('rethinkdb.config.databases', AggregatorStub.GAUGE, 1, []), + ('rethinkdb.config.tables_per_database', AggregatorStub.GAUGE, 1, ['database:{}'.format(DATABASE)]), + ('rethinkdb.config.secondary_indexes_per_table', AggregatorStub.GAUGE, 1, ['table:{}'.format(HEROES_TABLE)]), +) # type: Tuple[Tuple[str, int, Union[int, Callable[[set], int]], List[str]], ...] + +CLUSTER_STATISTICS_METRICS = ( + ('rethinkdb.stats.cluster.query_engine.queries_per_sec', AggregatorStub.GAUGE), + ('rethinkdb.stats.cluster.query_engine.read_docs_per_sec', AggregatorStub.GAUGE), + ('rethinkdb.stats.cluster.query_engine.written_docs_per_sec', AggregatorStub.GAUGE), +) # type: Tuple[Tuple[str, int], ...] 
+ +SERVER_STATISTICS_METRICS = ( + ('rethinkdb.stats.server.query_engine.queries_per_sec', AggregatorStub.GAUGE), + ('rethinkdb.stats.server.query_engine.queries_total', AggregatorStub.MONOTONIC_COUNT), + ('rethinkdb.stats.server.query_engine.read_docs_per_sec', AggregatorStub.GAUGE), + ('rethinkdb.stats.server.query_engine.read_docs_total', AggregatorStub.MONOTONIC_COUNT), + ('rethinkdb.stats.server.query_engine.written_docs_per_sec', AggregatorStub.GAUGE), + ('rethinkdb.stats.server.query_engine.written_docs_total', AggregatorStub.MONOTONIC_COUNT), + ('rethinkdb.stats.server.query_engine.client_connections', AggregatorStub.GAUGE), + ( + # NOTE: submitted but not documented on the RethinkDB website. + 'rethinkdb.stats.server.query_engine.clients_active', + AggregatorStub.GAUGE, + ), +) # type: Tuple[Tuple[str, int], ...] + +TABLE_STATISTICS_METRICS = ( + ('rethinkdb.stats.table.query_engine.read_docs_per_sec', AggregatorStub.GAUGE), + ('rethinkdb.stats.table.query_engine.written_docs_per_sec', AggregatorStub.GAUGE), +) # type: Tuple[Tuple[str, int], ...] 
+ +REPLICA_STATISTICS_METRICS = ( + ('rethinkdb.stats.table_server.query_engine.read_docs_per_sec', AggregatorStub.GAUGE), + ('rethinkdb.stats.table_server.query_engine.read_docs_total', AggregatorStub.MONOTONIC_COUNT), + ('rethinkdb.stats.table_server.query_engine.written_docs_per_sec', AggregatorStub.GAUGE), + ('rethinkdb.stats.table_server.query_engine.written_docs_total', AggregatorStub.MONOTONIC_COUNT), + ('rethinkdb.stats.table_server.storage_engine.cache.in_use_bytes', AggregatorStub.GAUGE), + ('rethinkdb.stats.table_server.storage_engine.disk.read_bytes_per_sec', AggregatorStub.GAUGE), + ('rethinkdb.stats.table_server.storage_engine.disk.read_bytes_total', AggregatorStub.MONOTONIC_COUNT), + ('rethinkdb.stats.table_server.storage_engine.disk.written_bytes_per_sec', AggregatorStub.GAUGE), + ('rethinkdb.stats.table_server.storage_engine.disk.written_bytes_total', AggregatorStub.MONOTONIC_COUNT), + ('rethinkdb.stats.table_server.storage_engine.disk.space_usage.metadata_bytes', AggregatorStub.GAUGE), + ('rethinkdb.stats.table_server.storage_engine.disk.space_usage.data_bytes', AggregatorStub.GAUGE), + ('rethinkdb.stats.table_server.storage_engine.disk.space_usage.garbage_bytes', AggregatorStub.GAUGE), + ('rethinkdb.stats.table_server.storage_engine.disk.space_usage.preallocated_bytes', AggregatorStub.GAUGE), +) # type: Tuple[Tuple[str, int], ...] + +TABLE_STATUS_SERVICE_CHECKS = ( + 'rethinkdb.table_status.status.ready_for_outdated_reads', + 'rethinkdb.table_status.status.ready_for_reads', + 'rethinkdb.table_status.status.ready_for_writes', + 'rethinkdb.table_status.status.all_replicas_ready', +) + +TABLE_STATUS_METRICS = (('rethinkdb.table_status.shards', AggregatorStub.GAUGE),) # type: Tuple[Tuple[str, int], ...] + +TABLE_STATUS_SHARDS_METRICS = ( + ('rethinkdb.table_status.shards.replicas', AggregatorStub.GAUGE), + ('rethinkdb.table_status.shards.primary_replicas', AggregatorStub.GAUGE), +) # type: Tuple[Tuple[str, int], ...] 
+ +SERVER_STATUS_METRICS = ( + ('rethinkdb.server_status.network.time_connected', AggregatorStub.GAUGE), + ('rethinkdb.server_status.network.connected_to', AggregatorStub.GAUGE), + ('rethinkdb.server_status.process.time_started', AggregatorStub.GAUGE), +) # type: Tuple[Tuple[str, int], ...] + +JOBS_METRICS = ( + ('rethinkdb.system_jobs.jobs', AggregatorStub.GAUGE, 1, ['job_type:query'],), +) # type: Tuple[Tuple[str, int, int, List[str]], ...] + +CURRENT_ISSUES_METRICS = ( + ('rethinkdb.current_issues.issues', AggregatorStub.GAUGE), + ('rethinkdb.current_issues.critical_issues', AggregatorStub.GAUGE), +) # type: Tuple[Tuple[str, int], ...] + +CURRENT_ISSUE_TYPES_SUBMITTED_IF_DISCONNECTED_SERVERS = ['table_availability'] + +E2E_METRICS = ( + tuple((name, typ) for name, typ, _, _ in CONFIG_METRICS) + + CLUSTER_STATISTICS_METRICS + + SERVER_STATISTICS_METRICS + + TABLE_STATISTICS_METRICS + + REPLICA_STATISTICS_METRICS + + TABLE_STATUS_METRICS + + TABLE_STATUS_SHARDS_METRICS + + SERVER_STATUS_METRICS + + tuple((name, typ) for name, typ, _, _ in JOBS_METRICS) +) # type: Tuple[Tuple[str, int], ...] + +# Docker Compose configuration. + +COMPOSE_FILE = os.path.join(HERE, 'compose', 'docker-compose.yaml') + +COMPOSE_ENV_VARS = env_vars = { + 'RETHINKDB_IMAGE': IMAGE, + 'RETHINKDB_PORT_SERVER0': str(SERVER_PORTS['server0']), + 'RETHINKDB_PORT_SERVER1': str(SERVER_PORTS['server1']), + 'RETHINKDB_PORT_SERVER2': str(SERVER_PORTS['server2']), + 'RETHINKDB_PORT_PROXY': str(SERVER_PORTS['proxy']), + 'RETHINKDB_TLS_DRIVER_KEY': TLS_DRIVER_KEY, + 'RETHINKDB_TLS_DRIVER_CERT': TLS_DRIVER_CERT, +} + +# Pytest common test data. 
+ +MALFORMED_VERSION_STRING_PARAMS = [ + pytest.param('rethinkdb (GCC 4.9.2)', id='no-version'), + pytest.param('rethinkdb', id='prefix-only'), + pytest.param('abc 2.4.0~0bionic (GCC 4.9.2)', id='wrong-prefix'), +] diff --git a/rethinkdb/tests/compose/docker-compose.yaml b/rethinkdb/tests/compose/docker-compose.yaml new file mode 100644 index 0000000000000..fcc48c8b644e4 --- /dev/null +++ b/rethinkdb/tests/compose/docker-compose.yaml @@ -0,0 +1,64 @@ +version: "3" + +services: + # 3-node RethinkDB cluster with 1 proxy node. + + rethinkdb-server0: + # NOTE: `tty` is required for RethinkDB 2.4.0, otherwise `docker logs` won't see logs. + # This due to an issue with I/O buffering on this version, see: https://github.com/rethinkdb/rethinkdb/issues/6819 + tty: true + image: ${RETHINKDB_IMAGE} + container_name: rethinkdb-server0 + command: rethinkdb --bind all --server-name server0 --server-tag us + ports: + - ${RETHINKDB_PORT_SERVER0}:28015 # Client driver port. + - 8080:8080 # Port for the web UI. Debugging only (not used by tests). 
+ + rethinkdb-server1: + tty: true + image: ${RETHINKDB_IMAGE} + container_name: rethinkdb-server1 + command: | + rethinkdb + --join rethinkdb-server0:29015 + --bind all + --server-name server1 + --server-tag us + --server-tag primary + --http-tls-key /opt/server.key + --http-tls-cert /opt/server.pem + --driver-tls-key /opt/server.key + --driver-tls-cert /opt/server.pem + volumes: + - ${RETHINKDB_TLS_DRIVER_KEY}:/opt/server.key + - ${RETHINKDB_TLS_DRIVER_CERT}:/opt/server.pem + links: + - rethinkdb-server0 + depends_on: + - rethinkdb-server0 + ports: + - ${RETHINKDB_PORT_SERVER1}:28015 + + rethinkdb-server2: + tty: true + image: ${RETHINKDB_IMAGE} + container_name: rethinkdb-server2 + command: rethinkdb --join rethinkdb-server0:29015 --bind all --server-name server2 --server-tag eu + links: + - rethinkdb-server0 + depends_on: + - rethinkdb-server0 + ports: + - ${RETHINKDB_PORT_SERVER2}:28015 + + rethinkdb-proxy0: + tty: true + image: ${RETHINKDB_IMAGE} + container_name: rethinkdb-proxy0 + command: rethinkdb proxy --join rethinkdb-server0:29015 --bind all + links: + - rethinkdb-server0 + depends_on: + - rethinkdb-server0 + ports: + - ${RETHINKDB_PORT_PROXY}:28015 diff --git a/rethinkdb/tests/conftest.py b/rethinkdb/tests/conftest.py new file mode 100644 index 0000000000000..a5869402ca66e --- /dev/null +++ b/rethinkdb/tests/conftest.py @@ -0,0 +1,48 @@ +# (C) Datadog, Inc. 
2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +from typing import Iterator + +import pytest + +from datadog_checks.dev import docker_run +from datadog_checks.dev.conditions import CheckDockerLogs +from datadog_checks.rethinkdb.types import Instance + +from .cluster import setup_cluster +from .common import ( + AGENT_PASSWORD, + AGENT_USER, + BOOTSTRAP_SERVER, + COMPOSE_ENV_VARS, + COMPOSE_FILE, + HOST, + SERVER_PORTS, + SERVERS, + TAGS, +) + + +@pytest.fixture(scope='session') +def instance(): + # type: () -> Instance + return { + 'host': HOST, + 'port': SERVER_PORTS['server0'], + 'username': AGENT_USER, + 'password': AGENT_PASSWORD, + 'tags': TAGS, + } + + +@pytest.fixture(scope='session') +def dd_environment(instance): + # type: (Instance) -> Iterator + log_patterns = [r'Server ready, "{}".*'.format(BOOTSTRAP_SERVER), r'Connected to proxy.*'] + log_patterns.extend(r'Connected to server "{}".*'.format(server) for server in SERVERS - {BOOTSTRAP_SERVER}) + wait_servers_ready = CheckDockerLogs(COMPOSE_FILE, patterns=log_patterns, matches='all') + + conditions = [wait_servers_ready, setup_cluster] + + with docker_run(COMPOSE_FILE, conditions=conditions, env_vars=COMPOSE_ENV_VARS): + yield instance diff --git a/rethinkdb/tests/data/tls/README.md b/rethinkdb/tests/data/tls/README.md new file mode 100644 index 0000000000000..dde1c51089364 --- /dev/null +++ b/rethinkdb/tests/data/tls/README.md @@ -0,0 +1,21 @@ +# TLS certificates + +TLS certificates were generated using [`trustme-cli`](https://github.com/sethmlarson/trustme-cli): + +```bash +trustme-cli --common-name localhost +mv server.key server.pem client.pem rethinkdb/tests/data/tls/ +``` + +To connect to a server configured with these certificates, use: + +```python +import os +from rethinkdb import r + +ca_certs = os.path.join('rethinkdb', 'tests', 'data', 'tls', 'client.pem') +port = 28016 # TODO: adjust to the server you want to connect to. 
+conn = r.connect(port=port, ssl={'ca_certs': ca_certs}) +``` + +See also: https://rethinkdb.com/docs/security/#securing-the-driver-port diff --git a/rethinkdb/tests/data/tls/client.pem b/rethinkdb/tests/data/tls/client.pem new file mode 100644 index 0000000000000..9db7292eaa46c --- /dev/null +++ b/rethinkdb/tests/data/tls/client.pem @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDfzCCAmegAwIBAgIUQSxcZpbgiAtbD0MiJ9/W4I1bANAwDQYJKoZIhvcNAQEL +BQAwQDEXMBUGA1UECgwOdHJ1c3RtZSB2MC42LjAxJTAjBgNVBAsMHFRlc3Rpbmcg +Q0EgI1huaHFmcFdJWWp5Rzc2MmwwHhcNMDAwMTAxMDAwMDAwWhcNMzgwMTAxMDAw +MDAwWjBAMRcwFQYDVQQKDA50cnVzdG1lIHYwLjYuMDElMCMGA1UECwwcVGVzdGlu +ZyBDQSAjWG5ocWZwV0lZanlHNzYybDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC +AQoCggEBAKKxnEEXL749nMdsvvBWIvUvwBx7gpKopxFOU1LhwlCATDESwCP6GPDw +/cgYIxbINDATLk8JMxduWKkC4asfVutUCdlZoNG7iCXU2CxeZPmxvh0Or6m0yCO7 +XYDeihXSsFvmvZw2JYMdzCFZ7ltWcqtzSvFuKZQrBoEPW1o2+fiRwNB2AFWR0Ez1 +1Y/d7///Od6CNoanT9fRY6NFO1k4NH+Netj2igoJX1aRnOxmzVEPwYYkPk/11cJt +6nteYblPbQqluBD/8bEfDB9rUbWi4TFG7ilqGgRmestaIV467I0iNc42qmVVz870 +Kuanpw4i9O4CmfSy/NDtHWloNdaXuR0CAwEAAaNxMG8wHQYDVR0OBBYEFI8n9Qo4 +cCsyQyihaEN6dOv5lKLfMBIGA1UdEwEB/wQIMAYBAf8CAQkwDgYDVR0PAQH/BAQD +AgEGMCoGA1UdJQEB/wQgMB4GCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYBBQUHAwMw +DQYJKoZIhvcNAQELBQADggEBAFJ2dhOSK+8vuUGWjL+7uSLwzb79EpTga9xa3rg0 +wfA18LZCapRSacvfG2lhnTgqfKp8JsecHMZfhqXFoqhMXZ3Bo9YTKmT7UlasPnbb +09/YcYXCUanBqi0BA+2Onl4hi5PsWdvqF9/AypQrjY7XsrbuAfnihJvmtbqCIMv7 +47f+qvFBKHGsXwcpCICB6woLRSN9JMQ2t0eJ9gQANG2smZQ/aHKzfhdn+qUu1f5u +3/QpZ3huvKcdw04McCDtOeqbx3RbQ4aczd1ZkUC396fVYqQw0Dtg/AEKMz0fZLvH +2ZZbuVCpKA8i652anYhVRsZG8xWYnP/VhTfBlT9AbiyT9AI= +-----END CERTIFICATE----- diff --git a/rethinkdb/tests/data/tls/server.key b/rethinkdb/tests/data/tls/server.key new file mode 100644 index 0000000000000..c12d805cd61e0 --- /dev/null +++ b/rethinkdb/tests/data/tls/server.key @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEogIBAAKCAQEAy4ZuvuprIyUAm8esD6QM2TKf0dwlpgvNRL+O7QhY9+XVq4+S 
+ww7pIVcVjqzoVHRs58HyD5ekHlT6Uc+d9Ql+KC5JlfAT1M7klslcBQfFCzYMCB7N +JDc9cjTzls1hUtrpekRqonn6FSreZHD8uHGrNPicxxLp1KsGmWuSQP+a9Lkfq8k1 +/O3hZb+4ypVbigqxmLU9hrJoHpvVIqRUdhiPYjtovfHk94cJThWi/chtdJxxChCS +fXuCwmMsbCz24RBsZStuCLqIkLtycc4kAuahOFUZDMNAejIG01bRtmvCHjYsQq0B +92DB0H6TUGIa9wt5kcSSS2gf8XAEtQ3pC8MsQwIDAQABAoIBAHSvoHLo54AAyPaH +ZgiZn5wvQB4Lv7IRdiCCSylu7cNDDFwONrCKrfKlgQCJ9bHmBkJhmayC3l9djR62 +j/Na+++BikLioBwUek5RJ+bia4Bf/knxjt/CpAIEhdvh6HieddnZu+FnWZAlQdgI +R3xf/y6hkxE3sMRzQZdYa0PEyEyhiLgl7iFePaHOQ24waSU2twGq4rlR2i3NVYA6 +kAMiS7CWNP7EGL/v47qZLEqDIbK75mHCFCspz4oTvS8gTtojd0YOg1JNlwft4OUU +wz+9StLgY2wOH6OFfhakig1y2hdt0JnqyEyvCGTVEzaHYIAoDk/bIHZAJZ35cqzQ ++ht6W5ECgYEA7EYnkaxDYuDWb+lhbCi3ZV9+srse1N1rArUtixZk9CTcoCTWpnmF +hjtamMwUcgI37XrqVSiBxOBLNN1//0pXK7Kj2VSN4v4zU/2Vm0QSyhH1CdFnDoAs +Dlqvn2hQtxYBOWoa+2J2rjwAdA8/zRVWTqlf5JpAnlb8h+OYqglxiycCgYEA3IRX +S0jh/Od1WynOPtap5G2ifPoC7xXqnBMeyQ//mHC3I+p2ZQqLcO7cE/BvstwVAnbu +mnJTudm7XeLSiQ/HX6xtKdb03VlEZ07cBaDb+LlsbiaRvFCSHwiA0df1TQzeImP1 +oi+JxWR2i6RRHh/JNXuFlYzZbXnq5ESy/rBrt4UCgYAHGFQ326RqY7YMxkVWqiJX +uPZlB5l1avC7tBUaCCIf8rttU63ecefAkCe4TuiOQ+LKDtbal8zNzK51d/FQCTt0 +5HhnHlLkbh28d/D2KKUnzEI/eTpPBSpNhAuiDc/er5p5zuXghqRptwOMeKjBz9I3 +qn5mrvshxhrvObZr5Ly0IQKBgAfHg2zcfVx2reUxbF07JXMxivHtv/y/QS+QXR2q +utGd6FgUBWk7HgA17P2nFcmxiew0VeaM8fc0fy+ouNStAdOWOaOoOas1YvxiA9EP +A7OalwCry6hhD0aY/jJo8/Zr5cpAzIGUO8AC80up2FK/vFFush+8gE8kZez3n3js +sGAhAoGAPHYw2QpAkF37xJJEBPelGcPvbTOpgkQt6EVsjEnWMU+ljfGr2dqOqBHn +++yH9b5yZzYQhh0o6rB6cBe+tUJ/oNxckT2uu5ovXtAmIj/NLNNU+HAjlTUPWGTW +p0VLlKYwg76yy2N4dbJy9I3a0I9gKXZH1s4K2i+FGJQhWX5/e/M= +-----END RSA PRIVATE KEY----- diff --git a/rethinkdb/tests/data/tls/server.pem b/rethinkdb/tests/data/tls/server.pem new file mode 100644 index 0000000000000..aee254d2c5d38 --- /dev/null +++ b/rethinkdb/tests/data/tls/server.pem @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDpjCCAo6gAwIBAgIUYJxtZnXFrD4YsEygUZDBe0DTzmIwDQYJKoZIhvcNAQEL +BQAwQDEXMBUGA1UECgwOdHJ1c3RtZSB2MC42LjAxJTAjBgNVBAsMHFRlc3Rpbmcg 
+Q0EgI1huaHFmcFdJWWp5Rzc2MmwwHhcNMDAwMTAxMDAwMDAwWhcNMzgwMTAxMDAw +MDAwWjBWMRcwFQYDVQQKDA50cnVzdG1lIHYwLjYuMDEnMCUGA1UECwweVGVzdGlu +ZyBjZXJ0ICNNRkF3SVQ3RU5Zd3NnVTZ5MRIwEAYDVQQDDAlsb2NhbGhvc3QwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDLhm6+6msjJQCbx6wPpAzZMp/R +3CWmC81Ev47tCFj35dWrj5LDDukhVxWOrOhUdGznwfIPl6QeVPpRz531CX4oLkmV +8BPUzuSWyVwFB8ULNgwIHs0kNz1yNPOWzWFS2ul6RGqiefoVKt5kcPy4cas0+JzH +EunUqwaZa5JA/5r0uR+ryTX87eFlv7jKlVuKCrGYtT2Gsmgem9UipFR2GI9iO2i9 +8eT3hwlOFaL9yG10nHEKEJJ9e4LCYyxsLPbhEGxlK24IuoiQu3JxziQC5qE4VRkM +w0B6MgbTVtG2a8IeNixCrQH3YMHQfpNQYhr3C3mRxJJLaB/xcAS1DekLwyxDAgMB +AAGjgYEwfzAdBgNVHQ4EFgQUIo0NrhsVWeSddfnvrXACJLVGVxEwDAYDVR0TAQH/ +BAIwADAfBgNVHSMEGDAWgBSPJ/UKOHArMkMooWhDenTr+ZSi3zAvBgNVHREBAf8E +JTAjgglsb2NhbGhvc3SHBH8AAAGHEAAAAAAAAAAAAAAAAAAAAAEwDQYJKoZIhvcN +AQELBQADggEBACdg5dhTitGlWEiV2qTDApUBhGOp0ZyLA6Gq2YCXdSc+nwgkq8j6 +mAVeQXu14zri+aXLUZJK7wsbCVErIb7RWbDAoiQYw9gKd8soNE5vVCKDueCXUN24 +5oPosNBOZI4CkSS/dQrOJPXGvmyn40e46fZI5AWwKzQ49+nMQpiI6hD4H01fW1pa +sVC2pJ8G+TWxG6CmGsGoBNlsoilzGwC6u4zZlue1CWdcet7mS4TMCi5L9TVQXh66 +CS5FQX/BXspjHnsOvCbZ/U+zLq9pi24FamG/t574Ym6mbgOwt2/lW3jZpLd6LZfk +5T+9KTr2DcPcb5VnwYrXVunv6s7eBrNPP+8= +-----END CERTIFICATE----- diff --git a/rethinkdb/tests/test_e2e.py b/rethinkdb/tests/test_e2e.py new file mode 100644 index 0000000000000..ccbc49f7b7d8a --- /dev/null +++ b/rethinkdb/tests/test_e2e.py @@ -0,0 +1,23 @@ +# (C) Datadog, Inc. 
2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +from typing import Callable + +import pytest + +from datadog_checks.base.stubs.aggregator import AggregatorStub +from datadog_checks.rethinkdb import RethinkDBCheck + +from .common import E2E_METRICS + + +@pytest.mark.e2e +def test_check_ok(dd_agent_check): + # type: (Callable) -> None + aggregator = dd_agent_check(rate=True) # type: AggregatorStub + + for metric, _ in E2E_METRICS: + aggregator.assert_metric(metric) + + aggregator.assert_all_metrics_covered() + aggregator.assert_service_check('rethinkdb.can_connect', RethinkDBCheck.OK) diff --git a/rethinkdb/tests/test_integration.py b/rethinkdb/tests/test_integration.py new file mode 100644 index 0000000000000..2be221ddd6373 --- /dev/null +++ b/rethinkdb/tests/test_integration.py @@ -0,0 +1,163 @@ +# (C) Datadog, Inc. 2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +import copy +from typing import ContextManager, Set + +import mock +import pytest +import rethinkdb + +from datadog_checks.base.stubs.aggregator import AggregatorStub +from datadog_checks.base.stubs.datadog_agent import DatadogAgentStub +from datadog_checks.base.types import ServiceCheckStatus +from datadog_checks.rethinkdb import RethinkDBCheck +from datadog_checks.rethinkdb.types import Instance + +from .assertions import assert_metrics, assert_service_checks +from .cluster import temporarily_disconnect_server +from .common import ( + HEROES_TABLE_SERVERS, + MALFORMED_VERSION_STRING_PARAMS, + RAW_VERSION, + SERVER_PORTS, + TLS_CLIENT_CERT, + TLS_SERVER, +) +from .types import ServerName + +try: + from contextlib import nullcontext # type: ignore +except ImportError: + from contextlib2 import nullcontext + + +@pytest.mark.integration +@pytest.mark.usefixtures('dd_environment') +class TestCheck: + METRICS_COLLECTION_MOCK_TARGET = 'datadog_checks.rethinkdb.check.RethinkDBCheck.collect_metrics' + + def 
run_test( + self, aggregator, instance, check_context=None, connect_status=RethinkDBCheck.OK, disconnected_servers=None + ): + # type: (AggregatorStub, Instance, ContextManager[None], ServiceCheckStatus, Set[ServerName]) -> None + check = RethinkDBCheck('rethinkdb', {}, [instance]) + + with check_context if check_context is not None else nullcontext(): + check.check(instance) + + if connect_status == RethinkDBCheck.OK: + assert_metrics( + aggregator, + is_proxy=instance['port'] == SERVER_PORTS['proxy'], + disconnected_servers=disconnected_servers, + ) + aggregator.assert_all_metrics_covered() + + assert_service_checks( + aggregator, instance, connect_status=connect_status, disconnected_servers=disconnected_servers + ) + + def test_default(self, aggregator, instance): + # type: (AggregatorStub, Instance) -> None + self.run_test(aggregator, instance) + + def test_connect_proxy_ok(self, aggregator, instance): + # type: (AggregatorStub, Instance) -> None + instance = instance.copy() + instance['port'] = SERVER_PORTS['proxy'] + self.run_test(aggregator, instance) + + def test_connect_tls_ok(self, aggregator, instance): + # type: (AggregatorStub, Instance) -> None + instance = instance.copy() + instance['port'] = SERVER_PORTS[TLS_SERVER] + instance['tls_ca_cert'] = TLS_CLIENT_CERT + self.run_test(aggregator, instance) + + def test_no_credentials_ok(self, aggregator, instance): + # type: (AggregatorStub, Instance) -> None + instance = instance.copy() + + # RethinkDB will default to 'admin' w/o password. + # Should work assuming admin account in our test cluster doesn't have a password. 
+ instance.pop('username') + instance.pop('password') + + self.run_test(aggregator, instance) + + @pytest.mark.parametrize('server_with_data', list(HEROES_TABLE_SERVERS)) + def test_disconnected_data_server_ok(self, aggregator, instance, server_with_data): + # type: (AggregatorStub, Instance, ServerName) -> None + # Simulate the scenario where one of the servers in the cluster is down, but not the one we're + # connecting to. + self.run_test( + aggregator, + instance, + check_context=temporarily_disconnect_server(server_with_data), + disconnected_servers={server_with_data}, + ) + + def test_connection_failure(self, aggregator, instance): + # type: (AggregatorStub, Instance) -> None + instance = copy.deepcopy(instance) + instance['host'] = 'doesnotexist' + self.run_test( + aggregator, + instance, + check_context=pytest.raises(rethinkdb.errors.ReqlDriverError), + connect_status=RethinkDBCheck.CRITICAL, + ) + + def test_metric_collection_failure(self, aggregator, instance): + # type: (AggregatorStub, Instance) -> None + class Failure(Exception): + pass + + with mock.patch(self.METRICS_COLLECTION_MOCK_TARGET, side_effect=Failure): + self.run_test( + aggregator, instance, check_context=pytest.raises(Failure), connect_status=RethinkDBCheck.CRITICAL + ) + + +@pytest.mark.integration +@pytest.mark.usefixtures('dd_environment') +class TestVersionMetadata: + VERSION_MOCK_TARGET = 'datadog_checks.rethinkdb.operations.get_connected_server_raw_version' + + def run_test(self, instance, datadog_agent, metadata): + # type: (Instance, DatadogAgentStub, dict) -> None + check_id = 'test' + check = RethinkDBCheck('rethinkdb', {}, [instance]) + check.check_id = check_id + check.check(instance) + datadog_agent.assert_metadata(check_id, metadata) + + @pytest.mark.skipif(not RAW_VERSION, reason='Requires RAW_VERSION to be set') + def test_default(self, instance, datadog_agent): + # type: (Instance, DatadogAgentStub) -> None + raw_version = RAW_VERSION + version, _, build = 
raw_version.partition('~') + major, minor, patch = version.split('.') + metadata = { + 'version.scheme': 'semver', + 'version.major': major, + 'version.minor': minor, + 'version.patch': patch, + 'version.raw': raw_version, + } + + self.run_test(instance, datadog_agent, metadata=metadata) + + @pytest.mark.integration + @pytest.mark.parametrize('malformed_version_string', MALFORMED_VERSION_STRING_PARAMS) + def test_malformed(self, instance, aggregator, datadog_agent, malformed_version_string): + # type: (Instance, AggregatorStub, DatadogAgentStub, str) -> None + with mock.patch(self.VERSION_MOCK_TARGET, return_value=malformed_version_string): + self.run_test(instance, datadog_agent, metadata={}) + + @pytest.mark.integration + def test_failure(self, instance, aggregator, datadog_agent): + # type: (Instance, AggregatorStub, DatadogAgentStub) -> None + with mock.patch(self.VERSION_MOCK_TARGET, side_effect=ValueError('Oops!')): + self.run_test(instance, datadog_agent, metadata={}) diff --git a/rethinkdb/tests/types.py b/rethinkdb/tests/types.py new file mode 100644 index 0000000000000..ea61bd73ab931 --- /dev/null +++ b/rethinkdb/tests/types.py @@ -0,0 +1,6 @@ +# (C) Datadog, Inc. 2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +from typing import Literal + +ServerName = Literal['server0', 'server1', 'server2', 'proxy'] diff --git a/rethinkdb/tests/unit/__init__.py b/rethinkdb/tests/unit/__init__.py new file mode 100644 index 0000000000000..46dd167dcde48 --- /dev/null +++ b/rethinkdb/tests/unit/__init__.py @@ -0,0 +1,3 @@ +# (C) Datadog, Inc. 2020-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) diff --git a/rethinkdb/tests/unit/document_db/test_query.py b/rethinkdb/tests/unit/document_db/test_query.py new file mode 100644 index 0000000000000..29fc40197426c --- /dev/null +++ b/rethinkdb/tests/unit/document_db/test_query.py @@ -0,0 +1,160 @@ +# (C) Datadog, Inc. 
# --- rethinkdb/tests/unit/document_db/test_query.py ---
# (C) Datadog, Inc. 2020-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from collections import OrderedDict
from typing import Iterator, List, Tuple

import pytest
from six import PY3

from datadog_checks.rethinkdb.document_db import DocumentQuery, transformers

pytestmark = pytest.mark.unit


def test_document_query():
    # type: () -> None
    """
    A realistic unit test demonstrating the usage of `DocumentQuery`.
    """

    PRODUCTS_COLLECTION = [
        # NOTE: use ordered dicts so that order of submitted metrics is deterministic on Python 2 too.
        OrderedDict(
            (
                ('name', 'T-Shirt'),
                ('category', 'clothing'),
                ('sales', {'sales_per_day': 100, 'sales_total': 10000}),
                ('locations', [{'name': 'London', 'stock': 1200}, {'name': 'Paris', 'stock': 700}]),
                ('total_sales_per_location', OrderedDict((('london', 2000), ('paris', 8000)))),
            ),
        ),
        OrderedDict(
            (
                ('name', 'Laptop'),
                ('category', 'high-tech'),
                ('sales', {'sales_per_day': 5, 'sales_total': 400}),
                ('locations', [{'name': 'New York', 'stock': 150}]),
                ('total_sales_per_location', {'new-york': 400}),
            )
        ),
    ]

    def get_data_from_db(conn):
        # type: (dict) -> Iterator[Tuple[dict, List[str]]]
        # Yields (document, tags) pairs, as `DocumentQuery` expects from its source.
        for product in PRODUCTS_COLLECTION:
            tags = ['category:{}'.format(product['category']), 'server:{}'.format(conn['server'])]
            yield product, tags

    query = DocumentQuery(
        source=get_data_from_db,
        name='test',
        prefix='products',
        # Metrics obtained from a nested JSON key lookup (aka path lookup).
        metrics=[
            {'type': 'gauge', 'path': 'sales.sales_per_day'},
            {'type': 'monotonic_count', 'path': 'sales.sales_total'},
            {'type': 'gauge', 'path': 'locations', 'transformer': transformers.length},
        ],
        # Metrics for each object in an array, tagged by the index in the array.
        enumerations=[
            {'path': 'locations', 'index_tag': 'location_index', 'metrics': [{'type': 'gauge', 'path': 'stock'}]}
        ],
        # Metrics from the result of a groupby() operation (aggregation).
        groups=[{'type': 'gauge', 'path': 'total_sales_per_location', 'key_tag': 'location'}],
    )

    conn = {'server': 'example'}
    metrics = list(query.run(conn=conn))

    assert metrics == [
        # -- T-Shirt --
        # Metrics
        {
            'type': 'gauge',
            'name': 'products.sales.sales_per_day',
            'value': 100,
            'tags': ['category:clothing', 'server:example'],
        },
        {
            'type': 'monotonic_count',
            'name': 'products.sales.sales_total',
            'value': 10000,
            'tags': ['category:clothing', 'server:example'],
        },
        {'type': 'gauge', 'name': 'products.locations', 'value': 2, 'tags': ['category:clothing', 'server:example']},
        # Enumerations
        {
            'type': 'gauge',
            'name': 'products.locations.stock',
            'value': 1200,
            'tags': ['category:clothing', 'server:example', 'location_index:0'],
        },
        {
            'type': 'gauge',
            'name': 'products.locations.stock',
            'value': 700,
            'tags': ['category:clothing', 'server:example', 'location_index:1'],
        },
        # Groups
        {
            'type': 'gauge',
            'name': 'products.total_sales_per_location',
            'value': 2000,
            'tags': ['category:clothing', 'server:example', 'location:london'],
        },
        {
            'type': 'gauge',
            'name': 'products.total_sales_per_location',
            'value': 8000,
            'tags': ['category:clothing', 'server:example', 'location:paris'],
        },
        # -- Laptop --
        # Metrics
        {
            'type': 'gauge',
            'name': 'products.sales.sales_per_day',
            'value': 5,
            'tags': ['category:high-tech', 'server:example'],
        },
        {
            'type': 'monotonic_count',
            'name': 'products.sales.sales_total',
            'value': 400,
            'tags': ['category:high-tech', 'server:example'],
        },
        {'type': 'gauge', 'name': 'products.locations', 'value': 1, 'tags': ['category:high-tech', 'server:example']},
        # Enumerations
        {
            'type': 'gauge',
            'name': 'products.locations.stock',
            'value': 150,
            'tags': ['category:high-tech', 'server:example', 'location_index:0'],
        },
        # Groups
        {
            'type': 'gauge',
            'name': 'products.total_sales_per_location',
            'value': 400,
            'tags': ['category:high-tech', 'server:example', 'location:new-york'],
        },
    ]


def test_document_query_empty():
    # type: () -> None
    """A query over a source yielding empty documents submits no metrics."""

    def get_data():
        # type: () -> Iterator[Tuple[dict, List[str]]]
        yield {}, []

    query = DocumentQuery(source=get_data, name='test', prefix='dogs')
    metrics = list(query.run())
    assert metrics == []


@pytest.mark.skipif(
    not PY3, reason='Assertions fail randomly due to Python 2 dicts not being ordered (example should stay simple)'
)
def test_example():
    # type: () -> None
    import datadog_checks.rethinkdb.document_db._example  # noqa: F401


# --- rethinkdb/tests/unit/document_db/test_transformers.py ---
# (C) Datadog, Inc. 2020-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import datetime as dt

import pytz

from datadog_checks.rethinkdb.document_db import transformers


def test_to_time_elapsed():
    # type: () -> None
    # FIX: the original line was a bare comparison (`... == one_day_seconds`)
    # whose result was discarded, so the test could never fail. Assert it, with
    # a small tolerance because wall time elapses between building the input
    # timestamp and the transformer reading the current time.
    one_day_seconds = 3600 * 24
    elapsed = transformers.to_time_elapsed(dt.datetime.now(pytz.utc) - dt.timedelta(days=1))
    assert abs(elapsed - one_day_seconds) < 5
# (C) Datadog, Inc. 2020-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from typing import Any

import pytest

from datadog_checks.rethinkdb.document_db.utils import dotted_join, lookup_dotted

pytestmark = pytest.mark.unit


@pytest.mark.parametrize(
    'value, output',
    [
        ((), ''),
        (('foo',), 'foo'),
        (('foo', 'bar'), 'foo.bar'),
        (('foo', 'bar', 'baz'), 'foo.bar.baz'),
        # Empty components are skipped, wherever they appear.
        (('foo', 'bar', ''), 'foo.bar'),
        (('foo', '', 'baz'), 'foo.baz'),
        (('', 'bar', 'baz'), 'bar.baz'),
    ],
)
def test_dotted_join(value, output):
    # type: (tuple, str) -> None
    assert dotted_join(value) == output


@pytest.mark.parametrize(
    'dct, path, output',
    [
        ({}, '', {}),
        ({'tables': 10}, 'tables', 10),
        ({'tables': {'reads_per_sec': 500}}, 'tables.reads_per_sec', 500),
        ({'tables': {'all': ['heroes']}}, 'tables.all', ['heroes']),
        # FIX: the original list contained the exact duplicate case `({}, '', {})`
        # twice; replaced the duplicate with a deeper-nesting case instead.
        ({'cluster': {'tables': {'total': 3}}}, 'cluster.tables.total', 3),
    ],
)
def test_lookup_dotted(dct, path, output):
    # type: (dict, str, Any) -> None
    assert lookup_dotted(dct, path) == output


@pytest.mark.parametrize(
    'value, path',
    [
        pytest.param([], 'test', id='root-not-a-mapping'),
        pytest.param(True, 'test', id='root-not-a-mapping'),
        pytest.param({'tables': 10}, 'tables.total', id='node-not-a-mapping'),
        pytest.param({}, 'unknown', id='key-does-not-exist'),
        pytest.param({'tables': {'total': 10}}, 'tables.unknown', id='key-does-not-exist'),
        pytest.param({'tables.total': 10}, 'tables.total', id='dotted-key-not-supported'),
    ],
)
def test_lookup_dotted_invalid(value, path):
    # type: (Any, str) -> None
    with pytest.raises(ValueError):
        lookup_dotted(value, path)
# (C) Datadog, Inc. 2020-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from typing import Any

import pytest

from datadog_checks.base import ConfigurationError
from datadog_checks.rethinkdb.config import Config
from datadog_checks.rethinkdb.types import Instance

pytestmark = pytest.mark.unit


def test_default_config():
    # type: () -> None
    """An empty instance yields documented defaults for every option."""
    config = Config()
    assert config.host == 'localhost'
    assert config.port == 28015
    assert config.user is None
    assert config.tls_ca_cert is None
    assert config.tags == []


# FIX: argnames was the string 'port_28016,' (stray trailing comma); pytest
# happened to tolerate it, but it was a typo.
@pytest.mark.parametrize('port_28016', [28016, '28016'])
@pytest.mark.parametrize('min_collection_interval_10', [10, '10', '10.0'])
def test_config(port_28016, min_collection_interval_10):
    # type: (Any, Any) -> None
    instance = {
        'host': '192.168.121.1',
        'port': port_28016,
        'username': 'datadog-agent',
        'password': 's3kr3t',
        'tls_ca_cert': '/path/to/client.cert',
        'tags': ['env:testing'],
        # FIX: `min_collection_interval_10` was parametrized but never used, so
        # the parametrization exercised nothing. Feed it into the instance so the
        # int/str/float-string variants are actually tested.
        'min_collection_interval': min_collection_interval_10,
    }  # type: Instance

    config = Config(instance)
    assert config.host == '192.168.121.1'
    assert config.port == 28016
    assert config.user == 'datadog-agent'
    assert config.tls_ca_cert == '/path/to/client.cert'
    assert config.tags == ['env:testing']
    # NOTE(review): assumes `Config` exposes `min_collection_interval` coerced to
    # a number (as the parametrize values suggest) — confirm against config.py.
    assert config.min_collection_interval == 10


@pytest.mark.parametrize('value', [42, True, object()])
def test_invalid_host(value):
    # type: (Any) -> None
    with pytest.raises(ConfigurationError):
        Config(instance={'host': value})


@pytest.mark.parametrize('value', [-28016, '280.16', 'true', object()])
def test_invalid_port(value):
    # type: (Any) -> None
    with pytest.raises(ConfigurationError):
        Config(instance={'port': value})
# (C) Datadog, Inc. 2020-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import pytest

from datadog_checks.rethinkdb.version import parse_version

from ..common import MALFORMED_VERSION_STRING_PARAMS

# Apply the unit marker module-wide instead of decorating each test.
pytestmark = pytest.mark.unit


@pytest.mark.parametrize(
    'version_string, expected_version',
    [
        pytest.param('rethinkdb 2.4.0~0bionic (CLANG 6.0.0 (tags/RELEASE_600/final))', '2.4.0', id='2.4'),
        pytest.param('rethinkdb 2.4.0-beta~0bionic (debug)', '2.4.0', id='2.4-beta'),
        pytest.param('rethinkdb 2.4.0~0bionic (debug)', '2.4.0', id='2.4-debug'),
        pytest.param('rethinkdb 2.3.3~0jessie (GCC 4.9.2)', '2.3.3', id='2.3'),
        pytest.param('rethinkdb 2.3.6 (GCC 4.9.2)', '2.3.6', id='2.3-no-build'),
        pytest.param('rethinkdb 2.3.3', '2.3.3', id='no-compilation-string'),
    ],
)
def test_parse_version(version_string, expected_version):
    # type: (str, str) -> None
    """Well-formed `rethinkdb --version`-style strings parse to their semver part."""
    assert parse_version(version_string) == expected_version


@pytest.mark.parametrize('version_string', MALFORMED_VERSION_STRING_PARAMS)
def test_parse_malformed_version(version_string):
    # type: (str) -> None
    """Strings that don't match the expected format raise `ValueError`."""
    with pytest.raises(ValueError):
        parse_version(version_string)
rethinkdb:2.3.6 + 2.3: RETHINKDB_RAW_VERSION = 2.3.6 + latest: RETHINKDB_IMAGE = rethinkdb:latest + latest: RETHINKDB_RAW_VERSION =