From 2ccf8fb05d64d56739b94b042d552212d8fe1771 Mon Sep 17 00:00:00 2001
From: Sarah
Date: Wed, 23 Mar 2022 13:54:11 -0400
Subject: [PATCH 01/17] Add gssapi dep

---
 .../datadog_checks/base/data/agent_requirements.in |  2 ++
 kafka_consumer/pyproject.toml                      |  2 ++
 kafka_consumer/tests/test_kafka_consumer.py        | 11 +++++++++++
 3 files changed, 15 insertions(+)

diff --git a/datadog_checks_base/datadog_checks/base/data/agent_requirements.in b/datadog_checks_base/datadog_checks/base/data/agent_requirements.in
index 0462cd945f3bf..85b8fd5093bba 100644
--- a/datadog_checks_base/datadog_checks/base/data/agent_requirements.in
+++ b/datadog_checks_base/datadog_checks/base/data/agent_requirements.in
@@ -26,6 +26,8 @@ enum34==1.1.10; python_version < "3.0"
 foundationdb==6.3.22; python_version > "3.0"
 futures==3.3.0; python_version < "3.0"
 gearman==2.0.2; sys_platform != "win32" and python_version < "3.0"
+gssapi==1.6.1; python_version < "3.0"
+gssapi==1.7.3; python_version > "3.0"
 immutables==0.16; python_version > "3.0"
 in-toto==1.0.1
 ipaddress==1.0.23; python_version < "3.0"
diff --git a/kafka_consumer/pyproject.toml b/kafka_consumer/pyproject.toml
index d15d064c4a8b8..d1535c70c5e4e 100644
--- a/kafka_consumer/pyproject.toml
+++ b/kafka_consumer/pyproject.toml
@@ -37,6 +37,8 @@ dynamic = [

 [project.optional-dependencies]
 deps = [
+    "gssapi==1.6.1; python_version < '3.0'",
+    "gssapi==1.7.3; python_version > '3.0'",
     "kafka-python==2.0.2",
     "kazoo==2.8.0",
 ]
diff --git a/kafka_consumer/tests/test_kafka_consumer.py b/kafka_consumer/tests/test_kafka_consumer.py
index da59988469add..02aee62ad2a98 100644
--- a/kafka_consumer/tests/test_kafka_consumer.py
+++ b/kafka_consumer/tests/test_kafka_consumer.py
@@ -43,6 +43,17 @@ def test_uses_new_implementation_when_new_version_specified(kafka_instance):
     assert isinstance(kafka_consumer_check.sub_check, NewKafkaConsumerCheck)


+@pytest.mark.unit
+def test_gssapi(kafka_instance, dd_run_check):
+    instance = copy.deepcopy(kafka_instance)
+    instance['kafka_client_api_version'] = '0.10.2'
+    instance['sasl_mechanism'] = 'GSSAPI'
+    instance['security_protocol'] = 'SASL_PLAINTEXT'
+    instance['sasl_kerberos_service_name'] = 'kafka'
+    kafka_consumer_check = KafkaCheck('kafka_consumer', {}, [instance])
+    dd_run_check(kafka_consumer_check)
+
+
 @pytest.mark.unit
 def test_tls_config_ok(kafka_instance_tls):
     with mock.patch('datadog_checks.base.utils.tls.ssl') as ssl:

From db3f56f75ccbdbb12d608ec7bd32bafef7f447fd Mon Sep 17 00:00:00 2001
From: Sarah
Date: Wed, 23 Mar 2022 14:15:26 -0400
Subject: [PATCH 02/17] Fix quote

---
 .../datadog_checks/base/data/agent_requirements.in | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/datadog_checks_base/datadog_checks/base/data/agent_requirements.in b/datadog_checks_base/datadog_checks/base/data/agent_requirements.in
index 5ae70e56b5910..0b3cf95f4f82c 100644
--- a/datadog_checks_base/datadog_checks/base/data/agent_requirements.in
+++ b/datadog_checks_base/datadog_checks/base/data/agent_requirements.in
@@ -27,7 +27,7 @@ foundationdb==6.3.22; python_version > '3.0'
 futures==3.3.0; python_version < '3.0'
 gearman==2.0.2; sys_platform != 'win32' and python_version < '3.0'
 gssapi==1.6.1; python_version < '3.0'
-gssapi==1.7.3; python_version > 3.0'
+gssapi==1.7.3; python_version > '3.0'
 immutables==0.16; python_version > '3.0'
 in-toto==1.0.1
 ipaddress==1.0.23; python_version < '3.0'

From 02a3e393516c6072a40cb2c3937313c87f2946ab Mon Sep 17 00:00:00 2001
From: Sarah
Date: Thu, 24 Mar 2022 14:07:00 -0400
Subject: [PATCH 03/17] Add install script

---
 .../scripts/kafka_consumer/55_install_kerberos.sh | 8 ++++++++
 1 file changed, 8 insertions(+)
 create mode 100644 .azure-pipelines/scripts/kafka_consumer/55_install_kerberos.sh

diff --git a/.azure-pipelines/scripts/kafka_consumer/55_install_kerberos.sh b/.azure-pipelines/scripts/kafka_consumer/55_install_kerberos.sh
new file mode 100644
index 0000000000000..1c21cc7c9265b
--- /dev/null
+++ b/.azure-pipelines/scripts/kafka_consumer/55_install_kerberos.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+set -ex
+
+sudo apt-get update
+sudo apt-get install -y --no-install-recommends libkrb5-dev
+
+set +ex

From 1467b328b001a55ee7353cac8f144df5e001bfb1 Mon Sep 17 00:00:00 2001
From: Sarah
Date: Thu, 24 Mar 2022 14:19:22 -0400
Subject: [PATCH 04/17] add metadata

---
 kafka_consumer/tests/conftest.py | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/kafka_consumer/tests/conftest.py b/kafka_consumer/tests/conftest.py
index d14cae807e19b..6b581252cd5fb 100644
--- a/kafka_consumer/tests/conftest.py
+++ b/kafka_consumer/tests/conftest.py
@@ -56,7 +56,16 @@ def dd_environment(mock_local_kafka_hosts_dns, e2e_instance):
             'KAFKA_HOST': HOST_IP
         },
     ):
-        yield e2e_instance, {'custom_hosts': [('kafka1', '127.0.0.1'), ('kafka2', '127.0.0.1')]}
+        yield e2e_instance, E2E_METADATA
+
+
+E2E_METADATA = {
+    'start_commands': [
+        'apt-get update',
+        'apt-get install -y --no-install-recommends libkrb5-dev',
+    ],
+    'custom_hosts': [('kafka1', '127.0.0.1'), ('kafka2', '127.0.0.1')],
+}


 @pytest.fixture(scope='session')

From 7e05b9182c83fa8283293ea3d07f7cc10e466cec Mon Sep 17 00:00:00 2001
From: Sarah
Date: Thu, 24 Mar 2022 14:23:09 -0400
Subject: [PATCH 05/17] move to linux dir

---
 .../scripts/kafka_consumer/{ => linux}/55_install_kerberos.sh | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename .azure-pipelines/scripts/kafka_consumer/{ => linux}/55_install_kerberos.sh (100%)

diff --git a/.azure-pipelines/scripts/kafka_consumer/55_install_kerberos.sh b/.azure-pipelines/scripts/kafka_consumer/linux/55_install_kerberos.sh
similarity index 100%
rename from .azure-pipelines/scripts/kafka_consumer/55_install_kerberos.sh
rename to .azure-pipelines/scripts/kafka_consumer/linux/55_install_kerberos.sh

From f22bc4607c6105f4c80c730b72e13d4ee8636c8a Mon Sep 17 00:00:00 2001
From: Sarah
Date: Tue, 29 Mar 2022 10:56:55 -0400
Subject: [PATCH 06/17] rename and http

---
 .../linux/{55_install_kerberos.sh => 32_install_kerberos.sh} | 0
 kafka_consumer/tox.ini                                       | 2 +-
 2 files changed, 1 insertion(+), 1 deletion(-)
 rename .azure-pipelines/scripts/kafka_consumer/linux/{55_install_kerberos.sh => 32_install_kerberos.sh} (100%)

diff --git a/.azure-pipelines/scripts/kafka_consumer/linux/55_install_kerberos.sh b/.azure-pipelines/scripts/kafka_consumer/linux/32_install_kerberos.sh
similarity index 100%
rename from .azure-pipelines/scripts/kafka_consumer/linux/55_install_kerberos.sh
rename to .azure-pipelines/scripts/kafka_consumer/linux/32_install_kerberos.sh
diff --git a/kafka_consumer/tox.ini b/kafka_consumer/tox.ini
index a6f5bd6feeb32..c4b0f00875dba 100644
--- a/kafka_consumer/tox.ini
+++ b/kafka_consumer/tox.ini
@@ -20,7 +20,7 @@ passenv =
     COMPOSE*
 extras = deps
 deps =
-    -e../datadog_checks_base[deps]
+    -e../datadog_checks_base[deps,http]
     -rrequirements-dev.txt
     -e../datadog_checks_tests_helper
 commands =

From 0df8a72ce14d5ae02e41ea85a5a6917e3bcd3906 Mon Sep 17 00:00:00 2001
From: Sarah
Date: Tue, 29 Mar 2022 11:35:06 -0400
Subject: [PATCH 07/17] Remove install commands

---
 kafka_consumer/tests/conftest.py | 11 +----------
 1 file changed, 1 insertion(+), 10 deletions(-)

diff --git a/kafka_consumer/tests/conftest.py b/kafka_consumer/tests/conftest.py
index 6b581252cd5fb..d14cae807e19b 100644
--- a/kafka_consumer/tests/conftest.py
+++ b/kafka_consumer/tests/conftest.py
@@ -56,16 +56,7 @@ def dd_environment(mock_local_kafka_hosts_dns, e2e_instance):
             'KAFKA_HOST': HOST_IP
         },
     ):
-        yield e2e_instance, E2E_METADATA
-
-
-E2E_METADATA = {
-    'start_commands': [
-        'apt-get update',
-        'apt-get install -y --no-install-recommends libkrb5-dev',
-    ],
-    'custom_hosts': [('kafka1', '127.0.0.1'), ('kafka2', '127.0.0.1')],
-}
+        yield e2e_instance, {'custom_hosts': [('kafka1', '127.0.0.1'), ('kafka2', '127.0.0.1')]}


 @pytest.fixture(scope='session')

From dd0b36f3be51b3c72b69328056e8305abfcb28d6 Mon Sep 17 00:00:00 2001
From: Sarah
Date: Tue, 29 Mar 2022 13:00:39 -0400
Subject: [PATCH 08/17] Update permissions

---
 .../scripts/kafka_consumer/linux/32_install_kerberos.sh | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 mode change 100644 => 100755 .azure-pipelines/scripts/kafka_consumer/linux/32_install_kerberos.sh

diff --git a/.azure-pipelines/scripts/kafka_consumer/linux/32_install_kerberos.sh b/.azure-pipelines/scripts/kafka_consumer/linux/32_install_kerberos.sh
old mode 100644
new mode 100755

From 60797eecb19129b0f1141917b92825b6e5327de8 Mon Sep 17 00:00:00 2001
From: Sarah
Date: Tue, 29 Mar 2022 13:19:15 -0400
Subject: [PATCH 09/17] catch exception and add comment

---
 kafka_consumer/tests/test_kafka_consumer.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/kafka_consumer/tests/test_kafka_consumer.py b/kafka_consumer/tests/test_kafka_consumer.py
index 02aee62ad2a98..7ec83972e7788 100644
--- a/kafka_consumer/tests/test_kafka_consumer.py
+++ b/kafka_consumer/tests/test_kafka_consumer.py
@@ -51,7 +51,10 @@ def test_gssapi(kafka_instance, dd_run_check):
     instance['security_protocol'] = 'SASL_PLAINTEXT'
     instance['sasl_kerberos_service_name'] = 'kafka'
     kafka_consumer_check = KafkaCheck('kafka_consumer', {}, [instance])
-    dd_run_check(kafka_consumer_check)
+    # assert the check doesn't fail with:
+    # Exception: Could not find main GSSAPI shared library.
+    with pytest.raises(Exception, match='^NoBrokersAvailable'):
+        dd_run_check(kafka_consumer_check)


 @pytest.mark.unit

From bda77eee359b6c9d25b985af0d4e75f1a58e81ec Mon Sep 17 00:00:00 2001
From: Sarah
Date: Tue, 29 Mar 2022 13:27:18 -0400
Subject: [PATCH 10/17] Change message

---
 kafka_consumer/tests/test_kafka_consumer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/kafka_consumer/tests/test_kafka_consumer.py b/kafka_consumer/tests/test_kafka_consumer.py
index 7ec83972e7788..ae19a7a1381bf 100644
--- a/kafka_consumer/tests/test_kafka_consumer.py
+++ b/kafka_consumer/tests/test_kafka_consumer.py
@@ -53,7 +53,7 @@ def test_gssapi(kafka_instance, dd_run_check):
     kafka_consumer_check = KafkaCheck('kafka_consumer', {}, [instance])
     # assert the check doesn't fail with:
     # Exception: Could not find main GSSAPI shared library.
-    with pytest.raises(Exception, match='^NoBrokersAvailable'):
+    with pytest.raises(Exception, match='^There was a problem collecting'):
         dd_run_check(kafka_consumer_check)


From 6dff843f05162cfcdfaffda94e624f0912e81e21 Mon Sep 17 00:00:00 2001
From: Sarah
Date: Tue, 29 Mar 2022 15:17:13 -0400
Subject: [PATCH 11/17] Update match

---
 kafka_consumer/tests/test_kafka_consumer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/kafka_consumer/tests/test_kafka_consumer.py b/kafka_consumer/tests/test_kafka_consumer.py
index ae19a7a1381bf..075322494d42c 100644
--- a/kafka_consumer/tests/test_kafka_consumer.py
+++ b/kafka_consumer/tests/test_kafka_consumer.py
@@ -53,7 +53,7 @@ def test_gssapi(kafka_instance, dd_run_check):
     kafka_consumer_check = KafkaCheck('kafka_consumer', {}, [instance])
     # assert the check doesn't fail with:
     # Exception: Could not find main GSSAPI shared library.
-    with pytest.raises(Exception, match='^There was a problem collecting'):
+    with pytest.raises(Exception, match='check_version'):
         dd_run_check(kafka_consumer_check)


From 8b78d1e463dd0574135a16f3075fa9a1f774faa0 Mon Sep 17 00:00:00 2001
From: Sarah
Date: Wed, 30 Mar 2022 14:41:45 -0400
Subject: [PATCH 12/17] Add license

---
 LICENSE-3rdparty.csv                                         | 1 +
 .../datadog_checks/dev/tooling/commands/validate/licenses.py | 2 ++
 2 files changed, 3 insertions(+)

diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv
index bcfd5b8a3a4bb..3af061fb34a50 100644
--- a/LICENSE-3rdparty.csv
+++ b/LICENSE-3rdparty.csv
@@ -31,6 +31,7 @@ flup-py3,Vendor,BSD-3-Clause,Allan Saddi
 foundationdb,PyPI,Apache-2.0,FoundationDB
 futures,PyPI,PSF,Brian Quinlan
 gearman,PyPI,Apache-2.0,Matthew Tai
+gssapi,PyPI,ISC,The Python GSSAPI Team
 immutables,PyPI,Apache-2.0,MagicStack Inc
 in-toto,PyPI,Apache-2.0,New York University: Secure Systems Lab
 ipaddress,PyPI,PSF,Philipp Hagemeister
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/licenses.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/licenses.py
index 777eccb15b623..cd1c53d3b7f2a 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/licenses.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/licenses.py
@@ -24,6 +24,8 @@
     'adodbapi': ['LGPL-2.1-only'],
     # https://github.com/rthalley/dnspython/blob/master/LICENSE
     'dnspython': ['ISC'],
+    # https://github.com/pythongssapi/python-gssapi/blob/main/LICENSE.txt
+    'gssapi': ['ISC'],
     # https://github.com/cannatag/ldap3/blob/dev/COPYING.txt
     'ldap3': ['LGPL-3.0-only'],
     # https://github.com/paramiko/paramiko/blob/master/LICENSE

From fdcb8fcc6eb51fd26856ba9b59e15d33f5ba9de2 Mon Sep 17 00:00:00 2001
From: Sarah
Date: Wed, 30 Mar 2022 15:07:10 -0400
Subject: [PATCH 13/17] install gcc

---
 .../scripts/kafka_consumer/linux/32_install_kerberos.sh | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.azure-pipelines/scripts/kafka_consumer/linux/32_install_kerberos.sh b/.azure-pipelines/scripts/kafka_consumer/linux/32_install_kerberos.sh
index 1c21cc7c9265b..9aa68770aa527 100755
--- a/.azure-pipelines/scripts/kafka_consumer/linux/32_install_kerberos.sh
+++ b/.azure-pipelines/scripts/kafka_consumer/linux/32_install_kerberos.sh
@@ -3,6 +3,7 @@
 set -ex

 sudo apt-get update
+sudo apt install build-essential
 sudo apt-get install -y --no-install-recommends libkrb5-dev

 set +ex

From 1f497fdb264b39f031c748c97356e46e422a8926 Mon Sep 17 00:00:00 2001
From: Sarah
Date: Wed, 30 Mar 2022 15:52:25 -0400
Subject: [PATCH 14/17] add start command

---
 kafka_consumer/tests/conftest.py | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/kafka_consumer/tests/conftest.py b/kafka_consumer/tests/conftest.py
index d14cae807e19b..eaa464d4ece9a 100644
--- a/kafka_consumer/tests/conftest.py
+++ b/kafka_consumer/tests/conftest.py
@@ -56,7 +56,16 @@ def dd_environment(mock_local_kafka_hosts_dns, e2e_instance):
             'KAFKA_HOST': HOST_IP
         },
     ):
-        yield e2e_instance, {'custom_hosts': [('kafka1', '127.0.0.1'), ('kafka2', '127.0.0.1')]}
+        yield e2e_instance, E2E_METADATA
+
+
+E2E_METADATA = {
+    'custom_hosts': [('kafka1', '127.0.0.1'), ('kafka2', '127.0.0.1')],
+    'start_commands': [
+        'apt-get update',
+        'apt-get install -y build-essential',
+    ],
+}


 @pytest.fixture(scope='session')

From c786e42716accf8fcdd720b6cc2d12d5c2ccb939 Mon Sep 17 00:00:00 2001
From: Sarah
Date: Wed, 30 Mar 2022 17:10:42 -0400
Subject: [PATCH 15/17] Dont install

---
 .../scripts/kafka_consumer/linux/32_install_kerberos.sh | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.azure-pipelines/scripts/kafka_consumer/linux/32_install_kerberos.sh b/.azure-pipelines/scripts/kafka_consumer/linux/32_install_kerberos.sh
index 9aa68770aa527..1c21cc7c9265b 100755
--- a/.azure-pipelines/scripts/kafka_consumer/linux/32_install_kerberos.sh
+++ b/.azure-pipelines/scripts/kafka_consumer/linux/32_install_kerberos.sh
@@ -3,7 +3,6 @@
 set -ex

 sudo apt-get update
-sudo apt install build-essential
 sudo apt-get install -y --no-install-recommends libkrb5-dev

 set +ex

From 9cd32c5bb4a913647c7c4b45bff35c8fee56319a Mon Sep 17 00:00:00 2001
From: Sarah
Date: Wed, 30 Mar 2022 17:35:25 -0400
Subject: [PATCH 16/17] Revert "Dont install"

This reverts commit c786e42716accf8fcdd720b6cc2d12d5c2ccb939.
---
 .../scripts/kafka_consumer/linux/32_install_kerberos.sh | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.azure-pipelines/scripts/kafka_consumer/linux/32_install_kerberos.sh b/.azure-pipelines/scripts/kafka_consumer/linux/32_install_kerberos.sh
index 1c21cc7c9265b..9aa68770aa527 100755
--- a/.azure-pipelines/scripts/kafka_consumer/linux/32_install_kerberos.sh
+++ b/.azure-pipelines/scripts/kafka_consumer/linux/32_install_kerberos.sh
@@ -3,6 +3,7 @@
 set -ex

 sudo apt-get update
+sudo apt install build-essential
 sudo apt-get install -y --no-install-recommends libkrb5-dev

 set +ex

From bc4c56df721f7598696025c96b42f7462da93301 Mon Sep 17 00:00:00 2001
From: Sarah
Date: Thu, 31 Mar 2022 12:08:35 -0400
Subject: [PATCH 17/17] remove

---
 kafka_consumer/tox.ini | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/kafka_consumer/tox.ini b/kafka_consumer/tox.ini
index 4192dc63620ae..32673ab9a0074 100644
--- a/kafka_consumer/tox.ini
+++ b/kafka_consumer/tox.ini
@@ -21,7 +21,7 @@ passenv =
     COMPOSE*
 extras = deps
 deps =
-    -e../datadog_checks_base[deps,http]
+    -e../datadog_checks_base[deps]
     -rrequirements-dev.txt
     -e../datadog_checks_tests_helper
 commands =