diff --git a/.github/containers/Dockerfile b/.github/containers/Dockerfile
index 8d3a187cc6..0c15dff18c 100644
--- a/.github/containers/Dockerfile
+++ b/.github/containers/Dockerfile
@@ -55,10 +55,22 @@ RUN export DEBIAN_FRONTEND=noninteractive && \
     zlib1g-dev && \
     rm -rf /var/lib/apt/lists/*
 
+# Build librdkafka from source
+ARG LIBRDKAFKA_VERSION=2.1.1
+RUN cd /tmp && \
+    wget https://github.com/confluentinc/librdkafka/archive/refs/tags/v${LIBRDKAFKA_VERSION}.zip -O ./librdkafka.zip && \
+    unzip ./librdkafka.zip && \
+    rm ./librdkafka.zip && \
+    cd ./librdkafka-${LIBRDKAFKA_VERSION} && \
+    ./configure && \
+    make all install && \
+    cd /tmp && \
+    rm -rf ./librdkafka-${LIBRDKAFKA_VERSION}
+
 # Setup ODBC config
-RUN sed -i 's/Driver=psqlodbca.so/Driver=\/usr\/lib\/x86_64-linux-gnu\/odbc\/psqlodbca.so/g' /etc/odbcinst.ini && \
-    sed -i 's/Driver=psqlodbcw.so/Driver=\/usr\/lib\/x86_64-linux-gnu\/odbc\/psqlodbcw.so/g' /etc/odbcinst.ini && \
-    sed -i 's/Setup=libodbcpsqlS.so/Setup=\/usr\/lib\/x86_64-linux-gnu\/odbc\/libodbcpsqlS.so/g' /etc/odbcinst.ini
+RUN sed -i 's|Driver=psqlodbca.so|Driver=/usr/lib/x86_64-linux-gnu/odbc/psqlodbca.so|g' /etc/odbcinst.ini && \
+    sed -i 's|Driver=psqlodbcw.so|Driver=/usr/lib/x86_64-linux-gnu/odbc/psqlodbcw.so|g' /etc/odbcinst.ini && \
+    sed -i 's|Setup=libodbcpsqlS.so|Setup=/usr/lib/x86_64-linux-gnu/odbc/libodbcpsqlS.so|g' /etc/odbcinst.ini
 
 # Set the locale
 RUN locale-gen --no-purge en_US.UTF-8
diff --git a/.github/containers/Makefile b/.github/containers/Makefile
index 8a72f4c458..35081f7386 100644
--- a/.github/containers/Makefile
+++ b/.github/containers/Makefile
@@ -22,7 +22,9 @@ default: test
 .PHONY: build
 build:
 	@# Perform a shortened build for testing
-	@docker build --build-arg='PYTHON_VERSIONS=3.10 2.7' $(MAKEFILE_DIR) -t ghcr.io/newrelic/newrelic-python-agent-ci:local
+	@docker build $(MAKEFILE_DIR) \
+		-t ghcr.io/newrelic/newrelic-python-agent-ci:local \
+		--build-arg='PYTHON_VERSIONS=3.10 2.7'
 
 .PHONY: test
 test: build
@@ -38,7 +40,9 @@ run: build
 	@docker run --rm -it \
 		--mount type=bind,source="$(REPO_ROOT)",target=/home/github/python-agent \
 		--workdir=/home/github/python-agent \
+		--add-host=host.docker.internal:host-gateway \
 		-e NEW_RELIC_HOST="${NEW_RELIC_HOST}" \
 		-e NEW_RELIC_LICENSE_KEY="${NEW_RELIC_LICENSE_KEY}" \
 		-e NEW_RELIC_DEVELOPER_MODE="${NEW_RELIC_DEVELOPER_MODE}" \
+		-e GITHUB_ACTIONS="true" \
 		ghcr.io/newrelic/newrelic-python-agent-ci:local /bin/bash
diff --git a/.github/containers/install-python.sh b/.github/containers/install-python.sh
index 92184df3a9..2031e2d927 100755
--- a/.github/containers/install-python.sh
+++ b/.github/containers/install-python.sh
@@ -13,10 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-set -e
-
-SCRIPT_DIR=$(dirname "$0")
-PIP_REQUIREMENTS=$(cat /requirements.txt)
+set -eo pipefail
 
 main() {
     # Coerce space separated string to array
@@ -50,7 +47,7 @@ main() {
     pyenv global ${PYENV_VERSIONS[@]}
 
     # Install dependencies for main python installation
-    pyenv exec pip install --upgrade $PIP_REQUIREMENTS
+    pyenv exec pip install --upgrade -r /requirements.txt
 }
 
 main
diff --git a/.github/scripts/retry.sh b/.github/scripts/retry.sh
index b5d51f77b5..079798a72d 100755
--- a/.github/scripts/retry.sh
+++ b/.github/scripts/retry.sh
@@ -39,4 +39,4 @@ for i in $(seq 1 $retries); do
 done
 
 # Exit with status code of wrapped command
-exit $?
+exit $result
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 52576c155b..59e55d9b1b 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -38,7 +38,7 @@ jobs:
       - elasticsearchserver08
       - gearman
       - grpc
-      #- kafka
+      - kafka
       - memcached
       - mongodb
       - mssql
@@ -119,7 +119,7 @@
     runs-on: ubuntu-20.04
 
     container:
-      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-b0ffe8bdbb28ba0579377076ad680054da8fbc28
+      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-52dab3b91d91005b8ef657fba3fe08a5fbfa23b1
       options: >-
         --add-host=host.docker.internal:host-gateway
     timeout-minutes: 30
@@ -164,7 +164,7 @@
     runs-on: ubuntu-20.04
 
     container:
-      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-b0ffe8bdbb28ba0579377076ad680054da8fbc28
+      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-52dab3b91d91005b8ef657fba3fe08a5fbfa23b1
       options: >-
         --add-host=host.docker.internal:host-gateway
     timeout-minutes: 30
@@ -209,7 +209,7 @@
     runs-on: ubuntu-20.04
 
    container:
-      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-b0ffe8bdbb28ba0579377076ad680054da8fbc28
+      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-52dab3b91d91005b8ef657fba3fe08a5fbfa23b1
       options: >-
         --add-host=host.docker.internal:host-gateway
     timeout-minutes: 30
@@ -269,7 +269,7 @@
     runs-on: ubuntu-20.04
 
     container:
-      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-b0ffe8bdbb28ba0579377076ad680054da8fbc28
+      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-52dab3b91d91005b8ef657fba3fe08a5fbfa23b1
       options: >-
         --add-host=host.docker.internal:host-gateway
     timeout-minutes: 30
@@ -332,7 +332,7 @@
     runs-on: ubuntu-20.04
 
     container:
-      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-b0ffe8bdbb28ba0579377076ad680054da8fbc28
+      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-52dab3b91d91005b8ef657fba3fe08a5fbfa23b1
       options: >-
         --add-host=host.docker.internal:host-gateway
     timeout-minutes: 30
@@ -395,7 +395,7 @@
     runs-on: ubuntu-20.04
 
     container:
-      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-b0ffe8bdbb28ba0579377076ad680054da8fbc28
+      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-52dab3b91d91005b8ef657fba3fe08a5fbfa23b1
       options: >-
         --add-host=host.docker.internal:host-gateway
     timeout-minutes: 30
@@ -453,7 +453,7 @@
     runs-on: ubuntu-20.04
 
     container:
-      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-b0ffe8bdbb28ba0579377076ad680054da8fbc28
+      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-52dab3b91d91005b8ef657fba3fe08a5fbfa23b1
       options: >-
         --add-host=host.docker.internal:host-gateway
     timeout-minutes: 30
@@ -513,7 +513,7 @@
     runs-on: ubuntu-20.04
 
     container:
-      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-b0ffe8bdbb28ba0579377076ad680054da8fbc28
+      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-52dab3b91d91005b8ef657fba3fe08a5fbfa23b1
       options: >-
         --add-host=host.docker.internal:host-gateway
     timeout-minutes: 30
@@ -571,7 +571,7 @@
     runs-on: ubuntu-20.04
 
     container:
-      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-b0ffe8bdbb28ba0579377076ad680054da8fbc28
+      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-52dab3b91d91005b8ef657fba3fe08a5fbfa23b1
       options: >-
         --add-host=host.docker.internal:host-gateway
     timeout-minutes: 30
@@ -619,85 +619,75 @@ jobs:
           path: ./**/.coverage.*
           retention-days: 1
 
-  # kafka:
-  #   env:
-  #     TOTAL_GROUPS: 4
-
-  #   strategy:
-  #     fail-fast: false
-  #     matrix:
-  #       group-number: [1, 2, 3, 4]
-
-  #   runs-on: ubuntu-20.04
-  #   container:
-  #     image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-b0ffe8bdbb28ba0579377076ad680054da8fbc28
-  #     options: >-
-  #       --add-host=host.docker.internal:host-gateway
-  #   timeout-minutes: 30
-
-  #   services:
-  #     zookeeper:
-  #       image: bitnami/zookeeper:3.7
-  #       env:
-  #         ALLOW_ANONYMOUS_LOGIN: yes
-
-  #       ports:
-  #         - 2181:2181
-
-  #     kafka:
-  #       image: bitnami/kafka:3.2
-  #       ports:
-  #         - 8080:8080
-  #         - 8081:8081
-  #       env:
-  #         ALLOW_PLAINTEXT_LISTENER: yes
-  #         KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
-  #         KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE: true
-  #         KAFKA_CFG_LISTENERS: L1://:8080,L2://:8081
-  #         KAFKA_CFG_ADVERTISED_LISTENERS: L1://127.0.0.1:8080,L2://kafka:8081,
-  #         KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: L1:PLAINTEXT,L2:PLAINTEXT
-  #         KAFKA_CFG_INTER_BROKER_LISTENER_NAME: L2
-
-  #   steps:
-  #     - uses: actions/checkout@v3
-
-  #     - name: Fetch git tags
-  #       run: |
-  #         git config --global --add safe.directory "$GITHUB_WORKSPACE"
-  #         git fetch --tags origin
-
-  #     # Special case packages
-  #     - name: Install librdkafka-dev
-  #       run: |
-  #         # Use lsb-release to find the codename of Ubuntu to use to install the correct library name
-  #         sudo apt-get update
-  #         sudo ln -fs /usr/share/zoneinfo/America/Los_Angeles /etc/localtime
-  #         sudo apt-get install -y wget gnupg2 software-properties-common
-  #         sudo wget -qO - https://packages.confluent.io/deb/7.2/archive.key | sudo apt-key add -
-  #         sudo add-apt-repository "deb https://packages.confluent.io/clients/deb $(lsb_release -cs) main"
-  #         sudo apt-get update
-  #         sudo apt-get install -y librdkafka-dev/$(lsb_release -c | cut -f 2)
-
-  #     - name: Get Environments
-  #       id: get-envs
-  #       run: |
-  #         echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> $GITHUB_OUTPUT
-  #       env:
-  #         GROUP_NUMBER: ${{ matrix.group-number }}
-
-  #     - name: Test
-  #       run: |
-  #         tox -vv -e ${{ steps.get-envs.outputs.envs }}
-  #       env:
-  #         TOX_PARALLEL_NO_SPINNER: 1
-  #         PY_COLORS: 0
-
-  #     - name: Upload Coverage Artifacts
-  #       uses: actions/upload-artifact@v3
-  #       with:
-  #         name: coverage-${{ github.job }}-${{ strategy.job-index }}
-  #         path: ./**/.coverage.*
-  #         retention-days: 1
+  kafka:
+    env:
+      TOTAL_GROUPS: 4
+
+    strategy:
+      fail-fast: false
+      matrix:
+        group-number: [1, 2, 3, 4]
+
+    runs-on: ubuntu-20.04
+    container:
+      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-52dab3b91d91005b8ef657fba3fe08a5fbfa23b1
+      options: >-
+        --add-host=host.docker.internal:host-gateway
+    timeout-minutes: 30
+
+    services:
+      zookeeper:
+        image: bitnami/zookeeper:3.7
+        env:
+          ALLOW_ANONYMOUS_LOGIN: yes
+
+        ports:
+          - 2181:2181
+
+      kafka:
+        image: bitnami/kafka:3.2
+        ports:
+          - 8080:8080
+          - 8082:8082
+          - 8083:8083
+        env:
+          KAFKA_ENABLE_KRAFT: no
+          ALLOW_PLAINTEXT_LISTENER: yes
+          KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+          KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE: true
+          KAFKA_CFG_LISTENERS: L1://:8082,L2://:8083,L3://:8080
+          KAFKA_CFG_ADVERTISED_LISTENERS: L1://host.docker.internal:8082,L2://host.docker.internal:8083,L3://kafka:8080
+          KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: L1:PLAINTEXT,L2:PLAINTEXT,L3:PLAINTEXT
+          KAFKA_CFG_INTER_BROKER_LISTENER_NAME: L3
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Fetch git tags
+        run: |
+          git config --global --add safe.directory "$GITHUB_WORKSPACE"
+          git fetch --tags origin
+
+      - name: Get Environments
+        id: get-envs
+        run: |
+          echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> $GITHUB_OUTPUT
+        env:
+          GROUP_NUMBER: ${{ matrix.group-number }}
+
+      - name: Test
+        run: |
+          tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto
+        env:
+          TOX_PARALLEL_NO_SPINNER: 1
+          PY_COLORS: 0
+
+      - name: Upload Coverage Artifacts
+        uses: actions/upload-artifact@v3
+        with:
+          name: coverage-${{ github.job }}-${{ strategy.job-index }}
+          path: ./**/.coverage.*
+          retention-days: 1
 
   mongodb:
     env:
@@ -710,7 +700,7 @@
     runs-on: ubuntu-20.04
 
     container:
-      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-b0ffe8bdbb28ba0579377076ad680054da8fbc28
+      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-52dab3b91d91005b8ef657fba3fe08a5fbfa23b1
       options: >-
         --add-host=host.docker.internal:host-gateway
     timeout-minutes: 30
@@ -768,7 +758,7 @@
     runs-on: ubuntu-20.04
 
     container:
-      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-b0ffe8bdbb28ba0579377076ad680054da8fbc28
+      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-52dab3b91d91005b8ef657fba3fe08a5fbfa23b1
       options: >-
         --add-host=host.docker.internal:host-gateway
     timeout-minutes: 30
@@ -828,7 +818,7 @@
     runs-on: ubuntu-20.04
 
     container:
-      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-b0ffe8bdbb28ba0579377076ad680054da8fbc28
+      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-52dab3b91d91005b8ef657fba3fe08a5fbfa23b1
       options: >-
         --add-host=host.docker.internal:host-gateway
     timeout-minutes: 30
@@ -889,7 +879,7 @@
     runs-on: ubuntu-20.04
 
     container:
-      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-b0ffe8bdbb28ba0579377076ad680054da8fbc28
+      image: ghcr.io/newrelic/newrelic-python-agent-ci:sha-52dab3b91d91005b8ef657fba3fe08a5fbfa23b1
       options: >-
         --add-host=host.docker.internal:host-gateway
     timeout-minutes: 30
diff --git a/codecov.yml b/codecov.yml
index 8ed9c30200..c2441c9700 100644
--- a/codecov.yml
+++ b/codecov.yml
@@ -19,8 +19,5 @@ ignore:
   - "newrelic/hooks/database_oursql.py"
   - "newrelic/hooks/database_psycopg2ct.py"
   - "newrelic/hooks/datastore_umemcache.py"
-  # Temporarily disable kafka
-  - "newrelic/hooks/messagebroker_kafkapython.py"
-  - "newrelic/hooks/messagebroker_confluentkafka.py"
   - "newrelic/admin/*"
   - "newrelic/console.py"
diff --git a/tests/messagebroker_confluentkafka/conftest.py b/tests/messagebroker_confluentkafka/conftest.py
index e29596d559..fa86b6b3c0 100644
--- a/tests/messagebroker_confluentkafka/conftest.py
+++ b/tests/messagebroker_confluentkafka/conftest.py
@@ -84,7 +84,7 @@ def producer(topic, client_type, json_serializer):
 
 
 @pytest.fixture(scope="function")
-def consumer(topic, producer, client_type, json_deserializer):
+def consumer(group_id, topic, producer, client_type, json_deserializer):
     from confluent_kafka import Consumer, DeserializingConsumer
 
     if client_type == "cimpl":
@@ -93,7 +93,7 @@ def consumer(topic, producer, client_type, json_deserializer):
                 "bootstrap.servers": BROKER,
                 "auto.offset.reset": "earliest",
                 "heartbeat.interval.ms": 1000,
-                "group.id": "test",
+                "group.id": group_id,
             }
         )
     elif client_type == "serializer_function":
@@ -102,7 +102,7 @@ def consumer(topic, producer, client_type, json_deserializer):
                 "bootstrap.servers": BROKER,
                 "auto.offset.reset": "earliest",
                 "heartbeat.interval.ms": 1000,
-                "group.id": "test",
+                "group.id": group_id,
                 "value.deserializer": lambda v, c: json.loads(v.decode("utf-8")),
                 "key.deserializer": lambda v, c: json.loads(v.decode("utf-8")) if v is not None else None,
             }
@@ -113,7 +113,7 @@
                 "bootstrap.servers": BROKER,
                 "auto.offset.reset": "earliest",
                 "heartbeat.interval.ms": 1000,
-                "group.id": "test",
+                "group.id": group_id,
                 "value.deserializer": json_deserializer,
                 "key.deserializer": json_deserializer,
             }
@@ -181,6 +181,11 @@ def topic():
     admin.delete_topics(new_topics)
 
 
+@pytest.fixture(scope="session")
+def group_id():
+    return str(uuid.uuid4())
+
+
 @pytest.fixture()
 def send_producer_message(topic, producer, serialize, client_type):
     callback_called = []
diff --git a/tests/messagebroker_kafkapython/conftest.py b/tests/messagebroker_kafkapython/conftest.py
index becef31a0f..de12f5830e 100644
--- a/tests/messagebroker_kafkapython/conftest.py
+++ b/tests/messagebroker_kafkapython/conftest.py
@@ -86,7 +86,7 @@ def producer(client_type, json_serializer, json_callable_serializer):
 
 
 @pytest.fixture(scope="function")
-def consumer(topic, producer, client_type, json_deserializer, json_callable_deserializer):
+def consumer(group_id, topic, producer, client_type, json_deserializer, json_callable_deserializer):
     if client_type == "no_serializer":
         consumer = kafka.KafkaConsumer(
             topic,
@@ -94,7 +94,7 @@ def consumer(topic, producer, client_type, json_deserializer, json_callable_dese
             auto_offset_reset="earliest",
             consumer_timeout_ms=100,
             heartbeat_interval_ms=1000,
-            group_id="test",
+            group_id=group_id,
         )
     elif client_type == "serializer_function":
         consumer = kafka.KafkaConsumer(
@@ -105,7 +105,7 @@ def consumer(topic, producer, client_type, json_deserializer, json_callable_dese
             auto_offset_reset="earliest",
             consumer_timeout_ms=100,
             heartbeat_interval_ms=1000,
-            group_id="test",
+            group_id=group_id,
         )
     elif client_type == "callable_object":
         consumer = kafka.KafkaConsumer(
@@ -116,7 +116,7 @@ def consumer(topic, producer, client_type, json_deserializer, json_callable_dese
             auto_offset_reset="earliest",
             consumer_timeout_ms=100,
             heartbeat_interval_ms=1000,
-            group_id="test",
+            group_id=group_id,
         )
     elif client_type == "serializer_object":
         consumer = kafka.KafkaConsumer(
@@ -127,7 +127,7 @@ def consumer(topic, producer, client_type, json_deserializer, json_callable_dese
             auto_offset_reset="earliest",
             consumer_timeout_ms=100,
             heartbeat_interval_ms=1000,
-            group_id="test",
+            group_id=group_id,
         )
 
     yield consumer
@@ -202,6 +202,11 @@ def topic():
     admin.delete_topics([topic])
 
 
+@pytest.fixture(scope="session")
+def group_id():
+    return str(uuid.uuid4())
+
+
 @pytest.fixture()
 def send_producer_message(topic, producer, serialize):
     def _test():
diff --git a/tests/testing_support/db_settings.py b/tests/testing_support/db_settings.py
index ef9a3419c1..b095c09121 100644
--- a/tests/testing_support/db_settings.py
+++ b/tests/testing_support/db_settings.py
@@ -247,12 +247,13 @@ def kafka_settings():
         2. Github Actions
     """
 
-    host = "host.docker.internal" if "GITHUB_ACTIONS" in os.environ else "localhost"
+    host = "host.docker.internal" if "GITHUB_ACTIONS" in os.environ else "127.0.0.1"
+    base_port = 8082 if "GITHUB_ACTIONS" in os.environ else 8080
     instances = 2
     settings = [
         {
             "host": host,
-            "port": 8080 + instance_num,
+            "port": base_port + instance_num,
         }
         for instance_num in range(instances)
     ]
diff --git a/tox.ini b/tox.ini
index 989c0334ed..abd4e68892 100644
--- a/tox.ini
+++ b/tox.ini
@@ -160,7 +160,7 @@ envlist =
     python-framework_tornado-{py38,py39,py310,py311}-tornadomaster,
     rabbitmq-messagebroker_pika-{py27,py37,py38,py39,pypy27,pypy38}-pika0.13,
     rabbitmq-messagebroker_pika-{py37,py38,py39,py310,py311,pypy38}-pikalatest,
-    kafka-messagebroker_confluentkafka-{py27,py37,py38,py39,py310,py311}-confluentkafkalatest,
+    kafka-messagebroker_confluentkafka-{py37,py38,py39,py310,py311}-confluentkafkalatest,
     kafka-messagebroker_confluentkafka-{py27,py39}-confluentkafka{0107,0106},
     ; confluent-kafka had a bug in 1.8.2's setup.py file which was incompatible with 2.7.
     kafka-messagebroker_confluentkafka-{py39}-confluentkafka{0108},