From 2f85e9f9572b63a8e55bac8dbe4855caa9d92c61 Mon Sep 17 00:00:00 2001
From: Jarek Potiuk
Date: Mon, 27 May 2024 22:40:42 +0200
Subject: [PATCH] Run tests for Providers also for Airflow 2.8 (#39606)

This is a follow-up to #39513, adding support for running provider tests
against Airflow 2.8 installed from PyPI. This change includes:

* simplifying how provider exclusions are specified in tests
* updating the unit test documentation that describes this kind of testing
* upgrading to the latest pytest tooling when an older Airflow version is
  installed (pulling the correct versions and packages of the pytest extensions)
* implementing 2.8 compatibility in the conftest/common test code
* implementing 2.8 compatibility for provider tests that relied on 2.9+ behaviours
---
 .github/workflows/check-providers.yml          |  6 +-
 Dockerfile.ci                                  |  6 ++
 contributing-docs/testing/unit_tests.rst       | 13 ++-
 dev/breeze/doc/images/output-commands.svg      | 42 ++++-----
 .../doc/images/output_testing_db-tests.svg     | 78 ++++++++--------
 .../doc/images/output_testing_db-tests.txt     |  2 +-
 .../images/output_testing_non-db-tests.svg     | 72 +++++++--------
 .../images/output_testing_non-db-tests.txt     |  2 +-
 .../doc/images/output_testing_tests.svg        | 92 +++++++++----------
 .../doc/images/output_testing_tests.txt        |  2 +-
 .../commands/testing_commands.py               |  4 +-
 .../src/airflow_breeze/global_constants.py     | 19 ++--
 pyproject.toml                                 |  1 +
 scripts/docker/entrypoint_ci.sh                |  6 ++
 tests/conftest.py                              | 17 +++-
 .../common/io/operators/test_file_transfer.py  |  1 -
 .../providers/common/io/xcom/test_backend.py   | 17 +++-
 .../log/test_stackdriver_task_handler.py       | 38 +++++---
 .../providers/smtp/notifications/test_smtp.py  |  8 +-
 tests/test_utils/db.py                         | 11 ++-
 20 files changed, 245 insertions(+), 192 deletions(-)

diff --git a/.github/workflows/check-providers.yml b/.github/workflows/check-providers.yml
index 8bac79f6fd48f..622a67fea97a1 100644
--- a/.github/workflows/check-providers.yml
+++ b/.github/workflows/check-providers.yml
@@ -232,8 +232,10 @@ jobs:
           Remove incompatible Airflow ${{ matrix.airflow-version }}:Python ${{ matrix.python-version }} provider packages
         run: |
-          rm -vf ${{ matrix.remove-providers }}
-        working-directory: ./dist
+          for provider in ${{ matrix.remove-providers }}; do
+            echo "Removing incompatible provider: ${provider}"
+            rm -vf dist/apache_airflow_providers_${provider/./_}*
+          done
         if: matrix.remove-providers != ''
       - name: "Download airflow package: wheel"
         run: |
diff --git a/Dockerfile.ci b/Dockerfile.ci
index daba5aaab8888..1c9d9db3dd40b 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -988,6 +988,12 @@ function determine_airflow_to_use() {
         python "${IN_CONTAINER_DIR}/install_airflow_and_providers.py"
         # Some packages might leave legacy typing module which causes test issues
         pip uninstall -y typing || true
+        # Upgrade pytest and pytest extensions to latest version if they have been accidentally
+        # downgraded by constraints
+        pip install --upgrade pytest pytest aiofiles aioresponses pytest-asyncio pytest-custom-exit-code \
+            pytest-icdiff pytest-instafail pytest-mock pytest-rerunfailures pytest-timeouts \
+            pytest-xdist pytest requests_mock time-machine \
+            --constraint https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt
     fi

     if [[ "${USE_AIRFLOW_VERSION}" =~ ^2\.2\..*|^2\.1\..*|^2\.0\..* && "${AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=}" != "" ]]; then
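The workflow step above maps each excluded provider id to the distribution files it produces in `dist/` by replacing dots with underscores. A minimal Python sketch of the same mapping, mirroring the `_exclusion()` helper that this patch removes from `global_constants.py` further down (the function name here is illustrative, not part of the patch):

    def provider_dist_glob(provider_id: str) -> str:
        # "common.io" -> "apache_airflow_providers_common_io*"
        return f"apache_airflow_providers_{provider_id.replace('.', '_').replace('-', '_')}*"

    assert provider_dist_glob("common.io") == "apache_airflow_providers_common_io*"
    assert provider_dist_glob("fab") == "apache_airflow_providers_fab*"

Keeping the plain provider ids in the CI matrix and doing the expansion in the workflow keeps `BASE_PROVIDERS_COMPATIBILITY_CHECKS` readable, which is the point of the simplification mentioned in the commit message.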
diff --git a/contributing-docs/testing/unit_tests.rst b/contributing-docs/testing/unit_tests.rst
index c7eaed99e23d0..3bc5cc88e13b3 100644
--- a/contributing-docs/testing/unit_tests.rst
+++ b/contributing-docs/testing/unit_tests.rst
@@ -1116,7 +1116,7 @@ if the providers still work when installed for older airflow versions.

 .. note::

-   For now it's done for 2.9.1 version only.
+   For now it's done for the 2.9.1 and 2.8.4 versions only.

 Those tests can be used to test compatibility of the providers with past and future releases of airflow.
 For example it could be used to run latest provider versions with released or main
@@ -1148,7 +1148,11 @@ This can be reproduced locally building providers from tag/commit of the airflow

     breeze release-management generate-constraints --airflow-constraints-mode constraints-source-providers --answer yes

-3. Enter breeze environment, installing selected airflow version and the provider packages prepared from main
+4. Remove providers that are not compatible with the Airflow version installed by default. You can look up
+   the incompatible providers in the ``BASE_PROVIDERS_COMPATIBILITY_CHECKS`` constant in the
+   ``./dev/breeze/src/airflow_breeze/global_constants.py`` file.
+
+5. Enter breeze environment, installing the selected airflow version and the provider packages prepared from main

 .. code-block:: bash
@@ -1158,13 +1162,13 @@ This can be reproduced locally building providers from tag/commit of the airflow
         --providers-skip-constraints \
         --mount-sources tests

-4. You can then run tests as usual:
+6. You can then run tests as usual:

 .. code-block:: bash

     pytest tests/providers//test.py

-5. Iterate with the tests
+7. Iterate with the tests

 The tests are run using:
@@ -1182,7 +1186,6 @@ Rebuilding single provider package can be done using this command:

     breeze release-management prepare-provider-packages \
         --version-suffix-for-pypi dev0 --package-format wheel

-
 Note that some of the tests, if written without taking care about the compatibility, might not work with older
 versions of Airflow - this is because of refactorings, renames, and tests relying on internals of Airflow
 that are not part of the public API. We deal with it in one of the following ways:
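One such approach, used by this very patch in `tests/providers/common/io/xcom/test_backend.py`, is to gate the whole test module on an Airflow version check and to import optional provider features defensively. A simplified sketch of that pattern (the real module also keeps its `db_test` marker in the `pytestmark` list):

    import pytest

    from airflow.exceptions import AirflowOptionalProviderFeatureException
    from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS

    # Skip every test in this module when running against Airflow < 2.9.
    pytestmark = pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only")

    try:
        # On older Airflow versions this import raises instead of failing with ImportError.
        from airflow.providers.common.io.xcom.backend import XComObjectStorageBackend
    except AirflowOptionalProviderFeatureException:
        pass  # the skipif marker above keeps these tests from running anyway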
diff --git a/dev/breeze/doc/images/output-commands.svg b/dev/breeze/doc/images/output-commands.svg
index b8407c5808d1c..1bca7090fc204 100644
[regenerated SVG screenshot of the `breeze` top-level help output; no wording changes are visible, only re-rendering]
diff --git a/dev/breeze/doc/images/output_testing_db-tests.svg b/dev/breeze/doc/images/output_testing_db-tests.svg
index 0860964ad880f..3a2e698fa89cf 100644
[regenerated SVG screenshot of the `breeze testing db-tests` help output; the visible wording change is the new `--skip-providers` help text: "Space-separated list of provider ids to skip when running tests"]
diff --git a/dev/breeze/doc/images/output_testing_db-tests.txt b/dev/breeze/doc/images/output_testing_db-tests.txt
index 1648cc7cbba30..97966e3774695 100644
--- a/dev/breeze/doc/images/output_testing_db-tests.txt
+++ b/dev/breeze/doc/images/output_testing_db-tests.txt
@@ -1 +1 @@
-31bee62efc24fa61aa868a0643e0db6b
+17d0216889e996fe5fd813e0f1c76af6
diff --git a/dev/breeze/doc/images/output_testing_non-db-tests.svg b/dev/breeze/doc/images/output_testing_non-db-tests.svg
index 1b0303f5deeb3..c784e7df79ea1 100644
[regenerated SVG screenshot of the `breeze testing non-db-tests` help output; the visible wording change is the reworded `--skip-providers` help text]
diff --git a/dev/breeze/doc/images/output_testing_non-db-tests.txt b/dev/breeze/doc/images/output_testing_non-db-tests.txt
index 96342cca85c29..219ca7e38ffb8 100644
--- a/dev/breeze/doc/images/output_testing_non-db-tests.txt
+++ b/dev/breeze/doc/images/output_testing_non-db-tests.txt
@@ -1 +1 @@
-543f9814b475e511749fdebf29d16298
+7f335b6d8225b8fb373b698c38bb86cf
diff --git a/dev/breeze/doc/images/output_testing_tests.svg b/dev/breeze/doc/images/output_testing_tests.svg
index 0c613f322aec3..df8529eeed5ca 100644
[regenerated SVG screenshot of the `breeze testing tests` help output; the visible wording change is the reworded `--skip-providers` help text]
diff --git a/dev/breeze/doc/images/output_testing_tests.txt b/dev/breeze/doc/images/output_testing_tests.txt
index a2c8ab0a6b621..c61dcdd9a3822 100644
--- a/dev/breeze/doc/images/output_testing_tests.txt
+++ b/dev/breeze/doc/images/output_testing_tests.txt
@@ -1 +1 @@
-e8d3a79d21f9f690bb81fe1a2921a3ca
+d216f0ec4694908bf846b529f055c2ff
diff --git a/dev/breeze/src/airflow_breeze/commands/testing_commands.py b/dev/breeze/src/airflow_breeze/commands/testing_commands.py
index 0daf3978a05d6..bce382e2b4c22 100644
--- a/dev/breeze/src/airflow_breeze/commands/testing_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/testing_commands.py
@@ -430,7 +430,7 @@ def _verify_parallelism_parameters(
 )
 option_skip_providers = click.option(
     "--skip-providers",
-    help="Coma separated list of providers to skip when running tests",
+    help="Space-separated list of provider ids to skip when running tests",
     type=str,
     default="",
     envvar="SKIP_PROVIDERS",
@@ -749,7 +749,7 @@ def _run_test_command(
     if skip_providers:
         ignored_path_list = [
             f"--ignore=tests/providers/{provider_id.replace('.','/')}"
-            for provider_id in skip_providers.split(",")
+            for provider_id in skip_providers.split(" ")
         ]
         extra_pytest_args = (*extra_pytest_args, *ignored_path_list)
     if run_in_parallel:
diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py
index 2903d0c26e229..b9045dc7bde41 100644
--- a/dev/breeze/src/airflow_breeze/global_constants.py
+++ b/dev/breeze/src/airflow_breeze/global_constants.py
@@ -25,7 +25,6 @@
 from enum import Enum
 from functools import lru_cache
 from pathlib import Path
-from typing import Iterable

 from airflow_breeze.utils.host_info_utils import Architecture
 from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT
@@ -476,29 +475,23 @@ def get_airflow_extras():
 CHICKEN_EGG_PROVIDERS = " ".join([])


-def _exclusion(providers: Iterable[str]) -> str:
-    return " ".join(
-        [f"apache_airflow_providers_{provider.replace('.', '_').replace('-','_')}*" for provider in providers]
-    )
-
-
-BASE_PROVIDERS_COMPATIBILITY_CHECKS: list[dict[str, str]] = [
+BASE_PROVIDERS_COMPATIBILITY_CHECKS: list[dict[str, str | list[str]]] = [
     {
         "python-version": "3.8",
         "airflow-version": "2.7.1",
-        "remove-providers": _exclusion(["common.io", "fab"]),
+        "remove-providers": "common.io fab",
         "run-tests": "false",
     },
     {
         "python-version": "3.8",
-        "airflow-version": "2.8.0",
-        "remove-providers": _exclusion(["fab"]),
-        "run-tests": "false",
+        "airflow-version": "2.8.4",
+        "remove-providers": "fab",
+        "run-tests": "true",
     },
     {
         "python-version": "3.8",
         "airflow-version": "2.9.1",
-        "remove-providers": _exclusion([]),
+        "remove-providers": "",
         "run-tests": "true",
     },
 ]
diff --git a/pyproject.toml b/pyproject.toml
index d47675c2650cf..80bf364f2a72e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -382,6 +382,7 @@ combine-as-imports = true
 "airflow/api/auth/backend/kerberos_auth.py" = ["E402"]
 "airflow/security/kerberos.py" = ["E402"]
 "airflow/security/utils.py" = ["E402"]
+"tests/providers/common/io/xcom/test_backend.py" = ["E402"]
 "tests/providers/elasticsearch/log/elasticmock/__init__.py" = ["E402"]
 "tests/providers/elasticsearch/log/elasticmock/utilities/__init__.py" = ["E402"]
 "tests/providers/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py" = ["E402"]
diff --git a/scripts/docker/entrypoint_ci.sh b/scripts/docker/entrypoint_ci.sh
index
8e68887f34d78..6a8601507d012 100755 --- a/scripts/docker/entrypoint_ci.sh +++ b/scripts/docker/entrypoint_ci.sh @@ -207,6 +207,12 @@ function determine_airflow_to_use() { python "${IN_CONTAINER_DIR}/install_airflow_and_providers.py" # Some packages might leave legacy typing module which causes test issues pip uninstall -y typing || true + # Upgrade pytest and pytest extensions to latest version if they have been accidentally + # downgraded by constraints + pip install --upgrade pytest pytest aiofiles aioresponses pytest-asyncio pytest-custom-exit-code \ + pytest-icdiff pytest-instafail pytest-mock pytest-rerunfailures pytest-timeouts \ + pytest-xdist pytest requests_mock time-machine \ + --constraint https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt fi if [[ "${USE_AIRFLOW_VERSION}" =~ ^2\.2\..*|^2\.1\..*|^2\.0\..* && "${AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=}" != "" ]]; then diff --git a/tests/conftest.py b/tests/conftest.py index 63e5e40018fcb..d4a3a727b0216 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1005,10 +1005,14 @@ def create_dag( with_dagrun_type=DagRunType.SCHEDULED, **kwargs, ): + op_kwargs = {} + from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS + + if AIRFLOW_V_2_9_PLUS: + op_kwargs["task_display_name"] = task_display_name with dag_maker(dag_id, **kwargs) as dag: op = EmptyOperator( task_id=task_id, - task_display_name=task_display_name, max_active_tis_per_dag=max_active_tis_per_dag, max_active_tis_per_dagrun=max_active_tis_per_dagrun, executor_config=executor_config or {}, @@ -1019,6 +1023,7 @@ def create_dag( email=email, pool=pool, trigger_rule=trigger_rule, + **op_kwargs, ) if with_dagrun_type is not None: dag_maker.create_dagrun(run_type=with_dagrun_type) @@ -1170,11 +1175,17 @@ def reset_logging_config(): def suppress_info_logs_for_dag_and_fab(): import logging + from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS + dag_logger = logging.getLogger("airflow.models.dag") dag_logger.setLevel(logging.WARNING) - fab_logger = logging.getLogger("airflow.providers.fab.auth_manager.security_manager.override") - fab_logger.setLevel(logging.WARNING) + if AIRFLOW_V_2_9_PLUS: + fab_logger = logging.getLogger("airflow.providers.fab.auth_manager.security_manager.override") + fab_logger.setLevel(logging.WARNING) + else: + fab_logger = logging.getLogger("airflow.www.fab_security") + fab_logger.setLevel(logging.WARNING) @pytest.fixture(scope="module", autouse=True) diff --git a/tests/providers/common/io/operators/test_file_transfer.py b/tests/providers/common/io/operators/test_file_transfer.py index 2dc204eb3d3b8..90b463a76260b 100644 --- a/tests/providers/common/io/operators/test_file_transfer.py +++ b/tests/providers/common/io/operators/test_file_transfer.py @@ -57,7 +57,6 @@ def test_get_openlineage_facets_on_start(): expected_input = Dataset(namespace=f"s3://{src_bucket}", name=src_key) expected_output = Dataset(namespace=f"s3://{dst_bucket}", name=dst_key) - op = FileTransferOperator( task_id="test", src=f"s3://{src_bucket}/{src_key}", diff --git a/tests/providers/common/io/xcom/test_backend.py b/tests/providers/common/io/xcom/test_backend.py index 2da2d6fecd26b..7d1fd94490d01 100644 --- a/tests/providers/common/io/xcom/test_backend.py +++ b/tests/providers/common/io/xcom/test_backend.py @@ -19,17 +19,28 @@ import pytest +from airflow.exceptions import AirflowOptionalProviderFeatureException +from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS + +pytestmark = [ + pytest.mark.db_test, + 
pytest.mark.skipif(not AIRFLOW_V_2_9_PLUS, reason="Tests for Airflow 2.9.0+ only"), +] + + import airflow.models.xcom from airflow.models.xcom import BaseXCom, resolve_xcom_backend from airflow.operators.empty import EmptyOperator -from airflow.providers.common.io.xcom.backend import XComObjectStorageBackend + +try: + from airflow.providers.common.io.xcom.backend import XComObjectStorageBackend +except AirflowOptionalProviderFeatureException: + pass from airflow.utils import timezone from airflow.utils.xcom import XCOM_RETURN_KEY from tests.test_utils import db from tests.test_utils.config import conf_vars -pytestmark = pytest.mark.db_test - @pytest.fixture(autouse=True) def reset_db(): diff --git a/tests/providers/google/cloud/log/test_stackdriver_task_handler.py b/tests/providers/google/cloud/log/test_stackdriver_task_handler.py index 737cedefaa79c..1ac8b91df5a65 100644 --- a/tests/providers/google/cloud/log/test_stackdriver_task_handler.py +++ b/tests/providers/google/cloud/log/test_stackdriver_task_handler.py @@ -17,6 +17,7 @@ from __future__ import annotations import logging +from contextlib import nullcontext from unittest import mock from urllib.parse import parse_qs, urlsplit @@ -24,9 +25,11 @@ from google.cloud.logging import Resource from google.cloud.logging_v2.types import ListLogEntriesRequest, ListLogEntriesResponse, LogEntry +from airflow.exceptions import RemovedInAirflow3Warning from airflow.providers.google.cloud.log.stackdriver_task_handler import StackdriverTaskHandler from airflow.utils import timezone from airflow.utils.state import TaskInstanceState +from tests.test_utils.compat import AIRFLOW_V_2_9_PLUS from tests.test_utils.config import conf_vars from tests.test_utils.db import clear_db_dags, clear_db_runs @@ -81,21 +84,26 @@ def test_should_use_configured_log_name(mock_client, mock_get_creds_and_project_ mock_get_creds_and_project_id.return_value = ("creds", "project_id") try: - with conf_vars( - { - ("logging", "remote_logging"): "True", - ("logging", "remote_base_log_folder"): "stackdriver://host/path", - } - ): - importlib.reload(airflow_local_settings) - settings.configure_logging() - - logger = logging.getLogger("airflow.task") - handler = logger.handlers[0] - assert isinstance(handler, StackdriverTaskHandler) - with mock.patch.object(handler, "transport_type") as transport_type_mock: - logger.error("foo") - transport_type_mock.assert_called_once_with(mock_client.return_value, "path") + # this is needed for Airflow 2.8 and below where default settings are triggering warning on + # extra "name" in the configuration of stackdriver handler. As of Airflow 2.9 this warning is not + # emitted. 
+        context_manager = nullcontext() if AIRFLOW_V_2_9_PLUS else pytest.warns(RemovedInAirflow3Warning)
+        with context_manager:
+            with conf_vars(
+                {
+                    ("logging", "remote_logging"): "True",
+                    ("logging", "remote_base_log_folder"): "stackdriver://host/path",
+                }
+            ):
+                importlib.reload(airflow_local_settings)
+                settings.configure_logging()
+
+                logger = logging.getLogger("airflow.task")
+                handler = logger.handlers[0]
+                assert isinstance(handler, StackdriverTaskHandler)
+                with mock.patch.object(handler, "transport_type") as transport_type_mock:
+                    logger.error("foo")
+                    transport_type_mock.assert_called_once_with(mock_client.return_value, "path")
     finally:
         importlib.reload(airflow_local_settings)
         settings.configure_logging()
diff --git a/tests/providers/smtp/notifications/test_smtp.py b/tests/providers/smtp/notifications/test_smtp.py
index 98fd7387e7efa..39b51e8e02ce7 100644
--- a/tests/providers/smtp/notifications/test_smtp.py
+++ b/tests/providers/smtp/notifications/test_smtp.py
@@ -133,7 +133,7 @@ def test_notifier_with_defaults(self, mock_smtphook_hook, create_task_instance):
             from_email=conf.get("smtp", "smtp_mail_from"),
             to="test_reciver@test.com",
             subject="DAG dag - Task op - Run ID test in State None",
-            html_content=f"""...""",  [removed literal: long rendered HTML table with Run ID, Try {NUM_TRY} of 1, Task State, Host, Log Link and Mark Success Link rows; markup omitted]
\n\n""", + html_content=mock.ANY, smtp_conn_id="smtp_default", files=None, cc=None, @@ -142,6 +142,8 @@ def test_notifier_with_defaults(self, mock_smtphook_hook, create_task_instance): mime_charset="utf-8", custom_headers=None, ) + content = mock_smtphook_hook.return_value.__enter__().send_email_smtp.call_args.kwargs["html_content"] + assert f"{NUM_TRY} of 1" in content @mock.patch("airflow.providers.smtp.notifications.smtp.SmtpHook") def test_notifier_with_defaults_sla(self, mock_smtphook_hook, dag_maker): @@ -163,7 +165,7 @@ def test_notifier_with_defaults_sla(self, mock_smtphook_hook, dag_maker): from_email=conf.get("smtp", "smtp_mail_from"), to="test_reciver@test.com", subject="SLA Missed for DAG test_notifier - Task op", - html_content="""\n\n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
\n\n""", + html_content=mock.ANY, smtp_conn_id="smtp_default", files=None, cc=None, @@ -172,6 +174,8 @@ def test_notifier_with_defaults_sla(self, mock_smtphook_hook, dag_maker): mime_charset="utf-8", custom_headers=None, ) + content = mock_smtphook_hook.return_value.__enter__().send_email_smtp.call_args.kwargs["html_content"] + assert "Task List:" in content @mock.patch("airflow.providers.smtp.notifications.smtp.SmtpHook") def test_notifier_with_nondefault_conf_vars(self, mock_smtphook_hook, create_task_instance): diff --git a/tests/test_utils/db.py b/tests/test_utils/db.py index 1c2b871b19630..a440262782c78 100644 --- a/tests/test_utils/db.py +++ b/tests/test_utils/db.py @@ -46,7 +46,16 @@ TaskOutletDatasetReference, ) from airflow.models.serialized_dag import SerializedDagModel -from airflow.providers.fab.auth_manager.models import Permission, Resource, assoc_permission_role + +try: + from airflow.providers.fab.auth_manager.models import Permission, Resource, assoc_permission_role +except ImportError: + # Handle Pre-airflow 2.9 case where FAB was part of the core airflow + from airflow.auth.managers.fab.models import ( # type: ignore[no-redef] + Permission, + Resource, + assoc_permission_role, + ) from airflow.security.permissions import RESOURCE_DAG_PREFIX from airflow.utils.db import add_default_pool_if_not_exists, create_default_connections, reflect_tables from airflow.utils.session import create_session