diff --git a/airflow/providers/amazon/aws/hooks/athena_sql.py b/airflow/providers/amazon/aws/hooks/athena_sql.py
new file mode 100644
index 0000000000000..873022e54ee3b
--- /dev/null
+++ b/airflow/providers/amazon/aws/hooks/athena_sql.py
@@ -0,0 +1,168 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import json
+from functools import cached_property
+from typing import TYPE_CHECKING, Any
+
+import pyathena
+from sqlalchemy.engine.url import URL
+
+from airflow.exceptions import AirflowException, AirflowNotFoundException
+from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
+from airflow.providers.amazon.aws.utils.connection_wrapper import AwsConnectionWrapper
+from airflow.providers.common.sql.hooks.sql import DbApiHook
+
+if TYPE_CHECKING:
+    from pyathena.connection import Connection as AthenaConnection
+
+
+class AthenaSQLHook(AwsBaseHook, DbApiHook):
+    """Interact with Amazon Athena.
+
+    Provide wrapper around PyAthena library.
+
+    :param athena_conn_id: :ref:`Amazon Athena Connection <howto/connection:athena>`.
+
+    Additional arguments (such as ``aws_conn_id``) may be specified and
+    are passed down to the underlying AwsBaseHook.
+
+    You can specify ``driver`` in ``extra`` of your connection in order to use
+    a different driver than the default ``rest``.
+
+    Also, ``aws_domain`` can be specified in ``extra`` of your connection.
+
+    PyAthena and AWS authentication parameters can be passed in the ``extra`` field of the ``athena_conn_id`` connection.
+
+    Passing authentication parameters in ``athena_conn_id`` will override those in ``aws_conn_id``.
+
+    .. seealso::
+        :class:`~airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook`
+
+    .. note::
+        get_uri() depends on SQLAlchemy and PyAthena.
+ """ + + conn_name_attr = "athena_conn_id" + default_conn_name = "athena_default" + conn_type = "athena" + hook_name = "Amazon Athena" + supports_autocommit = True + + def __init__(self, athena_conn_id: str = default_conn_name, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self.athena_conn_id = athena_conn_id + + @classmethod + def get_ui_field_behaviour(cls) -> dict[str, Any]: + """Return custom UI field behaviour for AWS Athena Connection.""" + return { + "hidden_fields": ["host", "port"], + "relabeling": { + "login": "AWS Access Key ID", + "password": "AWS Secret Access Key", + }, + "placeholders": { + "login": "AKIAIOSFODNN7EXAMPLE", + "password": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY", + "extra": json.dumps( + { + "aws_domain": "amazonaws.com", + "driver": "rest", + "s3_staging_dir": "s3://bucket_name/staging/", + "work_group": "primary", + "region_name": "us-east-1", + "session_kwargs": {"profile_name": "default"}, + "config_kwargs": {"retries": {"mode": "standard", "max_attempts": 10}}, + "role_arn": "arn:aws:iam::123456789098:role/role-name", + "assume_role_method": "assume_role", + "assume_role_kwargs": {"RoleSessionName": "airflow"}, + "aws_session_token": "AQoDYXdzEJr...EXAMPLETOKEN", + "endpoint_url": "http://localhost:4566", + }, + indent=2, + ), + }, + } + + @cached_property + def conn_config(self) -> AwsConnectionWrapper: + """Get the Airflow Connection object and wrap it in helper (cached).""" + athena_conn = self.get_connection(self.athena_conn_id) + if self.aws_conn_id: + try: + connection = self.get_connection(self.aws_conn_id) + connection.login = athena_conn.login + connection.password = athena_conn.password + connection.schema = athena_conn.schema + connection.set_extra(json.dumps({**athena_conn.extra_dejson, **connection.extra_dejson})) + except AirflowNotFoundException: + connection = athena_conn + connection.conn_type = "aws" + self.log.warning( + "Unable to find AWS Connection ID '%s', switching to empty.", self.aws_conn_id + ) + + return AwsConnectionWrapper( + conn=connection, region_name=self._region_name, botocore_config=self._config, verify=self._verify + ) + + @property + def conn(self) -> AwsConnectionWrapper: + """Get Aws Connection Wrapper object.""" + return self.conn_config + + def _get_conn_params(self) -> dict[str, str | None]: + """Retrieve connection parameters.""" + if not self.conn.region_name: + raise AirflowException("region_name must be specified in the connection's extra") + + return dict( + driver=self.conn.extra_dejson.get("driver", "rest"), + schema_name=self.conn.schema, + region_name=self.conn.region_name, + aws_domain=self.conn.extra_dejson.get("aws_domain", "amazonaws.com"), + ) + + def get_uri(self) -> str: + """Overridden to use the Athena dialect as driver name.""" + conn_params = self._get_conn_params() + creds = self.get_credentials(region_name=conn_params["region_name"]) + + return URL.create( + f'awsathena+{conn_params["driver"]}', + username=creds.access_key, + password=creds.secret_key, + host=f'athena.{conn_params["region_name"]}.{conn_params["aws_domain"]}', + port=443, + database=conn_params["schema_name"], + query={"aws_session_token": creds.token, **self.conn.extra_dejson}, + ) + + def get_conn(self) -> AthenaConnection: + """Get a ``pyathena.Connection`` object.""" + conn_params = self._get_conn_params() + + conn_kwargs: dict = { + "schema_name": conn_params["schema_name"], + "region_name": conn_params["region_name"], + "session": self.get_session(region_name=conn_params["region_name"]), + 
+            **self.conn.extra_dejson,
+        }
+
+        return pyathena.connect(**conn_kwargs)
diff --git a/airflow/providers/amazon/provider.yaml b/airflow/providers/amazon/provider.yaml
index 4cd8a1278a917..1b90089db2536 100644
--- a/airflow/providers/amazon/provider.yaml
+++ b/airflow/providers/amazon/provider.yaml
@@ -92,6 +92,7 @@ dependencies:
   - redshift_connector>=2.0.918
   - sqlalchemy_redshift>=0.8.6
   - asgiref
+  - PyAthena>=3.0.10
 
 additional-extras:
   - name: pandas
@@ -128,7 +129,8 @@ integrations:
     external-doc-url: https://aws.amazon.com/athena/
     logo: /integration-logos/aws/Amazon-Athena_light-bg@4x.png
     how-to-guide:
-      - /docs/apache-airflow-providers-amazon/operators/athena.rst
+      - /docs/apache-airflow-providers-amazon/operators/athena/athena_boto.rst
+      - /docs/apache-airflow-providers-amazon/operators/athena/athena_sql.rst
     tags: [aws]
   - integration-name: Amazon Chime
     external-doc-url: https://aws.amazon.com/chime/
@@ -488,6 +490,7 @@ hooks:
   - integration-name: Amazon Athena
     python-modules:
       - airflow.providers.amazon.aws.hooks.athena
+      - airflow.providers.amazon.aws.hooks.athena_sql
   - integration-name: Amazon Chime
     python-modules:
       - airflow.providers.amazon.aws.hooks.chime
@@ -755,6 +758,8 @@ connection-types:
     connection-type: emr
   - hook-class-name: airflow.providers.amazon.aws.hooks.redshift_sql.RedshiftSQLHook
     connection-type: redshift
+  - hook-class-name: airflow.providers.amazon.aws.hooks.athena_sql.AthenaSQLHook
+    connection-type: athena
 
 notifications:
   - airflow.providers.amazon.aws.notifications.chime.ChimeNotifier
diff --git a/airflow/utils/db.py b/airflow/utils/db.py
index 03bb33cfac093..b9ee8323611c4 100644
--- a/airflow/utils/db.py
+++ b/airflow/utils/db.py
@@ -137,6 +137,13 @@ def create_default_connections(session: Session = NEW_SESSION):
         ),
         session,
     )
+    merge_conn(
+        Connection(
+            conn_id="athena_default",
+            conn_type="athena",
+        ),
+        session,
+    )
     merge_conn(
         Connection(
             conn_id="aws_default",
diff --git a/docs/apache-airflow-providers-amazon/connections/athena.rst b/docs/apache-airflow-providers-amazon/connections/athena.rst
new file mode 100644
index 0000000000000..823447035e304
--- /dev/null
+++ b/docs/apache-airflow-providers-amazon/connections/athena.rst
@@ -0,0 +1,63 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+.. _howto/connection:athena:
+
+Amazon Athena Connection
+==========================
+
+The Athena connection type enables DB API 2.0 integrations with Athena.
+
+.. note::
+    This connection type is meant to be used with ``AthenaSQLHook``.
+    For ``AthenaHook`` use the `Amazon Web Services Connection <./aws.rst>`_ type instead.
+
+Authenticating to Amazon Athena
+---------------------------------
+
+Authentication may be performed using any of the authentication methods supported by `Amazon Web Services Connection <./aws.rst>`_.
+
+Default Connection IDs
+----------------------
+
+The default connection ID is ``athena_default``.
+
+Configuring the Connection
+--------------------------
+
+Schema (optional)
+    Specify the Amazon Athena database name.
+
+Extra
+    Specify the extra parameters (as a JSON dictionary) that can be used in the
+    Amazon Athena connection.
+
+    * ``region_name``: AWS Region for the connection (mandatory).
+    * ``work_group``: Athena work group to use (optional).
+    * ``s3_staging_dir``: Athena S3 staging directory (optional).
+
+.. note::
+    You must define either ``work_group`` or ``s3_staging_dir`` in the extra field.
+
+You can pass additional parameters to PyAthena by specifying them in the
+``extra`` field of your connection as JSON. Please refer to the PyAthena
+documentation for the full list of supported parameters.
+
+Since this connection type uses the authentication methods of the
+`Amazon Web Services Connection <./aws.rst>`_, please refer to that documentation
+for additional information about configuring the connection.
diff --git a/docs/apache-airflow-providers-amazon/index.rst b/docs/apache-airflow-providers-amazon/index.rst
index 5034816ce81c6..59986ec8d3eee 100644
--- a/docs/apache-airflow-providers-amazon/index.rst
+++ b/docs/apache-airflow-providers-amazon/index.rst
@@ -119,6 +119,7 @@ PIP package                              Version required
 ``redshift_connector``                   ``>=2.0.888``
 ``sqlalchemy_redshift``                  ``>=0.8.6``
 ``asgiref``
+``PyAthena``                             ``>=3.0.10``
 =======================================  ==================
 
 Cross provider package dependencies
diff --git a/docs/apache-airflow-providers-amazon/operators/athena.rst b/docs/apache-airflow-providers-amazon/operators/athena/athena_boto.rst
similarity index 96%
rename from docs/apache-airflow-providers-amazon/operators/athena.rst
rename to docs/apache-airflow-providers-amazon/operators/athena/athena_boto.rst
index d82d2edf2b39d..e789290a6ba0f 100644
--- a/docs/apache-airflow-providers-amazon/operators/athena.rst
+++ b/docs/apache-airflow-providers-amazon/operators/athena/athena_boto.rst
@@ -28,12 +28,12 @@ to your data in S3, define the schema, and start querying using standard SQL.
 Prerequisite Tasks
 ------------------
 
-.. include:: ../_partials/prerequisite_tasks.rst
+.. include:: ../../_partials/prerequisite_tasks.rst
 
 Generic Parameters
 ------------------
 
-.. include:: ../_partials/generic_parameters.rst
+.. include:: ../../_partials/generic_parameters.rst
 
 Operators
 ---------
diff --git a/docs/apache-airflow-providers-amazon/operators/athena/athena_sql.rst b/docs/apache-airflow-providers-amazon/operators/athena/athena_sql.rst
new file mode 100644
index 0000000000000..ceb45a39b277b
--- /dev/null
+++ b/docs/apache-airflow-providers-amazon/operators/athena/athena_sql.rst
@@ -0,0 +1,63 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+===================
+Amazon Athena SQL
+===================
+
+`Amazon Athena <https://aws.amazon.com/athena/>`__ is an interactive query service
+that makes it easy to analyze data in Amazon Simple Storage Service (S3) using
+standard SQL. Athena is serverless, so there is no infrastructure to set up or
+manage, and you pay only for the queries you run. To get started, simply point
+to your data in S3, define the schema, and start querying using standard SQL.
+
+Prerequisite Tasks
+------------------
+
+.. include:: ../../_partials/prerequisite_tasks.rst
+
+Operators
+---------
+
+Execute a SQL query
+===================
+
+The generic ``SQLExecuteQueryOperator`` can be used to execute SQL queries against Amazon Athena using an `Athena connection <../../connections/athena.rst>`_.
+
+To execute a single SQL query against Amazon Athena without bringing the results back to Airflow,
+please use ``AthenaOperator`` instead.
+
+.. exampleinclude:: /../../tests/system/providers/common/sql/example_sql_execute_query.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_sql_execute_query]
+    :end-before: [END howto_operator_sql_execute_query]
+
+Also, if you need to do simple data quality tests with Amazon Athena, you can use the ``SQLTableCheckOperator``.
+
+The below example demonstrates how to instantiate the ``SQLTableCheckOperator`` task.
+
+.. exampleinclude:: /../../tests/system/providers/common/sql/example_sql_column_table_check.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_sql_table_check]
+    :end-before: [END howto_operator_sql_table_check]
+
+Reference
+---------
+
+* `PyAthena <https://github.com/laughingman7743/PyAthena>`__
diff --git a/docs/apache-airflow-providers-amazon/operators/athena/index.rst b/docs/apache-airflow-providers-amazon/operators/athena/index.rst
new file mode 100644
index 0000000000000..85130aba62e2c
--- /dev/null
+++ b/docs/apache-airflow-providers-amazon/operators/athena/index.rst
@@ -0,0 +1,48 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+
+
+Amazon Athena Operators
+=========================
+
+Amazon Athena is an interactive query service that makes it easy to analyze data in Amazon S3 using standard SQL. While Amazon Athena itself does not provide a DB API 2.0 (PEP 249) compliant connection, the PyAthena library offers this functionality, building upon the boto3 library.
+
+This documentation covers two primary ways to interact with Amazon Athena from Airflow:
+
+1. API (HTTP Boto3): This method uses Amazon Athena's direct API through the boto3 library. It is the preferred method for users who wish to interact with Athena at a lower level, directly through HTTP requests.
+
+2. DB API Connection (Amazon Athena SQL): For users who prefer a more traditional database interaction, PyAthena implements the DB API 2.0 specification, allowing Athena to be used similarly to other relational databases through SQL.
+
+Choosing Your Connection Method
+---------------------------------
+
+Airflow offers two ways to query data using Amazon Athena.
+
+**Amazon Athena (API):** Choose this option if you need to execute a single statement without bringing the results back into Airflow.
+
+**Amazon Athena SQL (DB API Connection):** Opt for this if you need to execute multiple queries in the same operator and it's essential to retrieve and process query results directly in Airflow, such as for sensing values or further data manipulation.
+
+.. note::
+    Both connection methods use the `Amazon Web Services Connection <../../connections/aws>`_ under the hood for authentication.
+    You should decide which connection method to use based on your use case.
+
+.. toctree::
+  :maxdepth: 1
+  :glob:
+
+  *
diff --git a/docs/apache-airflow-providers-amazon/operators/index.rst b/docs/apache-airflow-providers-amazon/operators/index.rst
index 7c7082148e2fd..fa55e97d36f31 100644
--- a/docs/apache-airflow-providers-amazon/operators/index.rst
+++ b/docs/apache-airflow-providers-amazon/operators/index.rst
@@ -25,6 +25,7 @@ Amazon AWS Operators
   :maxdepth: 1
   :glob:
 
+  athena/index
   emr/index
   redshift/index
   s3/index
diff --git a/docs/apache-airflow-providers-amazon/redirects.txt b/docs/apache-airflow-providers-amazon/redirects.txt
index fad1f914880ab..6fad29b08d261 100644
--- a/docs/apache-airflow-providers-amazon/redirects.txt
+++ b/docs/apache-airflow-providers-amazon/redirects.txt
@@ -15,6 +15,7 @@
 # specific language governing permissions and limitations
 # under the License.
 
+operators/athena.rst operators/athena/athena_boto.rst
 operators/s3_to_redshift.rst transfer/s3_to_redshift.rst
 operators/google_api_to_s3_transfer.rst transfer/google_api_to_s3.rst
 operators/imap_attachment_to_s3.rst transfer/imap_attachment_to_s3.rst
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt
index 6e5eaa07fd01c..e5129c16fd97a 100644
--- a/docs/spelling_wordlist.txt
+++ b/docs/spelling_wordlist.txt
@@ -1572,6 +1572,7 @@ TaskGroups
 TaskInstance
 taskinstance
 TaskInstanceKey
+taskinstancekey
 taskmeta
 taskmixin
 tasksetmeta
diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json
index b38b9b0d916a0..37fc4f54eaa9f 100644
--- a/generated/provider_dependencies.json
+++ b/generated/provider_dependencies.json
@@ -25,6 +25,7 @@
   },
   "amazon": {
     "deps": [
+      "PyAthena>=3.0.10",
       "apache-airflow-providers-common-sql>=1.3.1",
       "apache-airflow-providers-http",
       "apache-airflow>=2.6.0",
diff --git a/pyproject.toml b/pyproject.toml
index 7dcf7fc330b21..3fd7222febb5d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -549,6 +549,7 @@ amazon = [
     "mypy-boto3-redshift-data>=1.33.0",
     "mypy-boto3-s3>=1.33.0",
     "s3fs>=2023.10.0",
+    "PyAthena>=3.0.10",
 ]
 apache-beam = [
     "apache-beam>=2.53.0",
diff --git a/tests/providers/amazon/aws/hooks/test_athena_sql.py b/tests/providers/amazon/aws/hooks/test_athena_sql.py
new file mode 100644
index 0000000000000..14249bd35dbca
--- /dev/null
+++ b/tests/providers/amazon/aws/hooks/test_athena_sql.py
@@ -0,0 +1,150 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from unittest import mock
+
+import pytest
+
+from airflow.models import Connection
+from airflow.providers.amazon.aws.hooks.athena_sql import AthenaSQLHook
+from airflow.providers.amazon.aws.utils.connection_wrapper import AwsConnectionWrapper
+
+REGION_NAME = "us-east-1"
+WORK_GROUP = "test-work-group"
+SCHEMA_NAME = "athena_sql_schema"
+AWS_ACCESS_KEY_ID = "aws_access_key_id"
+AWS_SECRET_ACCESS_KEY = "aws_secret_access_key"
+AWS_SESSION_TOKEN = "aws_session_token"
+
+AWS_CONN_ID = "aws_not_default"
+AWS_ATHENA_CONN_ID = "aws_athena_not_default"
+
+
+class TestAthenaSQLHookConn:
+    def setup_method(self):
+        conn = Connection(
+            conn_type="athena",
+            schema=SCHEMA_NAME,
+            extra={"work_group": WORK_GROUP, "region_name": REGION_NAME},
+        )
+        self.conn_athena = AwsConnectionWrapper(conn)
+
+        self.db_hook = AthenaSQLHook()
+
+        self.db_hook.get_connection = mock.Mock()
+        self.db_hook.get_connection.return_value = conn
+
+    @mock.patch("airflow.providers.amazon.aws.hooks.athena_sql.AthenaSQLHook.get_credentials")
+    def test_get_uri(self, mock_get_credentials):
+        mock_get_credentials.return_value = mock.Mock(
+            access_key=AWS_ACCESS_KEY_ID, secret_key=AWS_SECRET_ACCESS_KEY, token=AWS_SESSION_TOKEN
+        )
+
+        expected_athena_uri = "awsathena+rest://aws_access_key_id:aws_secret_access_key@athena.us-east-1.amazonaws.com:443/athena_sql_schema?aws_session_token=aws_session_token&region_name=us-east-1&work_group=test-work-group"
+
+        athena_uri = self.db_hook.get_uri()
+
+        mock_get_credentials.assert_called_once_with(region_name=REGION_NAME)
+
+        assert str(athena_uri) == expected_athena_uri
+
+    @mock.patch("airflow.providers.amazon.aws.hooks.athena_sql.AthenaSQLHook._get_conn_params")
+    def test_get_uri_change_driver(self, mock_get_conn_params):
+        mock_get_conn_params.return_value = dict(
+            driver="arrow", schema_name=SCHEMA_NAME, region_name=REGION_NAME, aws_domain="amazonaws.com"
+        )
+
+        athena_uri = self.db_hook.get_uri()
+
+        assert str(athena_uri).startswith("awsathena+arrow://")
+
+    @mock.patch("airflow.providers.amazon.aws.hooks.athena_sql.pyathena.connect")
+    @mock.patch("airflow.providers.amazon.aws.hooks.athena_sql.AthenaSQLHook.get_session")
+    def test_get_conn(self, mock_get_session, mock_connect):
+        self.db_hook.get_conn()
+
+        mock_get_session.assert_called_once_with(region_name=REGION_NAME)
+
+        mock_connect.assert_called_once_with(
+            schema_name=SCHEMA_NAME,
+            region_name=REGION_NAME,
+            session=mock_get_session.return_value,
+            work_group=WORK_GROUP,
+        )
+
+    @mock.patch("airflow.providers.amazon.aws.hooks.athena_sql.pyathena.connect")
+    @mock.patch("airflow.providers.amazon.aws.hooks.athena_sql.AthenaSQLHook.get_session")
+    def test_get_conn_with_aws_conn(self, mock_get_session, mock_connect):
+        self.db_hook.get_conn()
+
+        mock_get_session.assert_called_once_with(region_name=REGION_NAME)
+
+        mock_connect.assert_called_once_with(
+            schema_name=SCHEMA_NAME,
+            region_name=REGION_NAME,
+            session=mock_get_session.return_value,
+            work_group=WORK_GROUP,
+        )
+
+    @pytest.mark.parametrize(
+        "conn_params, conn_extra, expected_call_args",
+        [
+            (
+                {"schema": "athena_sql_schema1"},
+                {"region_name": "us-east-2"},
+                {"region_name": "us-east-2", "schema_name": "athena_sql_schema1", "session": mock.ANY},
+            ),
+            (
+                {"schema": "athena_sql_schema2"},
+                {"work_group": "test-work-group", "region_name": "us-east-2"},
+                {
+                    "region_name": "us-east-2",
+                    "schema_name": "athena_sql_schema2",
+                    "work_group": "test-work-group",
+                    "session": mock.ANY,
+                },
+            ),
+            (
+                {"schema": "athena_sql_schema3"},
+                {"s3_staging_dir": "s3://test-bucket/", "region_name": "us-east-3"},
+                {
+                    "region_name": "us-east-3",
+                    "schema_name": "athena_sql_schema3",
+                    "s3_staging_dir": "s3://test-bucket/",
+                    "session": mock.ANY,
+                },
+            ),
+        ],
+    )
+    @mock.patch("airflow.providers.amazon.aws.hooks.athena_sql.pyathena.connect")
+    def test_get_conn_passing_args(self, mock_connect, conn_params, conn_extra, expected_call_args):
+        with mock.patch(
+            "airflow.providers.amazon.aws.hooks.athena_sql.AthenaSQLHook.conn",
+            AwsConnectionWrapper(Connection(conn_type="athena", extra=conn_extra, **conn_params)),
+        ):
+            self.db_hook.get_conn()
+            mock_connect.assert_called_once_with(**expected_call_args)
+
+    def test_conn_id_default_setter(self):
+        assert self.db_hook.athena_conn_id == "athena_default"
+        assert self.db_hook.aws_conn_id == "aws_default"
+
+    def test_conn_id_override_setter(self):
+        hook = AthenaSQLHook(athena_conn_id=AWS_ATHENA_CONN_ID, aws_conn_id=AWS_CONN_ID)
+        assert hook.athena_conn_id == AWS_ATHENA_CONN_ID
+        assert hook.aws_conn_id == AWS_CONN_ID
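
For context, a minimal usage sketch (not part of the diff above) of how the new hook and connection type could be exercised once an ``athena_default`` connection is configured as described in the new docs; the table name and query below are placeholders:

# Hypothetical usage sketch. It assumes an "athena_default" connection whose
# extra contains region_name plus either work_group or s3_staging_dir, as
# required by the new connection documentation.
from airflow.providers.amazon.aws.hooks.athena_sql import AthenaSQLHook
from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator

# Ad-hoc access through the DB API interface: helpers such as get_records()
# are available because AthenaSQLHook subclasses DbApiHook.
hook = AthenaSQLHook(athena_conn_id="athena_default")
rows = hook.get_records("SELECT 1")

# Inside a DAG, the generic SQLExecuteQueryOperator can point at the same
# connection; "my_table" is a placeholder table name.
run_query = SQLExecuteQueryOperator(
    task_id="run_athena_query",
    conn_id="athena_default",
    sql="SELECT * FROM my_table LIMIT 10",
)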