AthenaSqlHook implementation (apache#36171)
Co-authored-by: Vincent <97131062+vincbeck@users.noreply.github.com>
Co-authored-by: Josh Fell <48934154+josh-fell@users.noreply.github.com>
Co-authored-by: Andrey Anshin <Andrey.Anshin@taragol.is>
4 people authored Jan 17, 2024
1 parent 437d4e4 commit 6661272
Showing 14 changed files with 513 additions and 3 deletions.
168 changes: 168 additions & 0 deletions airflow/providers/amazon/aws/hooks/athena_sql.py
@@ -0,0 +1,168 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import json
from functools import cached_property
from typing import TYPE_CHECKING, Any

import pyathena
from sqlalchemy.engine.url import URL

from airflow.exceptions import AirflowException, AirflowNotFoundException
from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
from airflow.providers.amazon.aws.utils.connection_wrapper import AwsConnectionWrapper
from airflow.providers.common.sql.hooks.sql import DbApiHook

if TYPE_CHECKING:
from pyathena.connection import Connection as AthenaConnection


class AthenaSQLHook(AwsBaseHook, DbApiHook):
"""Interact with Amazon Athena.
Provide wrapper around PyAthena library.
:param athena_conn_id: :ref:`Amazon Athena Connection <howto/connection:athena>`.
Additional arguments (such as ``aws_conn_id``) may be specified and
are passed down to the underlying AwsBaseHook.
You can specify ``driver`` in ``extra`` of your connection in order to use
a different driver than the default ``rest``.
Also, aws_domain could be specified in ``extra`` of your connection.
PyAthena and AWS Authentication parameters could be passed in extra field of ``athena_conn_id`` connection.
Passing authentication parameters in ``athena_conn_id`` will override those in ``aws_conn_id``.
.. seealso::
:class:`~airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook`
.. note::
get_uri() depends on SQLAlchemy and PyAthena.
"""

conn_name_attr = "athena_conn_id"
default_conn_name = "athena_default"
conn_type = "athena"
hook_name = "Amazon Athena"
supports_autocommit = True

def __init__(self, athena_conn_id: str = default_conn_name, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.athena_conn_id = athena_conn_id

@classmethod
def get_ui_field_behaviour(cls) -> dict[str, Any]:
"""Return custom UI field behaviour for AWS Athena Connection."""
return {
"hidden_fields": ["host", "port"],
"relabeling": {
"login": "AWS Access Key ID",
"password": "AWS Secret Access Key",
},
"placeholders": {
"login": "AKIAIOSFODNN7EXAMPLE",
"password": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
"extra": json.dumps(
{
"aws_domain": "amazonaws.com",
"driver": "rest",
"s3_staging_dir": "s3://bucket_name/staging/",
"work_group": "primary",
"region_name": "us-east-1",
"session_kwargs": {"profile_name": "default"},
"config_kwargs": {"retries": {"mode": "standard", "max_attempts": 10}},
"role_arn": "arn:aws:iam::123456789098:role/role-name",
"assume_role_method": "assume_role",
"assume_role_kwargs": {"RoleSessionName": "airflow"},
"aws_session_token": "AQoDYXdzEJr...EXAMPLETOKEN",
"endpoint_url": "http://localhost:4566",
},
indent=2,
),
},
}

@cached_property
def conn_config(self) -> AwsConnectionWrapper:
"""Get the Airflow Connection object and wrap it in helper (cached)."""
athena_conn = self.get_connection(self.athena_conn_id)
if self.aws_conn_id:
try:
connection = self.get_connection(self.aws_conn_id)
connection.login = athena_conn.login
connection.password = athena_conn.password
connection.schema = athena_conn.schema
connection.set_extra(json.dumps({**athena_conn.extra_dejson, **connection.extra_dejson}))
except AirflowNotFoundException:
connection = athena_conn
connection.conn_type = "aws"
self.log.warning(
"Unable to find AWS Connection ID '%s', switching to empty.", self.aws_conn_id
)

return AwsConnectionWrapper(
conn=connection, region_name=self._region_name, botocore_config=self._config, verify=self._verify
)

@property
def conn(self) -> AwsConnectionWrapper:
"""Get Aws Connection Wrapper object."""
return self.conn_config

def _get_conn_params(self) -> dict[str, str | None]:
"""Retrieve connection parameters."""
if not self.conn.region_name:
raise AirflowException("region_name must be specified in the connection's extra")

return dict(
driver=self.conn.extra_dejson.get("driver", "rest"),
schema_name=self.conn.schema,
region_name=self.conn.region_name,
aws_domain=self.conn.extra_dejson.get("aws_domain", "amazonaws.com"),
)

def get_uri(self) -> str:
"""Overridden to use the Athena dialect as driver name."""
conn_params = self._get_conn_params()
creds = self.get_credentials(region_name=conn_params["region_name"])

return URL.create(
f'awsathena+{conn_params["driver"]}',
username=creds.access_key,
password=creds.secret_key,
host=f'athena.{conn_params["region_name"]}.{conn_params["aws_domain"]}',
port=443,
database=conn_params["schema_name"],
query={"aws_session_token": creds.token, **self.conn.extra_dejson},
)

def get_conn(self) -> AthenaConnection:
"""Get a ``pyathena.Connection`` object."""
conn_params = self._get_conn_params()

conn_kwargs: dict = {
"schema_name": conn_params["schema_name"],
"region_name": conn_params["region_name"],
"session": self.get_session(region_name=conn_params["region_name"]),
**self.conn.extra_dejson,
}

return pyathena.connect(**conn_kwargs)
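
As a usage illustration (not part of this commit), the new hook can be driven through the standard DbApiHook API; the connection ID and query below are illustrative assumptions:

from airflow.providers.amazon.aws.hooks.athena_sql import AthenaSQLHook

# Assumes an "athena_default" connection whose extra defines region_name
# and either work_group or s3_staging_dir (see the docs added below).
hook = AthenaSQLHook(athena_conn_id="athena_default")

# get_records() is inherited from DbApiHook and returns the full result set.
rows = hook.get_records("SELECT 1")
print(rows)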
7 changes: 6 additions & 1 deletion airflow/providers/amazon/provider.yaml
@@ -92,6 +92,7 @@ dependencies:
- redshift_connector>=2.0.918
- sqlalchemy_redshift>=0.8.6
- asgiref
- PyAthena>=3.0.10

additional-extras:
- name: pandas
@@ -128,7 +129,8 @@ integrations:
external-doc-url: https://aws.amazon.com/athena/
logo: /integration-logos/aws/Amazon-Athena_light-bg@4x.png
how-to-guide:
- /docs/apache-airflow-providers-amazon/operators/athena.rst
- /docs/apache-airflow-providers-amazon/operators/athena/athena_boto.rst
- /docs/apache-airflow-providers-amazon/operators/athena/athena_sql.rst
tags: [aws]
- integration-name: Amazon Chime
external-doc-url: https://aws.amazon.com/chime/
@@ -488,6 +490,7 @@ hooks:
- integration-name: Amazon Athena
python-modules:
- airflow.providers.amazon.aws.hooks.athena
- airflow.providers.amazon.aws.hooks.athena_sql
- integration-name: Amazon Chime
python-modules:
- airflow.providers.amazon.aws.hooks.chime
@@ -755,6 +758,8 @@ connection-types:
connection-type: emr
- hook-class-name: airflow.providers.amazon.aws.hooks.redshift_sql.RedshiftSQLHook
connection-type: redshift
- hook-class-name: airflow.providers.amazon.aws.hooks.athena_sql.AthenaSQLHook
connection-type: athena

notifications:
- airflow.providers.amazon.aws.notifications.chime.ChimeNotifier
7 changes: 7 additions & 0 deletions airflow/utils/db.py
@@ -137,6 +137,13 @@ def create_default_connections(session: Session = NEW_SESSION):
),
session,
)
merge_conn(
Connection(
conn_id="athena_default",
conn_type="athena",
),
session,
)
merge_conn(
Connection(
conn_id="aws_default",
63 changes: 63 additions & 0 deletions docs/apache-airflow-providers-amazon/connections/athena.rst
@@ -0,0 +1,63 @@
.. Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
.. http://www.apache.org/licenses/LICENSE-2.0
.. Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
.. _howto/connection:athena:

Amazon Athena Connection
==========================

The Athena connection type enables DB API 2.0 integrations with Athena.

.. note::
    This connection type is meant to be used with ``AthenaSQLHook``.
    For ``AthenaHook``, use the `Amazon Web Services Connection <./aws.rst>`_ type instead.

Authenticating to Amazon Athena
---------------------------------

Authentication may be performed using any of the authentication methods supported by `Amazon Web Services Connection <./aws.rst>`_.

Default Connection IDs
----------------------

The default connection ID is ``athena_default``.

Configuring the Connection
--------------------------

Schema (optional)
Specify the Amazon Athena database name.

Extra
Specify the extra parameters (as json dictionary) that can be used in
Amazon Athena connection.

* ``region_name``: AWS Region for the connection (mandatory).
* ``work_group``: Athena work group to use (optional).
* ``s3_staging_dir``: Athena S3 staging directory (optional).

.. note::
    You must define either ``work_group`` or ``s3_staging_dir`` in the ``extra`` field.

You can pass additional parameters to PyAthena by specifying them in the
``extra`` field of your connection as JSON. See the PyAthena
`documentation <https://github.com/laughingman7743/PyAthena/>`_ for the
supported parameters.

Since this connection type uses the authentication methods of the
`Amazon Web Services Connection <./aws.rst>`_, refer to that documentation
for additional information about configuring the connection.
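
As a hypothetical illustration (all connection values below are placeholders),
a connection of this type can also be built programmatically and exported in
URI form:

.. code-block:: python

    import json

    from airflow.models.connection import Connection

    conn = Connection(
        conn_id="athena_default",
        conn_type="athena",
        schema="my_database",  # Athena database name (placeholder)
        extra=json.dumps({"region_name": "us-east-1", "work_group": "primary"}),
    )

    # URI form, usable e.g. through the AIRFLOW_CONN_ATHENA_DEFAULT environment variable.
    print(conn.get_uri())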
1 change: 1 addition & 0 deletions docs/apache-airflow-providers-amazon/index.rst
@@ -119,6 +119,7 @@ PIP package Version required
``redshift_connector`` ``>=2.0.888``
``sqlalchemy_redshift`` ``>=0.8.6``
``asgiref``
``PyAthena`` ``>=3.0.10``
======================================= ==================

Cross provider package dependencies
docs/apache-airflow-providers-amazon/operators/athena/athena_boto.rst
@@ -28,12 +28,12 @@ to your data in S3, define the schema, and start querying using standard SQL.
Prerequisite Tasks
------------------

.. include:: ../_partials/prerequisite_tasks.rst
.. include:: ../../_partials/prerequisite_tasks.rst

Generic Parameters
------------------

.. include:: ../_partials/generic_parameters.rst
.. include:: ../../_partials/generic_parameters.rst

Operators
---------
63 changes: 63 additions & 0 deletions docs/apache-airflow-providers-amazon/operators/athena/athena_sql.rst
@@ -0,0 +1,63 @@
.. Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
.. http://www.apache.org/licenses/LICENSE-2.0
.. Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
===================
Amazon Athena SQL
===================

`Amazon Athena <https://aws.amazon.com/athena/>`__ is an interactive query service
that makes it easy to analyze data in Amazon Simple Storage Service (S3) using
standard SQL. Athena is serverless, so there is no infrastructure to set up or
manage, and you pay only for the queries you run. To get started, simply point
to your data in S3, define the schema, and start querying using standard SQL.

Prerequisite Tasks
------------------

.. include:: ../../_partials/prerequisite_tasks.rst

Operators
---------

Execute a SQL query
===================

The generic ``SQLExecuteQueryOperator`` can be used to execute SQL queries against Amazon Athena using an `Athena connection <../../connections/athena.rst>`_.

To execute a single SQL query against Amazon Athena without bringing the results
back to Airflow, use the ``AthenaOperator`` instead.

.. exampleinclude:: /../../tests/system/providers/common/sql/example_sql_execute_query.py
:language: python
:dedent: 4
:start-after: [START howto_operator_sql_execute_query]
:end-before: [END howto_operator_sql_execute_query]
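
For instance, a sketch (the table name and query are placeholders) of pointing
the generic operator at the Athena connection:

.. code-block:: python

    from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator

    run_athena_query = SQLExecuteQueryOperator(
        task_id="run_athena_query",
        conn_id="athena_default",  # the Athena connection described above
        sql="SELECT * FROM my_table LIMIT 10",
    )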

Also, if you need to do simple data quality tests with Amazon Athena, you can use
the ``SQLTableCheckOperator``.

The example below demonstrates how to instantiate the ``SQLTableCheckOperator`` task.

.. exampleinclude:: /../../tests/system/providers/common/sql/example_sql_column_table_check.py
:language: python
:dedent: 4
:start-after: [START howto_operator_sql_table_check]
:end-before: [END howto_operator_sql_table_check]

Reference
---------

* `PyAthena <https://github.com/laughingman7743/PyAthena>`__