Posted to commits@airflow.apache.org by el...@apache.org on 2022/01/06 08:54:39 UTC

[airflow] branch main updated: Standardize DynamoDB naming (#20360)

This is an automated email from the ASF dual-hosted git repository.

eladkal pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 1fc0fa5  Standardize DynamoDB naming (#20360)
1fc0fa5 is described below

commit 1fc0fa5ea96913faf78a5bf5d5f75f1d2fb91e97
Author: D. Ferruzzi <fe...@amazon.com>
AuthorDate: Thu Jan 6 00:54:04 2022 -0800

    Standardize DynamoDB naming (#20360)
    
    * Standardize DynamoDB naming
---
 airflow/providers/amazon/aws/hooks/dynamodb.py        | 19 ++++++++++++++++++-
 .../providers/amazon/aws/transfers/dynamodb_to_s3.py  |  4 ++--
 .../amazon/aws/transfers/hive_to_dynamodb.py          |  4 ++--
 tests/deprecated_classes.py                           |  2 +-
 tests/providers/amazon/aws/hooks/test_dynamodb.py     |  6 +++---
 .../amazon/aws/transfers/test_dynamodb_to_s3.py       |  4 ++--
 .../amazon/aws/transfers/test_hive_to_dynamodb.py     |  6 +++---
 7 files changed, 31 insertions(+), 14 deletions(-)
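
For users of the provider, the practical effect is a rename with a backwards-compatible
shim: AwsDynamoDBHook becomes DynamoDBHook, and the old name keeps working but emits a
DeprecationWarning when instantiated. A minimal before/after sketch (the connection id
and table settings are example values, not part of the commit):

    # Deprecated spelling: still importable after this commit, but warns
    # at instantiation time via the shim added in hooks/dynamodb.py.
    from airflow.providers.amazon.aws.hooks.dynamodb import AwsDynamoDBHook

    # Standardized spelling going forward.
    from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook

    hook = DynamoDBHook(
        aws_conn_id='aws_default',   # example connection id
        table_name='test_airflow',   # example table, as in the tests below
        table_keys=['id'],
        region_name='us-east-1',
    )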

diff --git a/airflow/providers/amazon/aws/hooks/dynamodb.py b/airflow/providers/amazon/aws/hooks/dynamodb.py
index a66f2b0..3fb5507 100644
--- a/airflow/providers/amazon/aws/hooks/dynamodb.py
+++ b/airflow/providers/amazon/aws/hooks/dynamodb.py
@@ -18,13 +18,14 @@
 
 
 """This module contains the AWS DynamoDB hook"""
+import warnings
 from typing import Iterable, List, Optional
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 
 
-class AwsDynamoDBHook(AwsBaseHook):
+class DynamoDBHook(AwsBaseHook):
     """
     Interact with AWS DynamoDB.
 
@@ -59,3 +60,19 @@ class AwsDynamoDBHook(AwsBaseHook):
             return True
         except Exception as general_error:
             raise AirflowException(f"Failed to insert items in dynamodb, error: {str(general_error)}")
+
+
+class AwsDynamoDBHook(DynamoDBHook):
+    """
+    This class is deprecated.
+    Please use :class:`airflow.providers.amazon.aws.hooks.dynamodb.DynamoDBHook`.
+    """
+
+    def __init__(self, *args, **kwargs):
+        warnings.warn(
+            "This class is deprecated. "
+            "Please use :class:`airflow.providers.amazon.aws.hooks.dynamodb.DynamoDBHook`.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        super().__init__(*args, **kwargs)
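
The shim follows the usual deprecation pattern: the old name becomes a thin subclass
whose __init__ raises a DeprecationWarning and then delegates to the new class, so
existing imports keep working unchanged. Note stacklevel=2 points the warning at the
caller's line rather than at the shim itself. A quick way to confirm the warning fires
(a sketch, not part of this commit):

    import warnings

    from airflow.providers.amazon.aws.hooks.dynamodb import AwsDynamoDBHook

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        AwsDynamoDBHook(aws_conn_id='aws_default')  # old name triggers the shim
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)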
diff --git a/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py b/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
index 029631c..21a3db0 100644
--- a/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
@@ -29,7 +29,7 @@ from typing import IO, TYPE_CHECKING, Any, Callable, Dict, Optional
 from uuid import uuid4
 
 from airflow.models import BaseOperator
-from airflow.providers.amazon.aws.hooks.dynamodb import AwsDynamoDBHook
+from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 
 if TYPE_CHECKING:
@@ -129,7 +129,7 @@ class DynamoDBToS3Operator(BaseOperator):
         self.aws_conn_id = aws_conn_id
 
     def execute(self, context: 'Context') -> None:
-        hook = AwsDynamoDBHook(aws_conn_id=self.aws_conn_id)
+        hook = DynamoDBHook(aws_conn_id=self.aws_conn_id)
         table = hook.get_conn().Table(self.dynamodb_table_name)
 
         scan_kwargs = copy(self.dynamodb_scan_kwargs) if self.dynamodb_scan_kwargs else {}
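
The rename is the only change to this operator; for context, execute() drives a
paginated DynamoDB scan through the hook and ships the results to S3. Conceptually
the loop keys on LastEvaluatedKey, roughly as follows (a simplified sketch assuming
a boto3 Table resource named table; the process callback is hypothetical):

    scan_kwargs = {}
    while True:
        response = table.scan(**scan_kwargs)
        for item in response['Items']:
            process(item)  # e.g. serialize to a temp file destined for S3
        last_key = response.get('LastEvaluatedKey')
        if last_key is None:
            break  # no more pages
        scan_kwargs['ExclusiveStartKey'] = last_key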
diff --git a/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py b/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py
index 2bbf99e..3953571 100644
--- a/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py
+++ b/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py
@@ -22,7 +22,7 @@ import json
 from typing import TYPE_CHECKING, Callable, Optional, Sequence
 
 from airflow.models import BaseOperator
-from airflow.providers.amazon.aws.hooks.dynamodb import AwsDynamoDBHook
+from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
 from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
 
 if TYPE_CHECKING:
@@ -96,7 +96,7 @@ class HiveToDynamoDBOperator(BaseOperator):
         self.log.info(self.sql)
 
         data = hive.get_pandas_df(self.sql, schema=self.schema)
-        dynamodb = AwsDynamoDBHook(
+        dynamodb = DynamoDBHook(
             aws_conn_id=self.aws_conn_id,
             table_name=self.table_name,
             table_keys=self.table_keys,
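
Again, only the hook's class name changes here. The write itself (untouched by this
commit) goes through the hook's write_batch_data, which wraps boto3's batch_writer,
roughly as below (a sketch; the table name and key are example values):

    # dynamodb is a boto3 DynamoDB service resource, e.g. from hook.get_conn()
    table = dynamodb.Table('my_table')            # example table name
    with table.batch_writer(overwrite_by_pkeys=['id']) as batch:
        for item in items:                        # items: iterable of dicts
            batch.put_item(Item=item)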
diff --git a/tests/deprecated_classes.py b/tests/deprecated_classes.py
index 70777d1..c212a96 100644
--- a/tests/deprecated_classes.py
+++ b/tests/deprecated_classes.py
@@ -416,7 +416,7 @@ HOOKS = [
         'airflow.contrib.hooks.aws_hook.AwsHook',
     ),
     (
-        'airflow.providers.amazon.aws.hooks.dynamodb.AwsDynamoDBHook',
+        'airflow.providers.amazon.aws.hooks.dynamodb.DynamoDBHook',
         'airflow.contrib.hooks.aws_dynamodb_hook.AwsDynamoDBHook',
     ),
     (
diff --git a/tests/providers/amazon/aws/hooks/test_dynamodb.py b/tests/providers/amazon/aws/hooks/test_dynamodb.py
index a46338f..74be10a 100644
--- a/tests/providers/amazon/aws/hooks/test_dynamodb.py
+++ b/tests/providers/amazon/aws/hooks/test_dynamodb.py
@@ -20,7 +20,7 @@
 import unittest
 import uuid
 
-from airflow.providers.amazon.aws.hooks.dynamodb import AwsDynamoDBHook
+from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
 
 try:
     from moto import mock_dynamodb2
@@ -32,14 +32,14 @@ class TestDynamoDBHook(unittest.TestCase):
     @unittest.skipIf(mock_dynamodb2 is None, 'mock_dynamodb2 package not present')
     @mock_dynamodb2
     def test_get_conn_returns_a_boto3_connection(self):
-        hook = AwsDynamoDBHook(aws_conn_id='aws_default')
+        hook = DynamoDBHook(aws_conn_id='aws_default')
         assert hook.get_conn() is not None
 
     @unittest.skipIf(mock_dynamodb2 is None, 'mock_dynamodb2 package not present')
     @mock_dynamodb2
     def test_insert_batch_items_dynamodb_table(self):
 
-        hook = AwsDynamoDBHook(
+        hook = DynamoDBHook(
             aws_conn_id='aws_default', table_name='test_airflow', table_keys=['id'], region_name='us-east-1'
         )
 
diff --git a/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py b/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py
index 8ea7737..87d9a02 100644
--- a/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py
@@ -34,7 +34,7 @@ class DynamodbToS3Test(unittest.TestCase):
                 self.output_queue.append(json.loads(line))
 
     @patch('airflow.providers.amazon.aws.transfers.dynamodb_to_s3.S3Hook')
-    @patch('airflow.providers.amazon.aws.transfers.dynamodb_to_s3.AwsDynamoDBHook')
+    @patch('airflow.providers.amazon.aws.transfers.dynamodb_to_s3.DynamoDBHook')
     def test_dynamodb_to_s3_success(self, mock_aws_dynamodb_hook, mock_s3_hook):
         responses = [
             {
@@ -65,7 +65,7 @@ class DynamodbToS3Test(unittest.TestCase):
         assert [{'a': 1}, {'b': 2}, {'c': 3}] == self.output_queue
 
     @patch('airflow.providers.amazon.aws.transfers.dynamodb_to_s3.S3Hook')
-    @patch('airflow.providers.amazon.aws.transfers.dynamodb_to_s3.AwsDynamoDBHook')
+    @patch('airflow.providers.amazon.aws.transfers.dynamodb_to_s3.DynamoDBHook')
     def test_dynamodb_to_s3_with_different_aws_conn_id(self, mock_aws_dynamodb_hook, mock_s3_hook):
         responses = [
             {
diff --git a/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py b/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py
index 06e3f26..b75373f 100644
--- a/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py
+++ b/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py
@@ -26,7 +26,7 @@ import pandas as pd
 
 import airflow.providers.amazon.aws.transfers.hive_to_dynamodb
 from airflow.models.dag import DAG
-from airflow.providers.amazon.aws.hooks.dynamodb import AwsDynamoDBHook
+from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
 
 DEFAULT_DATE = datetime.datetime(2015, 1, 1)
 DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
@@ -44,7 +44,7 @@ class TestHiveToDynamoDBOperator(unittest.TestCase):
         dag = DAG('test_dag_id', default_args=args)
         self.dag = dag
         self.sql = 'SELECT 1'
-        self.hook = AwsDynamoDBHook(aws_conn_id='aws_default', region_name='us-east-1')
+        self.hook = DynamoDBHook(aws_conn_id='aws_default', region_name='us-east-1')
 
     @staticmethod
     def process_data(data, *args, **kwargs):
@@ -53,7 +53,7 @@ class TestHiveToDynamoDBOperator(unittest.TestCase):
     @unittest.skipIf(mock_dynamodb2 is None, 'mock_dynamodb2 package not present')
     @mock_dynamodb2
     def test_get_conn_returns_a_boto3_connection(self):
-        hook = AwsDynamoDBHook(aws_conn_id='aws_default')
+        hook = DynamoDBHook(aws_conn_id='aws_default')
         assert hook.get_conn() is not None
 
     @mock.patch(