Posted to commits@airflow.apache.org by po...@apache.org on 2022/09/19 11:51:08 UTC

[airflow] branch main updated: Remove redundant exception catching in Amazon Log Task Handlers (#26442)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new f51587e85e Remove redundant exception catching in Amazon Log Task Handlers (#26442)
f51587e85e is described below

commit f51587e85e3e54cf0ebb4c35fda6ed43caae284e
Author: Andrey Anshin <An...@taragol.is>
AuthorDate: Mon Sep 19 15:50:52 2022 +0400

    Remove redundant exception catching in Amazon Log Task Handlers (#26442)
---
 .../amazon/aws/log/cloudwatch_task_handler.py      | 18 ++++-------------
 .../providers/amazon/aws/log/s3_task_handler.py    | 18 ++++-------------
 .../amazon/aws/log/test_cloudwatch_task_handler.py | 23 +---------------------
 .../amazon/aws/log/test_s3_task_handler.py         | 18 -----------------
 4 files changed, 9 insertions(+), 68 deletions(-)
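
In short: both handlers previously wrapped hook construction in a broad
try/except that logged an error and returned None. The commit builds the
hook directly and lets any failure propagate. A minimal, self-contained
sketch of the resulting pattern (FakeHook and RemoteLogHandler are
illustrative names, not Airflow classes):

    from functools import cached_property

    class FakeHook:
        """Stand-in for AwsLogsHook / S3Hook in this sketch."""

        def __init__(self, conn_id):
            self.conn_id = conn_id

    class RemoteLogHandler:
        def __init__(self, conn_id):
            self.conn_id = conn_id

        @cached_property
        def hook(self):
            # No try/except any more: a bad import or connection id
            # now raises at first access instead of yielding None.
            return FakeHook(self.conn_id)

    handler = RemoteLogHandler("aws_default")
    assert handler.hook is handler.hook  # cached_property memoizes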

diff --git a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
index f2b25327aa..9d4b7fc247 100644
--- a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
+++ b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
@@ -23,6 +23,7 @@ import watchtower
 
 from airflow.compat.functools import cached_property
 from airflow.configuration import conf
+from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
 from airflow.utils.log.file_task_handler import FileTaskHandler
 from airflow.utils.log.logging_mixin import LoggingMixin
 
@@ -51,20 +52,9 @@ class CloudwatchTaskHandler(FileTaskHandler, LoggingMixin):
     @cached_property
     def hook(self):
         """Returns AwsLogsHook."""
-        remote_conn_id = conf.get('logging', 'REMOTE_LOG_CONN_ID')
-        try:
-            from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
-
-            return AwsLogsHook(aws_conn_id=remote_conn_id, region_name=self.region_name)
-        except Exception as e:
-            self.log.error(
-                'Could not create an AwsLogsHook with connection id "%s". '
-                'Please make sure that apache-airflow[aws] is installed and '
-                'the Cloudwatch logs connection exists. Exception: "%s"',
-                remote_conn_id,
-                e,
-            )
-            return None
+        return AwsLogsHook(
+            aws_conn_id=conf.get('logging', 'REMOTE_LOG_CONN_ID'), region_name=self.region_name
+        )
 
     def _render_filename(self, ti, try_number):
         # Replace unsupported log group name characters
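
Why the catch was redundant: the handler and AwsLogsHook ship in the same
amazon provider package, so if the handler module imports at all, the hook
import cannot fail for a missing dependency; hoisting the import to module
level makes that explicit. The old code also hid the real error by
returning None, which only surfaced later as an opaque AttributeError. A
small simulation of that old failure mode (OldStyleHandler is illustrative,
not Airflow code):

    class OldStyleHandler:
        @property
        def hook(self):
            try:
                raise ImportError("provider missing")  # simulated failure
            except Exception:
                return None  # the original cause is discarded here

    try:
        OldStyleHandler().hook.get_conn()
    except AttributeError as err:
        print(err)  # 'NoneType' object has no attribute 'get_conn'
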
diff --git a/airflow/providers/amazon/aws/log/s3_task_handler.py b/airflow/providers/amazon/aws/log/s3_task_handler.py
index 9aca0f3249..92aa556eb5 100644
--- a/airflow/providers/amazon/aws/log/s3_task_handler.py
+++ b/airflow/providers/amazon/aws/log/s3_task_handler.py
@@ -22,6 +22,7 @@ import pathlib
 
 from airflow.compat.functools import cached_property
 from airflow.configuration import conf
+from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.utils.log.file_task_handler import FileTaskHandler
 from airflow.utils.log.logging_mixin import LoggingMixin
 
@@ -44,20 +45,9 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin):
     @cached_property
     def hook(self):
         """Returns S3Hook."""
-        remote_conn_id = conf.get('logging', 'REMOTE_LOG_CONN_ID')
-        try:
-            from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-
-            return S3Hook(remote_conn_id, transfer_config_args={"use_threads": False})
-        except Exception as e:
-            self.log.exception(
-                'Could not create an S3Hook with connection id "%s". '
-                'Please make sure that apache-airflow[aws] is installed and '
-                'the S3 connection exists. Exception : "%s"',
-                remote_conn_id,
-                e,
-            )
-            return None
+        return S3Hook(
+            aws_conn_id=conf.get('logging', 'REMOTE_LOG_CONN_ID'), transfer_config_args={"use_threads": False}
+        )
 
     def set_context(self, ti):
         super().set_context(ti)
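
The S3 handler keeps its transfer_config_args={"use_threads": False}
argument, which S3Hook forwards to boto3's TransferConfig (the surviving
test below asserts hook.transfer_config.use_threads is False). A minimal
sketch of what that flag maps to at the boto3 layer:

    from boto3.s3.transfer import TransferConfig

    # use_threads=False keeps S3 transfers in the calling thread, which
    # is what the log handler requests via transfer_config_args.
    config = TransferConfig(use_threads=False)
    print(config.use_threads)  # False
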
diff --git a/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py b/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py
index 8c7a70be41..556a6b9338 100644
--- a/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py
+++ b/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py
@@ -20,7 +20,7 @@ from __future__ import annotations
 import time
 from datetime import datetime as dt
 from unittest import mock
-from unittest.mock import ANY, call
+from unittest.mock import call
 
 import pytest
 from watchtower import CloudWatchLogHandler
@@ -100,27 +100,6 @@ class TestCloudwatchTaskHandler:
     def test_hook(self):
         assert isinstance(self.cloudwatch_task_handler.hook, AwsLogsHook)
 
-    @conf_vars({('logging', 'remote_log_conn_id'): 'aws_default'})
-    def test_hook_raises(self):
-        handler = CloudwatchTaskHandler(
-            self.local_log_location,
-            f"arn:aws:logs:{self.region_name}:11111111:log-group:{self.remote_log_group}",
-        )
-
-        with mock.patch.object(handler.log, 'error') as mock_error:
-            with mock.patch("airflow.providers.amazon.aws.hooks.logs.AwsLogsHook") as mock_hook:
-                mock_hook.side_effect = Exception('Failed to connect')
-                # Initialize the hook
-                handler.hook
-
-            mock_error.assert_called_once_with(
-                'Could not create an AwsLogsHook with connection id "%s". Please make '
-                'sure that apache-airflow[aws] is installed and the Cloudwatch '
-                'logs connection exists. Exception: "%s"',
-                'aws_default',
-                ANY,
-            )
-
     def test_handler(self):
         self.cloudwatch_task_handler.set_context(self.ti)
         assert isinstance(self.cloudwatch_task_handler.handler, CloudWatchLogHandler)
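
With the error-swallowing gone, the negative-path tests that patched
handler.log and asserted a logged error no longer apply, so they are
deleted here and in the S3 test module below. The commit adds no
replacement; purely as an illustration of the new contract, a negative
test would now assert that the exception propagates (hypothetical, not
part of this change):

    from functools import cached_property

    import pytest

    class BrokenHandler:
        @cached_property
        def hook(self):
            raise RuntimeError("could not create hook")  # simulated failure

    def test_hook_error_propagates():
        with pytest.raises(RuntimeError):
            BrokenHandler().hook
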
diff --git a/tests/providers/amazon/aws/log/test_s3_task_handler.py b/tests/providers/amazon/aws/log/test_s3_task_handler.py
index b78db60c98..82d962c3ba 100644
--- a/tests/providers/amazon/aws/log/test_s3_task_handler.py
+++ b/tests/providers/amazon/aws/log/test_s3_task_handler.py
@@ -20,7 +20,6 @@ from __future__ import annotations
 import contextlib
 import os
 from unittest import mock
-from unittest.mock import ANY
 
 import pytest
 from botocore.exceptions import ClientError
@@ -97,23 +96,6 @@ class TestS3TaskHandler:
         assert isinstance(self.s3_task_handler.hook, S3Hook)
         assert self.s3_task_handler.hook.transfer_config.use_threads is False
 
-    @conf_vars({('logging', 'remote_log_conn_id'): 'aws_default'})
-    def test_hook_raises(self):
-        handler = S3TaskHandler(self.local_log_location, self.remote_log_base)
-        with mock.patch.object(handler.log, 'error') as mock_error:
-            with mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook") as mock_hook:
-                mock_hook.side_effect = Exception('Failed to connect')
-                # Initialize the hook
-                handler.hook
-
-            mock_error.assert_called_once_with(
-                'Could not create an S3Hook with connection id "%s". Please make '
-                'sure that apache-airflow[aws] is installed and the S3 connection exists. Exception : "%s"',
-                'aws_default',
-                ANY,
-                exc_info=True,
-            )
-
     def test_log_exists(self):
         self.conn.put_object(Bucket='bucket', Key=self.remote_log_key, Body=b'')
         assert self.s3_task_handler.s3_log_exists(self.remote_log_location)