Posted to commits@airflow.apache.org by el...@apache.org on 2024/02/07 05:51:27 UTC

(airflow) branch main updated: remove info log from download_file (#37211)

This is an automated email from the ASF dual-hosted git repository.

eladkal pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new d8ce75cd53 remove info log from download_file (#37211)
d8ce75cd53 is described below

commit d8ce75cd53dd3dd76ec86c65083dee32d6ae3bda
Author: Kalyan <ka...@live.com>
AuthorDate: Wed Feb 7 11:21:20 2024 +0530

    remove info log from download_file (#37211)
    
    Signed-off-by: kalyanr <ka...@live.com>
---
 airflow/providers/amazon/aws/hooks/s3.py | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/airflow/providers/amazon/aws/hooks/s3.py b/airflow/providers/amazon/aws/hooks/s3.py
index 1a961e583c..28b9adf7c6 100644
--- a/airflow/providers/amazon/aws/hooks/s3.py
+++ b/airflow/providers/amazon/aws/hooks/s3.py
@@ -1369,6 +1369,10 @@ class S3Hook(AwsBaseHook):
         """
         Download a file from the S3 location to the local file system.
 
+        Note:
+            This function shadows the 'download_file' method of S3 API, but it is not the same.
+            If you want to use the original method from S3 API, please use 'S3Hook.get_conn().download_file()'
+
         .. seealso::
             - :external+boto3:py:meth:`S3.Object.download_fileobj`
 
@@ -1386,12 +1390,6 @@ class S3Hook(AwsBaseHook):
             Default: True.
         :return: the file name.
         """
-        self.log.info(
-            "This function shadows the 'download_file' method of S3 API, but it is not the same. If you "
-            "want to use the original method from S3 API, please call "
-            "'S3Hook.get_conn().download_file()'"
-        )
-
         self.log.info("Downloading source S3 file from Bucket %s with path %s", bucket_name, key)
 
         try:
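
For context, here is a minimal usage sketch of the distinction the new docstring note describes: Airflow's S3Hook.download_file() helper versus the boto3 client method of the same name reached via S3Hook.get_conn(). The connection id, bucket, key, and local path below are illustrative placeholders, not values from this commit.

    from airflow.providers.amazon.aws.hooks.s3 import S3Hook

    hook = S3Hook(aws_conn_id="aws_default")

    # Airflow helper: downloads the object to a local file (a temporary one by
    # default) and returns the resulting local file name.
    local_file = hook.download_file(key="data/report.csv", bucket_name="my-bucket")

    # boto3 S3 client method of the same name: you choose the exact destination
    # path yourself and it returns None.
    hook.get_conn().download_file(
        Bucket="my-bucket", Key="data/report.csv", Filename="/tmp/report.csv"
    )

The change itself moves this guidance from a per-call INFO log into the docstring, so the note stays discoverable without being emitted on every download.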