Posted to commits@airflow.apache.org by "ASF GitHub Bot (JIRA)" <ji...@apache.org> on 2018/09/10 09:07:00 UTC

[jira] [Commented] (AIRFLOW-2985) Operators for S3 object copying/deleting [boto3.client.copy_object()/delete_object()]

    [ https://issues.apache.org/jira/browse/AIRFLOW-2985?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16608910#comment-16608910 ] 

ASF GitHub Bot commented on AIRFLOW-2985:
-----------------------------------------

ashb closed pull request #3823: [AIRFLOW-2985] Operators for S3 object copying/deleting
URL: https://github.com/apache/incubator-airflow/pull/3823
 
 
   

This is a PR merged from a forked repository. As GitHub hides the
original diff on merge, it is displayed below for the sake of
provenance:

diff --git a/airflow/contrib/operators/s3_copy_object_operator.py b/airflow/contrib/operators/s3_copy_object_operator.py
new file mode 100644
index 0000000000..330138ed27
--- /dev/null
+++ b/airflow/contrib/operators/s3_copy_object_operator.py
@@ -0,0 +1,93 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from airflow.hooks.S3_hook import S3Hook
+from airflow.models import BaseOperator
+from airflow.utils.decorators import apply_defaults
+
+
+class S3CopyObjectOperator(BaseOperator):
+    """
+    Creates a copy of an object that is already stored in S3.
+
+    Note: the S3 connection used here needs to have access to both
+    source and destination bucket/key.
+
+    :param source_bucket_key: The key of the source object.
+
+        It can be either a full s3:// style URL or a relative path from the root level.
+
+        When it is specified as a full s3:// URL, omit source_bucket_name.
+    :type source_bucket_key: str
+    :param dest_bucket_key: The key of the object to copy to.
+
+        The convention to specify `dest_bucket_key` is the same as `source_bucket_key`.
+    :type dest_bucket_key: str
+    :param source_bucket_name: Name of the S3 bucket in which the source object is stored.
+
+        It should be omitted when `source_bucket_key` is provided as a full s3:// url.
+    :type source_bucket_name: str
+    :param dest_bucket_name: Name of the S3 bucket to which the object is copied.
+
+        It should be omitted when `dest_bucket_key` is provided as a full s3:// url.
+    :type dest_bucket_name: str
+    :param source_version_id: Version ID of the source object (OPTIONAL)
+    :type source_version_id: str
+    :param aws_conn_id: Connection id of the S3 connection to use
+    :type aws_conn_id: str
+    :param verify: Whether or not to verify SSL certificates for S3 connection.
+        By default SSL certificates are verified.
+
+        You can provide the following values:
+
+        - False: do not validate SSL certificates. SSL will still be
+                 used, but SSL certificates will not be verified.
+        - path/to/cert/bundle.pem: A filename of the CA cert bundle
+                 to use. You can specify this argument if you want
+                 to use a different CA cert bundle than the one
+                 used by botocore.
+    :type verify: bool or str
+    """
+
+    @apply_defaults
+    def __init__(
+            self,
+            source_bucket_key,
+            dest_bucket_key,
+            source_bucket_name=None,
+            dest_bucket_name=None,
+            source_version_id=None,
+            aws_conn_id='aws_default',
+            verify=None,
+            *args, **kwargs):
+        super(S3CopyObjectOperator, self).__init__(*args, **kwargs)
+
+        self.source_bucket_key = source_bucket_key
+        self.dest_bucket_key = dest_bucket_key
+        self.source_bucket_name = source_bucket_name
+        self.dest_bucket_name = dest_bucket_name
+        self.source_version_id = source_version_id
+        self.aws_conn_id = aws_conn_id
+        self.verify = verify
+
+    def execute(self, context):
+        s3_hook = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
+        s3_hook.copy_object(self.source_bucket_key, self.dest_bucket_key,
+                            self.source_bucket_name, self.dest_bucket_name,
+                            self.source_version_id)
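
For reference, a minimal sketch of how the new operator might be wired
into a DAG (the DAG arguments, bucket names and keys below are
illustrative, not part of the change):

    from datetime import datetime

    from airflow import DAG
    from airflow.contrib.operators.s3_copy_object_operator import S3CopyObjectOperator

    dag = DAG(dag_id="s3_copy_example",  # hypothetical DAG
              start_date=datetime(2018, 9, 1),
              schedule_interval=None)

    # copy using explicit bucket names and keys relative to the root level
    copy_task = S3CopyObjectOperator(
        task_id="copy_report",
        source_bucket_name="my-source-bucket",      # hypothetical bucket
        source_bucket_key="reports/2018/data.csv",  # hypothetical key
        dest_bucket_name="my-dest-bucket",
        dest_bucket_key="backup/data.csv",
        aws_conn_id="aws_default",
        dag=dag)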
diff --git a/airflow/contrib/operators/s3_delete_objects_operator.py b/airflow/contrib/operators/s3_delete_objects_operator.py
new file mode 100644
index 0000000000..1aa1b3901e
--- /dev/null
+++ b/airflow/contrib/operators/s3_delete_objects_operator.py
@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from airflow.exceptions import AirflowException
+from airflow.hooks.S3_hook import S3Hook
+from airflow.models import BaseOperator
+from airflow.utils.decorators import apply_defaults
+
+
+class S3DeleteObjectsOperator(BaseOperator):
+    """
+    Deletes a single object or multiple objects from a bucket
+    using a single HTTP request.
+
+    Users may specify up to 1000 keys to delete.
+
+    :param bucket: Name of the bucket from which the object(s) will be deleted
+    :type bucket: str
+    :param keys: The key(s) to delete from S3 bucket.
+
+        When ``keys`` is a string, it is interpreted as the key of
+        the single object to delete.
+
+        When ``keys`` is a list, it is interpreted as the list of
+        keys to delete.
+
+        You may specify up to 1000 keys.
+    :type keys: str or list
+    :param aws_conn_id: Connection id of the S3 connection to use
+    :type aws_conn_id: str
+    :param verify: Whether or not to verify SSL certificates for S3 connection.
+        By default SSL certificates are verified.
+
+        You can provide the following values:
+
+        - False: do not validate SSL certificates. SSL will still be
+                 used, but SSL certificates will not be verified.
+        - path/to/cert/bundle.pem: A filename of the CA cert bundle
+                 to use. You can specify this argument if you want
+                 to use a different CA cert bundle than the one
+                 used by botocore.
+    :type verify: bool or str
+    """
+
+    @apply_defaults
+    def __init__(
+            self,
+            bucket,
+            keys,
+            aws_conn_id='aws_default',
+            verify=None,
+            *args, **kwargs):
+        super(S3DeleteObjectsOperator, self).__init__(*args, **kwargs)
+        self.bucket = bucket
+        self.keys = keys
+        self.aws_conn_id = aws_conn_id
+        self.verify = verify
+
+    def execute(self, context):
+        s3_hook = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
+
+        response = s3_hook.delete_objects(bucket=self.bucket, keys=self.keys)
+
+        deleted_keys = [x['Key'] for x in response.get("Deleted", [])]
+        self.log.info("Deleted: {}".format(deleted_keys))
+
+        if "Errors" in response:
+            errors_keys = [x['Key'] for x in response.get("Errors", [])]
+            raise AirflowException("Errors when deleting: {}".format(errors_keys))
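
Similarly, a minimal sketch for the delete operator (names are again
illustrative; ``keys`` may be a single key string or a list of up to
1000 keys):

    from airflow.contrib.operators.s3_delete_objects_operator import S3DeleteObjectsOperator

    # delete a single object; passing a list would delete several
    # objects with a single HTTP request
    delete_task = S3DeleteObjectsOperator(
        task_id="delete_staging_file",
        bucket="my-staging-bucket",    # hypothetical bucket
        keys="staging/tmp/data.csv",   # or ["a.csv", "b.csv", ...]
        aws_conn_id="aws_default",
        dag=dag)                       # reuses the DAG from the sketch above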
diff --git a/airflow/hooks/S3_hook.py b/airflow/hooks/S3_hook.py
index 2d64b31534..b65074e954 100644
--- a/airflow/hooks/S3_hook.py
+++ b/airflow/hooks/S3_hook.py
@@ -384,3 +384,89 @@ def load_bytes(self,
 
         client = self.get_conn()
         client.upload_fileobj(filelike_buffer, bucket_name, key, ExtraArgs=extra_args)
+
+    def copy_object(self,
+                    source_bucket_key,
+                    dest_bucket_key,
+                    source_bucket_name=None,
+                    dest_bucket_name=None,
+                    source_version_id=None):
+        """
+        Creates a copy of an object that is already stored in S3.
+
+        Note: the S3 connection used here needs to have access to both
+        source and destination bucket/key.
+
+        :param source_bucket_key: The key of the source object.
+
+            It can be either a full s3:// style URL or a relative path from the root level.
+
+            When it is specified as a full s3:// URL, omit source_bucket_name.
+        :type source_bucket_key: str
+        :param dest_bucket_key: The key of the object to copy to.
+
+            The convention to specify `dest_bucket_key` is the same
+            as `source_bucket_key`.
+        :type dest_bucket_key: str
+        :param source_bucket_name: Name of the S3 bucket in which the source object is stored.
+
+            It should be omitted when `source_bucket_key` is provided as a full s3:// url.
+        :type source_bucket_name: str
+        :param dest_bucket_name: Name of the S3 bucket to which the object is copied.
+
+            It should be omitted when `dest_bucket_key` is provided as a full s3:// url.
+        :type dest_bucket_name: str
+        :param source_version_id: Version ID of the source object (OPTIONAL)
+        :type source_version_id: str
+        """
+
+        if dest_bucket_name is None:
+            dest_bucket_name, dest_bucket_key = self.parse_s3_url(dest_bucket_key)
+        else:
+            parsed_url = urlparse(dest_bucket_key)
+            if parsed_url.scheme != '' or parsed_url.netloc != '':
+                raise AirflowException('If dest_bucket_name is provided, ' +
+                                       'dest_bucket_key should be relative path ' +
+                                       'from root level, rather than a full s3:// url')
+
+        if source_bucket_name is None:
+            source_bucket_name, source_bucket_key = self.parse_s3_url(source_bucket_key)
+        else:
+            parsed_url = urlparse(source_bucket_key)
+            if parsed_url.scheme != '' or parsed_url.netloc != '':
+                raise AirflowException('If source_bucket_name is provided, ' +
+                                       'source_bucket_key should be relative path ' +
+                                       'from root level, rather than a full s3:// url')
+
+        CopySource = {'Bucket': source_bucket_name,
+                      'Key': source_bucket_key,
+                      'VersionId': source_version_id}
+        response = self.get_conn().copy_object(Bucket=dest_bucket_name,
+                                               Key=dest_bucket_key,
+                                               CopySource=CopySource)
+        return response
+
+    def delete_objects(self,
+                       bucket,
+                       keys):
+        """
+        :param bucket: Name of the bucket from which the object(s) will be deleted
+        :type bucket: str
+        :param keys: The key(s) to delete from S3 bucket.
+
+            When ``keys`` is a string, it is interpreted as the key of
+            the single object to delete.
+
+            When ``keys`` is a list, it is interpreted as the list of
+            keys to delete.
+        :type keys: str or list
+        """
+        # ``keys`` may be a single key (str) or a list of keys;
+        # normalize it to a list before building the delete request
+        if not isinstance(keys, list):
+            keys = [keys]
+
+        delete_dict = {"Objects": [{"Key": k} for k in keys]}
+        response = self.get_conn().delete_objects(Bucket=bucket,
+                                                  Delete=delete_dict)
+        return response
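
The new hook methods can also be called directly, e.g. from a
PythonOperator callable. A short sketch, again with illustrative
bucket and key names:

    from airflow.hooks.S3_hook import S3Hook

    hook = S3Hook(aws_conn_id="aws_default")

    # when full s3:// URLs are passed, the bucket names are parsed out
    # of the URLs, so the *_bucket_name arguments must be omitted
    hook.copy_object(source_bucket_key="s3://my-source-bucket/reports/2018/data.csv",
                     dest_bucket_key="s3://my-dest-bucket/backup/data.csv")

    response = hook.delete_objects(bucket="my-dest-bucket",
                                   keys=["backup/data.csv"])
    deleted = [obj["Key"] for obj in response.get("Deleted", [])]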
diff --git a/docs/code.rst b/docs/code.rst
index 80ec76193f..d9f62c5734 100644
--- a/docs/code.rst
+++ b/docs/code.rst
@@ -183,6 +183,8 @@ Operators
 .. autoclass:: airflow.contrib.operators.qubole_check_operator.QuboleCheckOperator
 .. autoclass:: airflow.contrib.operators.qubole_check_operator.QuboleValueCheckOperator
 .. autoclass:: airflow.contrib.operators.qubole_operator.QuboleOperator
+.. autoclass:: airflow.contrib.operators.s3_copy_object_operator.S3CopyObjectOperator
+.. autoclass:: airflow.contrib.operators.s3_delete_objects_operator.S3DeleteObjectsOperator
 .. autoclass:: airflow.contrib.operators.s3_list_operator.S3ListOperator
 .. autoclass:: airflow.contrib.operators.s3_to_gcs_operator.S3ToGoogleCloudStorageOperator
 .. autoclass:: airflow.contrib.operators.segment_track_event_operator.SegmentTrackEventOperator
diff --git a/tests/contrib/operators/test_s3_copy_object_operator.py b/tests/contrib/operators/test_s3_copy_object_operator.py
new file mode 100644
index 0000000000..25904ecc51
--- /dev/null
+++ b/tests/contrib/operators/test_s3_copy_object_operator.py
@@ -0,0 +1,89 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import io
+import unittest
+
+import boto3
+from moto import mock_s3
+
+from airflow.contrib.operators.s3_copy_object_operator import S3CopyObjectOperator
+
+
+class TestS3CopyObjectOperator(unittest.TestCase):
+
+    def setUp(self):
+        self.source_bucket = "bucket1"
+        self.source_key = "path1/data.txt"
+        self.dest_bucket = "bucket2"
+        self.dest_key = "path2/data_copy.txt"
+
+    @mock_s3
+    def test_s3_copy_object_arg_combination_1(self):
+        conn = boto3.client('s3')
+        conn.create_bucket(Bucket=self.source_bucket)
+        conn.create_bucket(Bucket=self.dest_bucket)
+        conn.upload_fileobj(Bucket=self.source_bucket,
+                            Key=self.source_key,
+                            Fileobj=io.BytesIO(b"input"))
+
+        # there should be nothing found before S3CopyObjectOperator is executed
+        self.assertFalse('Contents' in conn.list_objects(Bucket=self.dest_bucket,
+                                                         Prefix=self.dest_key))
+
+        t = S3CopyObjectOperator(task_id="test_task_s3_copy_object",
+                                 source_bucket_key=self.source_key,
+                                 source_bucket_name=self.source_bucket,
+                                 dest_bucket_key=self.dest_key,
+                                 dest_bucket_name=self.dest_bucket)
+        t.execute(None)
+
+        objects_in_dest_bucket = conn.list_objects(Bucket=self.dest_bucket,
+                                                   Prefix=self.dest_key)
+        # there should be exactly one object found
+        self.assertEqual(len(objects_in_dest_bucket['Contents']), 1)
+        # and its key should match the dest_key specified earlier
+        self.assertEqual(objects_in_dest_bucket['Contents'][0]['Key'], self.dest_key)
+
+    @mock_s3
+    def test_s3_copy_object_arg_combination_2(self):
+        conn = boto3.client('s3')
+        conn.create_bucket(Bucket=self.source_bucket)
+        conn.create_bucket(Bucket=self.dest_bucket)
+        conn.upload_fileobj(Bucket=self.source_bucket,
+                            Key=self.source_key,
+                            Fileobj=io.BytesIO(b"input"))
+
+        # there should be nothing found before S3CopyObjectOperator is executed
+        self.assertFalse('Contents' in conn.list_objects(Bucket=self.dest_bucket,
+                                                         Prefix=self.dest_key))
+
+        source_key_s3_url = "s3://{}/{}".format(self.source_bucket, self.source_key)
+        dest_key_s3_url = "s3://{}/{}".format(self.dest_bucket, self.dest_key)
+        t = S3CopyObjectOperator(task_id="test_task_s3_copy_object",
+                                 source_bucket_key=source_key_s3_url,
+                                 dest_bucket_key=dest_key_s3_url)
+        t.execute(None)
+
+        objects_in_dest_bucket = conn.list_objects(Bucket=self.dest_bucket,
+                                                   Prefix=self.dest_key)
+        # there should be exactly one object found
+        self.assertEqual(len(objects_in_dest_bucket['Contents']), 1)
+        # and its key should match the dest_key specified earlier
+        self.assertEqual(objects_in_dest_bucket['Contents'][0]['Key'], self.dest_key)
diff --git a/tests/contrib/operators/test_s3_delete_objects_operator.py b/tests/contrib/operators/test_s3_delete_objects_operator.py
new file mode 100644
index 0000000000..05ce1f57db
--- /dev/null
+++ b/tests/contrib/operators/test_s3_delete_objects_operator.py
@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import io
+import unittest
+
+import boto3
+from moto import mock_s3
+
+from airflow.contrib.operators.s3_delete_objects_operator import S3DeleteObjectsOperator
+
+
+class TestS3DeleteObjectsOperator(unittest.TestCase):
+
+    @mock_s3
+    def test_s3_delete_single_object(self):
+        bucket = "testbucket"
+        key = "path/data.txt"
+
+        conn = boto3.client('s3')
+        conn.create_bucket(Bucket=bucket)
+        conn.upload_fileobj(Bucket=bucket,
+                            Key=key,
+                            Fileobj=io.BytesIO(b"input"))
+
+        # The object should be detected before the DELETE action is taken
+        objects_in_dest_bucket = conn.list_objects(Bucket=bucket,
+                                                   Prefix=key)
+        self.assertEqual(len(objects_in_dest_bucket['Contents']), 1)
+        self.assertEqual(objects_in_dest_bucket['Contents'][0]['Key'], key)
+
+        t = S3DeleteObjectsOperator(task_id="test_task_s3_delete_single_object",
+                                    bucket=bucket,
+                                    keys=key)
+        t.execute(None)
+
+        # There should be no object found in the bucket created earlier
+        self.assertFalse('Contents' in conn.list_objects(Bucket=bucket,
+                                                         Prefix=key))
+
+    @mock_s3
+    def test_s3_delete_multiple_objects(self):
+        bucket = "testbucket"
+        key_pattern = "path/data"
+        n_keys = 3
+        keys = [key_pattern + str(i) for i in range(n_keys)]
+
+        conn = boto3.client('s3')
+        conn.create_bucket(Bucket=bucket)
+        for k in keys:
+            conn.upload_fileobj(Bucket=bucket,
+                                Key=k,
+                                Fileobj=io.BytesIO(b"input"))
+
+        # The objects should be detected before the DELETE action is taken
+        objects_in_dest_bucket = conn.list_objects(Bucket=bucket,
+                                                   Prefix=key_pattern)
+        self.assertEqual(len(objects_in_dest_bucket['Contents']), n_keys)
+        self.assertEqual(sorted([x['Key'] for x in objects_in_dest_bucket['Contents']]),
+                         sorted(keys))
+
+        t = S3DeleteObjectsOperator(task_id="test_task_s3_delete_multiple_objects",
+                                    bucket=bucket,
+                                    keys=keys)
+        t.execute(None)
+
+        # There should be no object found in the bucket created earlier
+        self.assertFalse('Contents' in conn.list_objects(Bucket=bucket,
+                                                         Prefix=key_pattern))


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


> Operators for S3 object copying/deleting [boto3.client.copy_object()/delete_object()]
> -------------------------------------------------------------------------------------
>
>                 Key: AIRFLOW-2985
>                 URL: https://issues.apache.org/jira/browse/AIRFLOW-2985
>             Project: Apache Airflow
>          Issue Type: Improvement
>          Components: operators
>            Reporter: Xiaodong DENG
>            Assignee: Xiaodong DENG
>            Priority: Minor
>             Fix For: 2.0.0
>
>
> Currently we don't have operators in Airflow to copy/delete objects within S3, even though these are quite common use cases when dealing with data in S3.



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)