Posted to notifications@libcloud.apache.org by cl...@apache.org on 2021/05/07 01:53:26 UTC

[libcloud] branch storage-s3-integration-tests created (now 305d680)

This is an automated email from the ASF dual-hosted git repository.

clewolff pushed a change to branch storage-s3-integration-tests
in repository https://gitbox.apache.org/repos/asf/libcloud.git.


      at 305d680  Add integration tests for S3

This branch includes the following new commits:

     new 892d979  Enable tests to work with existing containers
     new 305d680  Add integration tests for S3

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


[libcloud] 01/02: Enable tests to work with existing containers

Posted by cl...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

clewolff pushed a commit to branch storage-s3-integration-tests
in repository https://gitbox.apache.org/repos/asf/libcloud.git

commit 892d97952fc76e789d18d678d0c4ea01af1b89b6
Author: Clemens Wolff <cl...@apache.org>
AuthorDate: Thu May 6 21:51:28 2021 -0400

    Enable tests to work with existing containers
---
 integration/storage/base.py | 41 +++++++++++++++++++++++------------------
 1 file changed, 23 insertions(+), 18 deletions(-)

diff --git a/integration/storage/base.py b/integration/storage/base.py
index 56786cd..902ccd6 100644
--- a/integration/storage/base.py
+++ b/integration/storage/base.py
@@ -45,6 +45,9 @@ class Integration:
         account = None
         secret = None
 
+        container_name_prefix = 'lcsit'
+        container_name_max_length = 63
+
         def setUp(self):
             for required in 'provider', 'account', 'secret':
                 value = getattr(self, required, None)
@@ -63,6 +66,9 @@ class Integration:
 
         def tearDown(self):
             for container in self.driver.list_containers():
+                if not container.name.startswith(self.container_name_prefix):
+                    continue
+
                 for obj in container.list_objects():
                     try:
                         obj.delete()
@@ -86,7 +92,7 @@ class Integration:
 
         def test_containers(self):
             # make a new container
-            container_name = random_container_name()
+            container_name = self._random_container_name()
             container = self.driver.create_container(container_name)
             self.assertEqual(container.name, container_name)
             container = self.driver.get_container(container_name)
@@ -98,7 +104,7 @@ class Integration:
 
             # check that the new container can be listed
             containers = self.driver.list_containers()
-            self.assertEqual([c.name for c in containers], [container_name])
+            self.assertIn(container_name, [c.name for c in containers])
 
             # delete the container
             self.driver.delete_container(container)
@@ -109,12 +115,12 @@ class Integration:
 
             # check that the container is deleted
             containers = self.driver.list_containers()
-            self.assertEqual([c.name for c in containers], [])
+            self.assertNotIn(container_name, [c.name for c in containers])
 
         def _test_objects(self, do_upload, do_download, size=1 * MB):
             content = os.urandom(size)
             blob_name = 'testblob'
-            container = self.driver.create_container(random_container_name())
+            container = self.driver.create_container(self._random_container_name())
 
             # upload a file
             obj = do_upload(container, blob_name, content)
@@ -167,7 +173,7 @@ class Integration:
         def test_objects_range_downloads(self):
             blob_name = 'testblob-range'
             content = b'0123456789'
-            container = self.driver.create_container(random_container_name())
+            container = self.driver.create_container(self._random_container_name())
 
             obj = self.driver.upload_object(
                 self._create_tempfile(content=content),
@@ -255,7 +261,7 @@ class Integration:
         def test_upload_via_stream_with_content_encoding(self):
             object_name = 'content_encoding.gz'
             content = gzip.compress(os.urandom(MB // 100))
-            container = self.driver.create_container(random_container_name())
+            container = self.driver.create_container(self._random_container_name())
             self.driver.upload_object_via_stream(
                 iter(content),
                 container,
@@ -269,7 +275,7 @@ class Integration:
 
         def test_cdn_url(self):
             content = os.urandom(MB // 100)
-            container = self.driver.create_container(random_container_name())
+            container = self.driver.create_container(self._random_container_name())
             obj = self.driver.upload_object_via_stream(iter(content), container, 'cdn')
 
             response = requests.get(self.driver.get_object_cdn_url(obj))
@@ -284,6 +290,16 @@ class Integration:
             self.addCleanup(os.remove, path)
             return path
 
+        @classmethod
+        def _random_container_name(cls):
+            suffix = random_string(cls.container_name_max_length)
+            name = cls.container_name_prefix + suffix
+            name = re.sub('[^a-z0-9-]', '-', name)
+            name = re.sub('-+', '-', name)
+            name = name[:cls.container_name_max_length]
+            name = name.lower()
+            return name
+
     class ContainerTestBase(TestBase):
         image = None
         version = 'latest'
@@ -373,17 +389,6 @@ def random_string(length, alphabet=string.ascii_lowercase + string.digits):
     return ''.join(random.choice(alphabet) for _ in range(length))
 
 
-def random_container_name(prefix='test'):
-    max_length = 63
-    suffix = random_string(max_length)
-    name = prefix + suffix
-    name = re.sub('[^a-z0-9-]', '-', name)
-    name = re.sub('-+', '-', name)
-    name = name[:max_length]
-    name = name.lower()
-    return name
-
-
 def read_stream(stream):
     buffer = io.BytesIO()
     buffer.writelines(stream)
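
Commit 01/02 replaces the module-level random_container_name() helper with a class-level _random_container_name() that prepends a fixed 'lcsit' prefix, and tearDown now skips any container whose name lacks that prefix, so the suite can run against a storage account that already holds unrelated containers. Below is a minimal standalone sketch of that naming scheme for illustration only; the constants mirror the patch, but the free-standing function and the print call are hypothetical.

    # Minimal sketch (not part of the commit) of the naming scheme the patch
    # introduces: every container the suite creates starts with the 'lcsit'
    # prefix, so cleanup can safely skip containers the tests did not create.
    import random
    import re
    import string

    CONTAINER_NAME_PREFIX = 'lcsit'
    CONTAINER_NAME_MAX_LENGTH = 63

    def random_container_name():
        alphabet = string.ascii_lowercase + string.digits
        suffix = ''.join(random.choice(alphabet) for _ in range(CONTAINER_NAME_MAX_LENGTH))
        name = CONTAINER_NAME_PREFIX + suffix
        name = re.sub('[^a-z0-9-]', '-', name)  # defensive: keep only name-safe characters
        name = re.sub('-+', '-', name)          # collapse runs of dashes
        return name[:CONTAINER_NAME_MAX_LENGTH].lower()

    # tearDown then only touches matching containers, e.g.:
    #     if not container.name.startswith(CONTAINER_NAME_PREFIX): continue
    print(random_container_name())  # always starts with 'lcsit', at most 63 characters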

[libcloud] 02/02: Add integration tests for S3

Posted by cl...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

clewolff pushed a commit to branch storage-s3-integration-tests
in repository https://gitbox.apache.org/repos/asf/libcloud.git

commit 305d680fde4c0398d9cc1a7e31428309fe5b0fbb
Author: Clemens Wolff <cl...@apache.org>
AuthorDate: Thu May 6 21:53:07 2021 -0400

    Add integration tests for S3
---
 integration/storage/base.py          |  8 ++--
 integration/storage/requirements.txt |  1 +
 integration/storage/test_s3.py       | 78 ++++++++++++++++++++++++++++++++++++
 3 files changed, 84 insertions(+), 3 deletions(-)

diff --git a/integration/storage/base.py b/integration/storage/base.py
index 902ccd6..dda7d06 100644
--- a/integration/storage/base.py
+++ b/integration/storage/base.py
@@ -98,9 +98,7 @@ class Integration:
             container = self.driver.get_container(container_name)
             self.assertEqual(container.name, container_name)
 
-            # check that an existing container can't be re-created
-            with self.assertRaises(types.ContainerAlreadyExistsError):
-                self.driver.create_container(container_name)
+            self.assert_existing_container_cannot_be_recreated(container)
 
             # check that the new container can be listed
             containers = self.driver.list_containers()
@@ -151,6 +149,10 @@ class Integration:
             blobs = self.driver.list_container_objects(container)
             self.assertEqual([blob.name for blob in blobs], [blob_name[::-1]])
 
+        def assert_existing_container_cannot_be_recreated(self, container):
+            with self.assertRaises(types.ContainerAlreadyExistsError):
+                self.driver.create_container(container.name)
+
         def assert_file_is_missing(self, container, obj):
             with self.assertRaises(types.ObjectDoesNotExistError):
                 self.driver.delete_object(obj)
diff --git a/integration/storage/requirements.txt b/integration/storage/requirements.txt
index cbdd599..d54d412 100644
--- a/integration/storage/requirements.txt
+++ b/integration/storage/requirements.txt
@@ -1,5 +1,6 @@
 azure-identity
 azure-mgmt-resource
 azure-mgmt-storage
+boto3
 docker
 requests
diff --git a/integration/storage/test_s3.py b/integration/storage/test_s3.py
new file mode 100644
index 0000000..8276b80
--- /dev/null
+++ b/integration/storage/test_s3.py
@@ -0,0 +1,78 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the 'License'); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import sys
+import unittest
+
+try:
+    import boto3
+except ImportError:
+    boto3 = None
+
+from integration.storage.base import Integration
+from libcloud.storage import types
+
+
+class S3Test(Integration.TestBase):
+    provider = 's3'
+
+    @classmethod
+    def setUpClass(cls):
+        if boto3 is None:
+            raise unittest.SkipTest('missing boto3 library')
+
+        config = {
+            key: os.getenv(key)
+            for key in (
+                'AWS_ACCESS_KEY_ID',
+                'AWS_ACCESS_KEY_SECRET',
+            )
+        }
+
+        for key, value in config.items():
+            if not value:
+                raise unittest.SkipTest('missing environment variable %s' % key)
+
+        cls.account = config['AWS_ACCESS_KEY_ID']
+        cls.secret = config['AWS_ACCESS_KEY_SECRET']
+
+    @classmethod
+    def tearDownClass(cls):
+        client = boto3.Session(
+            aws_access_key_id=cls.account,
+            aws_secret_access_key=cls.secret,
+        ).client('s3')
+
+        buckets = (
+            item['Name']
+            for item in client.list_buckets()['Buckets']
+            if item['Name'].startswith(cls.container_name_prefix)
+        )
+
+        for name in buckets:
+            bucket = boto3.resource('s3', aws_access_key_id=cls.account, aws_secret_access_key=cls.secret).Bucket(name)
+            bucket.objects.delete()
+            client.delete_bucket(Bucket=name)
+
+    def assert_existing_container_cannot_be_recreated(self, container):
+        pass
+
+    def assert_file_is_missing(self, container, obj):
+        with self.assertRaises(types.ObjectDoesNotExistError):
+            self.driver.get_object(container.name, obj.name)
+
+
+if __name__ == '__main__':
+    sys.exit(unittest.main())
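
The new S3 suite reuses the shared TestBase and overrides two hooks: assert_existing_container_cannot_be_recreated becomes a no-op, presumably because S3 does not consistently report an error when the owning account re-creates its own bucket, and assert_file_is_missing checks get_object instead of delete_object. The sketch below shows one way the suite might be invoked, assuming the two environment variables that setUpClass reads; the placeholder values and the runner call are illustrative, not part of the patch.

    # Hypothetical invocation sketch: set the credentials the test reads in
    # setUpClass, then run the S3 suite through unittest's loader.  Roughly
    # equivalent to `python -m integration.storage.test_s3` from the repo root.
    import os
    import unittest

    os.environ['AWS_ACCESS_KEY_ID'] = '<access key id>'          # placeholder
    os.environ['AWS_ACCESS_KEY_SECRET'] = '<secret access key>'  # placeholder

    suite = unittest.defaultTestLoader.loadTestsFromName('integration.storage.test_s3')
    unittest.TextTestRunner(verbosity=2).run(suite)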