You are viewing a plain text version of this content. The canonical link for it is here.
Posted to notifications@libcloud.apache.org by cl...@apache.org on 2021/03/20 16:07:31 UTC

[libcloud] branch storage-azure-integration-tests updated (4b8d856 -> 44194b9)

This is an automated email from the ASF dual-hosted git repository.

clewolff pushed a change to branch storage-azure-integration-tests
in repository https://gitbox.apache.org/repos/asf/libcloud.git.


 discard 4b8d856  Add integration tests for Azure Storage driver
     new 44194b9  Add integration tests for Azure Storage driver

This update added new revisions after undoing existing revisions.
That is to say, some revisions that were in the old version of the
branch are not in the new version.  This situation occurs
when a user --force pushes a change and generates a repository
containing something like this:

 * -- * -- B -- O -- O -- O   (4b8d856)
            \
             N -- N -- N   refs/heads/storage-azure-integration-tests (44194b9)

You should already have received notification emails for all of the O
revisions, and so the following emails describe only the N revisions
from the common base, B.

Any revisions marked "omit" are not gone; other references still
refer to them.  Any revisions marked "discard" are gone forever.

The 1 revision listed above as "new" is entirely new to this
repository and will be described in a separate email.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 libcloud/test/storage/base.py                      | 318 +++++++++++++++++
 .../test/storage/test_azure_blobs_integration.py   | 383 ++-------------------
 2 files changed, 340 insertions(+), 361 deletions(-)

[libcloud] 01/01: Add integration tests for Azure Storage driver

Posted by cl...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

clewolff pushed a commit to branch storage-azure-integration-tests
in repository https://gitbox.apache.org/repos/asf/libcloud.git

commit 44194b9a25c29e20d536a93c5ba3dafec8419059
Author: Clemens Wolff <cl...@microsoft.com>
AuthorDate: Sat Mar 20 11:46:18 2021 -0400

    Add integration tests for Azure Storage driver
---
 libcloud/test/storage/base.py                      | 318 +++++++++++++++++++++
 .../test/storage/test_azure_blobs_integration.py   |  62 ++++
 requirements-tests.txt                             |   1 +
 3 files changed, 381 insertions(+)

diff --git a/libcloud/test/storage/base.py b/libcloud/test/storage/base.py
index f4c48e0..88ab481 100644
--- a/libcloud/test/storage/base.py
+++ b/libcloud/test/storage/base.py
@@ -18,10 +18,31 @@ __all__ = [
 ]
 
 from typing import Tuple
+import gzip
+import io
+import os
+import random
+import re
+import socket
+import string
+import tempfile
+import time
+import unittest
 
+import requests
+
+try:
+    import docker
+except ImportError:
+    docker = None
+
+from libcloud.storage import providers, types
 from libcloud.test import MockHttp  # pylint: disable-msg=E0611
 
 
+MB = 1024 * 1024
+
+
 class BaseRangeDownloadMockHttp(MockHttp):
     """
     Base MockHttp class which implements some utility methods for asserting
@@ -41,3 +62,300 @@ class BaseRangeDownloadMockHttp(MockHttp):
             end_bytes = len(body)
 
         return start_bytes, end_bytes
+
+
+class Integration:
+    """Namespace for the reusable storage integration test bases below.
+
+    Nesting the ``TestCase`` subclasses inside a plain class keeps unittest
+    discovery from collecting these abstract bases directly; concrete test
+    modules subclass ``Integration.TestBase`` or
+    ``Integration.ContainerTestBase`` instead.
+    """
+
+    class TestBase(unittest.TestCase):
+        """Driver-agnostic integration tests run against a live storage backend.
+
+        Subclasses must set ``provider``, ``account`` and ``secret`` (every
+        test is skipped otherwise) and may set ``host``, ``port`` and
+        ``secure`` to point the driver at a non-default endpoint.
+        """
+
+        provider = None  # libcloud provider id, e.g. 'azure_blobs'
+        account = None   # passed to the driver as 'key'
+        secret = None    # passed to the driver as 'secret'
+
+        def setUp(self):
+            """Instantiate the storage driver, or skip when config is missing."""
+            for required in 'provider', 'account', 'secret':
+                value = getattr(self, required, None)
+                if value is None:
+                    raise unittest.SkipTest('config {} not set'.format(required))
+
+            kwargs = {'key': self.account, 'secret': self.secret}
+
+            # Optional connection settings are only forwarded when a subclass
+            # actually defines them, so driver defaults apply otherwise.
+            for optional in 'host', 'port', 'secure':
+                value = getattr(self, optional, None)
+                if value is not None:
+                    kwargs[optional] = value
+
+            self.driver = providers.get_driver(self.provider)(**kwargs)
+
+        def tearDown(self):
+            """Delete every container (and its objects) visible to the driver.
+
+            NOTE(review): this wipes *all* containers the account can see, not
+            only those created by the test — fine for throwaway emulator
+            accounts, destructive against a shared account.
+            """
+            for container in self.driver.list_containers():
+                for obj in container.list_objects():
+                    obj.delete()
+                container.delete()
+
+        def test_containers(self):
+            """Exercise the container create/get/list/delete lifecycle."""
+            # make a new container
+            container_name = random_container_name()
+            container = self.driver.create_container(container_name)
+            self.assertEqual(container.name, container_name)
+            container = self.driver.get_container(container_name)
+            self.assertEqual(container.name, container_name)
+
+            # check that an existing container can't be re-created
+            with self.assertRaises(types.ContainerAlreadyExistsError):
+                self.driver.create_container(container_name)
+
+            # check that the new container can be listed
+            containers = self.driver.list_containers()
+            self.assertEqual([c.name for c in containers], [container_name])
+
+            # delete the container
+            self.driver.delete_container(container)
+
+            # check that a deleted container can't be looked up
+            with self.assertRaises(types.ContainerDoesNotExistError):
+                self.driver.get_container(container_name)
+
+            # check that the container is deleted
+            containers = self.driver.list_containers()
+            self.assertEqual([c.name for c in containers], [])
+
+        def _test_objects(self, do_upload, do_download, size=1 * MB):
+            """Round-trip ``size`` random bytes through the given upload and
+            download callables, exercising listing, prefix filtering,
+            deletion and the resulting error cases."""
+            content = os.urandom(size)
+            blob_name = 'testblob'
+            container = self.driver.create_container(random_container_name())
+
+            # upload a file
+            obj = do_upload(container, blob_name, content)
+            self.assertEqual(obj.name, blob_name)
+            obj = self.driver.get_object(container.name, blob_name)
+
+            # check that the file can be listed
+            blobs = self.driver.list_container_objects(container)
+            self.assertEqual([blob.name for blob in blobs], [blob_name])
+
+            # upload another file and check it's excluded in prefix listing
+            do_upload(container, blob_name[::-1], content[::-1])
+            blobs = self.driver.list_container_objects(
+                container, ex_prefix=blob_name[0:3]
+            )
+            self.assertEqual([blob.name for blob in blobs], [blob_name])
+
+            # check that the file can be read back
+            self.assertEqual(do_download(obj), content)
+
+            # delete the file
+            self.driver.delete_object(obj)
+
+            # check that a missing file can't be deleted or looked up
+            with self.assertRaises(types.ObjectDoesNotExistError):
+                self.driver.delete_object(obj)
+            with self.assertRaises(types.ObjectDoesNotExistError):
+                self.driver.get_object(container.name, blob_name)
+
+            # check that the file is deleted
+            blobs = self.driver.list_container_objects(container)
+            self.assertEqual([blob.name for blob in blobs], [blob_name[::-1]])
+
+        def test_objects(self, size=1 * MB):
+            """Round-trip objects via temporary files on disk."""
+            def do_upload(container, blob_name, content):
+                infile = self._create_tempfile(content=content)
+                return self.driver.upload_object(infile, container, blob_name)
+
+            def do_download(obj):
+                outfile = self._create_tempfile()
+                self.driver.download_object(obj, outfile, overwrite_existing=True)
+                with open(outfile, 'rb') as fobj:
+                    return fobj.read()
+
+            self._test_objects(do_upload, do_download, size)
+
+        def test_objects_range_downloads(self):
+            """Verify partial downloads via both the file and stream range APIs."""
+            blob_name = 'testblob-range'
+            content = b'0123456789'
+            container = self.driver.create_container(random_container_name())
+
+            obj = self.driver.upload_object(self._create_tempfile(content=content), container, blob_name)
+            self.assertEqual(obj.name, blob_name)
+            self.assertEqual(obj.size, len(content))
+
+            obj = self.driver.get_object(container.name, blob_name)
+            self.assertEqual(obj.name, blob_name)
+            self.assertEqual(obj.size, len(content))
+
+            # end_bytes is exclusive; None means "to the end of the object".
+            values = [
+                {'start_bytes': 0, 'end_bytes': 1, 'expected_content': b'0'},
+                {'start_bytes': 1, 'end_bytes': 5, 'expected_content': b'1234'},
+                {'start_bytes': 5, 'end_bytes': None, 'expected_content': b'56789'},
+                {'start_bytes': 5, 'end_bytes': len(content), 'expected_content': b'56789'},
+                {'start_bytes': 0, 'end_bytes': None, 'expected_content': b'0123456789'},
+                {'start_bytes': 0, 'end_bytes': len(content), 'expected_content': b'0123456789'},
+            ]
+
+            for value in values:
+                # 1. download_object_range
+                start_bytes = value['start_bytes']
+                end_bytes = value['end_bytes']
+                outfile = self._create_tempfile()
+
+                result = self.driver.download_object_range(
+                    obj,
+                    outfile,
+                    start_bytes=start_bytes,
+                    end_bytes=end_bytes,
+                    overwrite_existing=True,
+                )
+                self.assertTrue(result)
+
+                with open(outfile, 'rb') as fobj:
+                    downloaded_content = fobj.read()
+
+                # Cross-check the hard-coded expectation against a slice of the
+                # source content so the table itself is validated too.
+                if end_bytes is not None:
+                    expected_content = content[start_bytes:end_bytes]
+                else:
+                    expected_content = content[start_bytes:]
+
+                msg = 'Expected "%s", got "%s" for values: %s' % (expected_content, downloaded_content, str(value))
+                self.assertEqual(downloaded_content, expected_content, msg)
+                self.assertEqual(downloaded_content, value['expected_content'], msg)
+
+                # 2. download_object_range_as_stream
+                downloaded_content = read_stream(
+                    self.driver.download_object_range_as_stream(
+                        obj, start_bytes=start_bytes, end_bytes=end_bytes
+                    )
+                )
+                self.assertEqual(downloaded_content, expected_content)
+
+        @unittest.skipUnless(os.getenv('LARGE_FILE_SIZE_MB'), 'config not set')
+        def test_objects_large(self):
+            """Re-run the file round-trip with an opt-in, larger payload size."""
+            size = int(float(os.environ['LARGE_FILE_SIZE_MB']) * MB)
+            self.test_objects(size)
+
+        def test_objects_stream_io(self):
+            """Round-trip objects via a file-like (BytesIO) stream."""
+            def do_upload(container, blob_name, content):
+                content = io.BytesIO(content)
+                return self.driver.upload_object_via_stream(content, container, blob_name)
+
+            def do_download(obj):
+                return read_stream(self.driver.download_object_as_stream(obj))
+
+            self._test_objects(do_upload, do_download)
+
+        def test_objects_stream_iterable(self):
+            """Round-trip objects via a plain iterator of one-byte chunks."""
+            def do_upload(container, blob_name, content):
+                content = iter([content[i : i + 1] for i in range(len(content))])
+                return self.driver.upload_object_via_stream(content, container, blob_name)
+
+            def do_download(obj):
+                return read_stream(self.driver.download_object_as_stream(obj))
+
+            self._test_objects(do_upload, do_download)
+
+        def test_upload_via_stream_with_content_encoding(self):
+            """Check that a Content-Encoding header survives an upload."""
+            object_name = 'content_encoding.gz'
+            content = gzip.compress(os.urandom(MB // 100))
+            container = self.driver.create_container(random_container_name())
+            # NOTE(review): iter(bytes) yields ints, not 1-byte chunks (contrast
+            # test_objects_stream_iterable) — presumably the driver tolerates
+            # this; confirm, or wrap as 1-byte slices.
+            self.driver.upload_object_via_stream(
+                iter(content),
+                container,
+                object_name,
+                headers={'Content-Encoding': 'gzip'},
+            )
+
+            obj = self.driver.get_object(container.name, object_name)
+
+            self.assertEqual(obj.extra.get('content_encoding'), 'gzip')
+
+        def test_cdn_url(self):
+            """Check that the object's CDN URL serves the uploaded content."""
+            content = os.urandom(MB // 100)
+            container = self.driver.create_container(random_container_name())
+            # NOTE(review): iter(bytes) yields ints — see note in
+            # test_upload_via_stream_with_content_encoding.
+            obj = self.driver.upload_object_via_stream(iter(content), container, 'cdn')
+
+            response = requests.get(self.driver.get_object_cdn_url(obj))
+            response.raise_for_status()
+
+            self.assertEqual(response.content, content)
+
+        def _create_tempfile(self, prefix='', content=b''):
+            """Create a temp file holding ``content``; auto-removed after the
+            test via addCleanup. Returns the file path."""
+            fobj, path = tempfile.mkstemp(prefix=prefix, text=False)
+            os.write(fobj, content)
+            os.close(fobj)
+            self.addCleanup(os.remove, path)
+            return path
+
+    class ContainerTestBase(TestBase):
+        """Test base that boots the storage backend in a local Docker container.
+
+        Subclasses must set ``image`` and ``port``; tests are skipped when the
+        docker library or daemon is unavailable.
+        """
+
+        image = None        # docker image name (required)
+        version = 'latest'  # image tag
+        environment = {}    # env vars passed to the container
+
+        host = 'localhost'
+        port = None         # published container port (required)
+        secure = False
+
+        client = None       # docker client, set in setUpClass
+        container = None    # running container handle, set in setUpClass
+
+        @classmethod
+        def setUpClass(cls):
+            """Start the backend container and wait for its port to accept
+            connections (up to 10 seconds)."""
+            if docker is None:
+                raise unittest.SkipTest('missing docker library')
+
+            try:
+                cls.client = docker.from_env()
+            except docker.errors.DockerException:
+                raise unittest.SkipTest('unable to create docker client')
+
+            for required in 'image', 'port':
+                value = getattr(cls, required, None)
+                if value is None:
+                    raise unittest.SkipTest('config {} not set'.format(required))
+
+            cls.container = cls.client.containers.run(
+                '{}:{}'.format(cls.image, cls.version),
+                detach=True,
+                auto_remove=True,
+                ports={cls.port: cls.port},
+                environment=cls.environment,
+            )
+
+            timeout = 10
+            start_time = time.perf_counter()
+
+            # Poll the published port until the service inside the container
+            # is actually listening, then close the probe connection.
+            while True:
+                try:
+                    with socket.create_connection((cls.host, cls.port), timeout=timeout):
+                        break
+                except OSError as ex:
+                    time.sleep(1)
+                    if time.perf_counter() - start_time >= timeout:
+                        raise TimeoutError(
+                            'Waited too long for the port {} on host {} to start accepting '
+                            'connections.'.format(cls.port, cls.host)
+                        ) from ex
+
+        @classmethod
+        def tearDownClass(cls):
+            """Dump the container's logs for debugging, then kill it (the
+            container was started with auto_remove=True)."""
+            for line in cls.container.logs().splitlines():
+                print(line)
+            cls.container.kill()
+
+
def random_string(length, alphabet=string.ascii_lowercase + string.digits):
    """Return a random string of *length* characters drawn from *alphabet*."""
    chars = [random.choice(alphabet) for _ in range(length)]
    return "".join(chars)


def random_container_name(prefix="test"):
    """Build a randomized, storage-safe container name.

    The result is lowercase, contains only ``[a-z0-9-]`` (any other
    character becomes a dash, and runs of dashes collapse to one) and is
    truncated to 63 characters — the common cloud container name limit.
    """
    max_length = 63
    raw = prefix + random_string(max_length)
    sanitized = re.sub("[^a-z0-9-]", "-", raw)
    sanitized = re.sub("-+", "-", sanitized)
    return sanitized[:max_length].lower()
+
+
def read_stream(stream):
    """Drain *stream* (an iterable of byte chunks) and return the bytes."""
    return b"".join(stream)
diff --git a/libcloud/test/storage/test_azure_blobs_integration.py b/libcloud/test/storage/test_azure_blobs_integration.py
new file mode 100644
index 0000000..17d850a
--- /dev/null
+++ b/libcloud/test/storage/test_azure_blobs_integration.py
@@ -0,0 +1,62 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the 'License'); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import base64
+import string
+import sys
+import unittest
+
+from libcloud.test.storage.base import Integration, random_string
+
+
class AzuriteStorageTest(Integration.ContainerTestBase):
    """Integration tests against the legacy Azurite emulator in Docker."""

    provider = 'azure_blobs'

    # Well-known Azure development-storage credentials baked into Azurite.
    account = 'devstoreaccount1'
    secret = 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw=='  # pylint: disable=line-too-long

    image = 'arafato/azurite'
    port = 10000
    environment = {'executable': 'blob'}

    # Legacy Azurite cannot issue account SAS tokens, which the CDN URL
    # test relies on; subclasses with SAS support flip this to True.
    has_sas_support = False

    def test_cdn_url(self):
        if not self.has_sas_support:
            self.skipTest('Storage backend has no account SAS support')
        # Fix: previously this override returned without running the test,
        # so backends *with* SAS support (e.g. AzuriteV3StorageTest) never
        # actually exercised the CDN URL path.
        super().test_cdn_url()
+
+
+class AzuriteV3StorageTest(AzuriteStorageTest):
+    """Same test suite against the current (V3) Azurite image, which does
+    support account SAS tokens."""
+
+    image = 'mcr.microsoft.com/azure-storage/azurite'
+
+    has_sas_support = True
+
+    def test_upload_via_stream_with_content_encoding(self):
+        # Skipped: suspected upstream emulator bug, see linked issue.
+        self.skipTest('Possible bug in AzuriteV3, see https://github.com/Azure/Azurite/issues/629')
+
+
+class IotedgeStorageTest(Integration.ContainerTestBase):
+    """Integration tests against the Azure IoT Edge blob storage module,
+    which accepts arbitrary credentials configured via its environment."""
+
+    provider = 'azure_blobs'
+
+    # The IoT Edge module has no fixed credentials: generate a random
+    # account/key pair and feed the same values to the container below.
+    account = random_string(10, string.ascii_lowercase)
+    secret = base64.b64encode(random_string(20).encode('ascii')).decode('ascii')
+
+    image = 'mcr.microsoft.com/azure-blob-storage'
+    port = 11002
+    environment = {'LOCAL_STORAGE_ACCOUNT_NAME': account, 'LOCAL_STORAGE_ACCOUNT_KEY': secret}
+
+
+# Allow running this test module directly; unittest.main() parses CLI
+# arguments and exits with an appropriate status code.
+if __name__ == '__main__':
+    sys.exit(unittest.main())
diff --git a/requirements-tests.txt b/requirements-tests.txt
index 7ebe7ca..b85a08b 100644
--- a/requirements-tests.txt
+++ b/requirements-tests.txt
@@ -5,6 +5,7 @@ pylint==2.4.4
 mock==3.0.5
 codecov==2.1.10
 coverage==4.5.4
+docker
 requests
 requests_mock
 pytest==5.3.2