Posted to notifications@libcloud.apache.org by cl...@apache.org on 2021/03/20 15:51:30 UTC

[libcloud] branch storage-azure-integration-tests created (now 4b8d856)

This is an automated email from the ASF dual-hosted git repository.

clewolff pushed a change to branch storage-azure-integration-tests
in repository https://gitbox.apache.org/repos/asf/libcloud.git.


      at 4b8d856  Add integration tests for Azure Storage driver

This branch includes the following new commits:

     new 4b8d856  Add integration tests for Azure Storage driver

The 1 revision listed above as "new" is entirely new to this
repository and is described in the separate email appended below.  Revisions
listed as "add" were already present in the repository and have only
been added to this reference.


[libcloud] 01/01: Add integration tests for Azure Storage driver

Posted by cl...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

clewolff pushed a commit to branch storage-azure-integration-tests
in repository https://gitbox.apache.org/repos/asf/libcloud.git

commit 4b8d856672b3ebae3cba94f2c5f2252338d7c021
Author: Clemens Wolff <cl...@microsoft.com>
AuthorDate: Sat Mar 20 11:46:18 2021 -0400

    Add integration tests for Azure Storage driver
---
 .../test/storage/test_azure_blobs_integration.py   | 401 +++++++++++++++++++++
 requirements-tests.txt                             |   1 +
 2 files changed, 402 insertions(+)
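
For context, the new tests exercise the normal public storage driver API.
Below is a minimal sketch of constructing the same driver against a locally
running Azurite v2 emulator, using the account, key, host and port defaults
defined in the test file further down; the standalone snippet itself is an
illustration, not part of this commit:

    from libcloud.storage import providers

    # Well-known Azure Storage emulator development credentials (see
    # AzuriteStorageTest below); assumes Azurite listens on localhost:10000.
    driver = providers.get_driver("azure_blobs")(
        key="devstoreaccount1",
        secret=(
            "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6I"
            "FsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw=="
        ),
        host="localhost",
        port=10000,
        secure=False,
    )
    print([container.name for container in driver.list_containers()])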

diff --git a/libcloud/test/storage/test_azure_blobs_integration.py b/libcloud/test/storage/test_azure_blobs_integration.py
new file mode 100644
index 0000000..ee813f6
--- /dev/null
+++ b/libcloud/test/storage/test_azure_blobs_integration.py
@@ -0,0 +1,401 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import base64
+import gzip
+import io
+import os
+import random
+import sys
+import re
+import socket
+import string
+import tempfile
+import time
+import unittest
+
+import requests
+
+try:
+    import docker
+except ImportError:
+    docker = None
+
+from libcloud.storage import providers, types
+
+MB = 1024 * 1024
+
+
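+# The abstract test cases are nested inside Base so that unittest does not
+# collect and run them directly; only the concrete subclasses at the bottom
+# of this file are executed.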
+class Base:
+    class TestBase(unittest.TestCase):
+        provider = "azure_blobs"
+        account = None
+        secret = None
+
+        def setUp(self):
+            for required in "provider", "account", "secret":
+                value = getattr(self, required, None)
+                if value is None:
+                    raise unittest.SkipTest("config {} not set".format(required))
+
+            kwargs = {"key": self.account, "secret": self.secret}
+
+            for optional in "host", "port", "secure":
+                value = getattr(self, optional, None)
+                if value is not None:
+                    kwargs[optional] = value
+
+            self.driver = providers.get_driver(self.provider)(**kwargs)
+
+        def tearDown(self):
+            for container in self.driver.list_containers():
+                for obj in container.list_objects():
+                    obj.delete()
+                container.delete()
+
+        def test_containers(self):
+            # make a new container
+            container_name = _random_container_name()
+            container = self.driver.create_container(container_name)
+            self.assertEqual(container.name, container_name)
+            container = self.driver.get_container(container_name)
+            self.assertEqual(container.name, container_name)
+
+            # check that an existing container can't be re-created
+            with self.assertRaises(types.ContainerAlreadyExistsError):
+                self.driver.create_container(container_name)
+
+            # check that the new container can be listed
+            containers = self.driver.list_containers()
+            self.assertEqual([c.name for c in containers], [container_name])
+
+            # delete the container
+            self.driver.delete_container(container)
+
+            # check that a deleted container can't be looked up
+            with self.assertRaises(types.ContainerDoesNotExistError):
+                self.driver.get_container(container_name)
+
+            # check that the container is deleted
+            containers = self.driver.list_containers()
+            self.assertEqual([c.name for c in containers], [])
+
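+        # Shared upload/list/download/delete lifecycle assertions; the
+        # concrete test_objects* methods below plug in different upload and
+        # download strategies.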
+        def _test_objects(self, do_upload, do_download, size=1 * MB):
+            content = os.urandom(size)
+            blob_name = "testblob"
+            container = self.driver.create_container(_random_container_name())
+
+            # upload a file
+            obj = do_upload(container, blob_name, content)
+            self.assertEqual(obj.name, blob_name)
+            obj = self.driver.get_object(container.name, blob_name)
+
+            # check that the file can be listed
+            blobs = self.driver.list_container_objects(container)
+            self.assertEqual([blob.name for blob in blobs], [blob_name])
+
+            # upload another file and check it's excluded in prefix listing
+            do_upload(container, blob_name[::-1], content[::-1])
+            blobs = self.driver.list_container_objects(
+                container, ex_prefix=blob_name[0:3]
+            )
+            self.assertEqual([blob.name for blob in blobs], [blob_name])
+
+            # check that the file can be read back
+            self.assertEqual(do_download(obj), content)
+
+            # delete the file
+            self.driver.delete_object(obj)
+
+            # check that a missing file can't be deleted or looked up
+            with self.assertRaises(types.ObjectDoesNotExistError):
+                self.driver.delete_object(obj)
+            with self.assertRaises(types.ObjectDoesNotExistError):
+                self.driver.get_object(container.name, blob_name)
+
+            # check that the file is deleted
+            blobs = self.driver.list_container_objects(container)
+            self.assertEqual([blob.name for blob in blobs], [blob_name[::-1]])
+
+        def test_objects(self, size=1 * MB):
+            def do_upload(container, blob_name, content):
+                infile = self._create_tempfile(content=content)
+                return self.driver.upload_object(infile, container, blob_name)
+
+            def do_download(obj):
+                outfile = self._create_tempfile()
+                self.driver.download_object(obj, outfile, overwrite_existing=True)
+                with open(outfile, "rb") as fobj:
+                    return fobj.read()
+
+            self._test_objects(do_upload, do_download, size)
+
+        def test_objects_range_downloads(self):
+            blob_name = "testblob-range"
+            content = b"0123456789"
+            container = self.driver.create_container(_random_container_name())
+
+            infile = self._create_tempfile(content=content)
+            obj = self.driver.upload_object(infile, container, blob_name)
+            self.assertEqual(obj.name, blob_name)
+            self.assertEqual(obj.size, len(content))
+
+            obj = self.driver.get_object(container.name, blob_name)
+            self.assertEqual(obj.name, blob_name)
+            self.assertEqual(obj.size, len(content))
+
+            values = [
+                {"start_bytes": 0, "end_bytes": 1, "expected_content": b"0"},
+                {"start_bytes": 1, "end_bytes": 5, "expected_content": b"1234"},
+                {"start_bytes": 5, "end_bytes": None, "expected_content": b"56789"},
+                {
+                    "start_bytes": 5,
+                    "end_bytes": len(content),
+                    "expected_content": b"56789",
+                },
+                {
+                    "start_bytes": 0,
+                    "end_bytes": None,
+                    "expected_content": b"0123456789",
+                },
+                {
+                    "start_bytes": 0,
+                    "end_bytes": len(content),
+                    "expected_content": b"0123456789",
+                },
+            ]
+
+            for value in values:
+                # 1. download_object_range
+                start_bytes = value["start_bytes"]
+                end_bytes = value["end_bytes"]
+                outfile = self._create_tempfile()
+
+                result = self.driver.download_object_range(
+                    obj,
+                    outfile,
+                    start_bytes=start_bytes,
+                    end_bytes=end_bytes,
+                    overwrite_existing=True,
+                )
+                self.assertTrue(result)
+
+                with open(outfile, "rb") as fobj:
+                    downloaded_content = fobj.read()
+
+                if end_bytes is not None:
+                    expected_content = content[start_bytes:end_bytes]
+                else:
+                    expected_content = content[start_bytes:]
+
+                msg = 'Expected "%s", got "%s" for values: %s' % (
+                    expected_content,
+                    downloaded_content,
+                    str(value),
+                )
+                self.assertEqual(downloaded_content, expected_content, msg)
+                self.assertEqual(downloaded_content, value["expected_content"], msg)
+
+                # 2. download_object_range_as_stream
+                downloaded_content = _read_stream(
+                    self.driver.download_object_range_as_stream(
+                        obj, start_bytes=start_bytes, end_bytes=end_bytes
+                    )
+                )
+                self.assertEqual(downloaded_content, expected_content)
+
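+        # Re-runs the object lifecycle with a large payload whose size (in MB)
+        # is taken from the LARGE_FILE_SIZE_MB environment variable.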
+        @unittest.skipUnless(os.getenv("LARGE_FILE_SIZE_MB"), "config not set")
+        def test_objects_large(self):
+            size = int(float(os.environ["LARGE_FILE_SIZE_MB"]) * MB)
+            self.test_objects(size)
+
+        def test_objects_stream_io(self):
+            def do_upload(container, blob_name, content):
+                content = io.BytesIO(content)
+                return self.driver.upload_object_via_stream(
+                    content, container, blob_name
+                )
+
+            def do_download(obj):
+                return _read_stream(self.driver.download_object_as_stream(obj))
+
+            self._test_objects(do_upload, do_download)
+
+        def test_objects_stream_iterable(self):
+            def do_upload(container, blob_name, content):
+                content = iter([content[i : i + 1] for i in range(len(content))])
+                return self.driver.upload_object_via_stream(
+                    content, container, blob_name
+                )
+
+            def do_download(obj):
+                return _read_stream(self.driver.download_object_as_stream(obj))
+
+            self._test_objects(do_upload, do_download)
+
+        def test_upload_via_stream_with_content_encoding(self):
+            object_name = "content_encoding.gz"
+            content = gzip.compress(os.urandom(MB // 100))
+            container = self.driver.create_container(_random_container_name())
+            self.driver.upload_object_via_stream(
+                iter(content),
+                container,
+                object_name,
+                headers={"Content-Encoding": "gzip"},
+            )
+
+            obj = self.driver.get_object(container.name, object_name)
+
+            self.assertEqual(obj.extra.get("content_encoding"), "gzip")
+
+        def test_cdn_url(self):
+            content = os.urandom(MB // 100)
+            container = self.driver.create_container(_random_container_name())
+            obj = self.driver.upload_object_via_stream(iter(content), container, "cdn")
+
+            response = requests.get(self.driver.get_object_cdn_url(obj))
+            response.raise_for_status()
+
+            self.assertEqual(response.content, content)
+
+        def _create_tempfile(self, prefix="", content=b""):
+            fobj, path = tempfile.mkstemp(prefix=prefix, text=False)
+            os.write(fobj, content)
+            os.close(fobj)
+            self.addCleanup(os.remove, path)
+            return path
+
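+    # Runs the same test suite against a storage emulator launched in a local
+    # Docker container; skipped when the docker client library or a reachable
+    # Docker daemon is not available.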
+    class ContainerTestBase(TestBase):
+        image = None
+        version = "latest"
+        environment = {}
+
+        host = "localhost"
+        port = None
+        secure = False
+
+        client = None
+        container = None
+
+        @classmethod
+        def setUpClass(cls):
+            if docker is None:
+                raise unittest.SkipTest("missing docker library")
+
+            try:
+                cls.client = docker.from_env()
+            except docker.errors.DockerException:
+                raise unittest.SkipTest("unable to create docker client")
+
+            for required in "image", "port":
+                value = getattr(cls, required, None)
+                if value is None:
+                    raise unittest.SkipTest("config {} not set".format(required))
+
+            cls.container = cls.client.containers.run(
+                "{}:{}".format(cls.image, cls.version),
+                detach=True,
+                auto_remove=True,
+                ports={cls.port: cls.port},
+                environment=cls.environment,
+            )
+
+            timeout = 10
+            start_time = time.perf_counter()
+
+            while True:
+                try:
+                    with socket.create_connection(
+                        (cls.host, cls.port), timeout=timeout
+                    ):
+                        break
+                except OSError as ex:
+                    time.sleep(1)
+                    if time.perf_counter() - start_time >= timeout:
+                        raise TimeoutError(
+                            "Waited too long for the port {} on host {} to start accepting "
+                            "connections.".format(cls.port, cls.host)
+                        ) from ex
+
+        @classmethod
+        def tearDownClass(cls):
+            for line in cls.container.logs().splitlines():
+                print(line)
+            cls.container.kill()
+
+
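+# The account name and key below are the well-known, publicly documented Azure
+# Storage emulator development credentials, not a real secret.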
+class AzuriteStorageTest(Base.ContainerTestBase):
+    account = "devstoreaccount1"
+    secret = (
+        "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6I"
+        "FsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw=="
+    )
+
+    image = "arafato/azurite"
+    port = 10000
+    environment = {"executable": "blob"}
+
+    has_sas_support = False
+
+    def test_cdn_url(self):
+        if not self.has_sas_support:
+            self.skipTest("Storage backend has no account SAS support")
+
+        # only reached by subclasses that support account SAS
+        super().test_cdn_url()
+
+
+class AzuriteV3StorageTest(AzuriteStorageTest):
+    image = "mcr.microsoft.com/azure-storage/azurite"
+    has_sas_support = True
+
+    def test_upload_via_stream_with_content_encoding(self):
+        self.skipTest(
+            "Possible bug in AzuriteV3, see https://github.com/Azure/Azurite/issues/629"
+        )
+
+
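+# Exercises the Azure IoT Edge blob storage module image, which takes its
+# local account name and key from environment variables.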
+class IotedgeStorageTest(Base.ContainerTestBase):
+    account = "".join(random.choice(string.ascii_lowercase) for _ in range(10))
+    secret = base64.b64encode(
+        "".join(random.choice(string.hexdigits) for _ in range(20)).encode("ascii")
+    ).decode("ascii")
+
+    image = "mcr.microsoft.com/azure-blob-storage"
+    port = 11002
+    environment = {
+        "LOCAL_STORAGE_ACCOUNT_NAME": account,
+        "LOCAL_STORAGE_ACCOUNT_KEY": secret,
+    }
+
+
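+# Azure container names may only contain lowercase letters, digits and hyphens
+# and are limited to 63 characters, hence the sanitization below.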
+def _random_container_name(prefix="test"):
+    max_length = 63
+    suffix = "".join(random.choice(string.ascii_lowercase) for _ in range(max_length))
+    name = prefix + suffix
+    name = re.sub("[^a-z0-9-]", "-", name)
+    name = re.sub("-+", "-", name)
+    name = name[:max_length]
+    name = name.lower()
+    return name
+
+
+def _read_stream(stream):
+    buffer = io.BytesIO()
+    buffer.writelines(stream)
+    buffer.seek(0)
+    return buffer.read()
+
+
+if __name__ == "__main__":
+    sys.exit(unittest.main())
diff --git a/requirements-tests.txt b/requirements-tests.txt
index 7ebe7ca..b85a08b 100644
--- a/requirements-tests.txt
+++ b/requirements-tests.txt
@@ -5,6 +5,7 @@ pylint==2.4.4
 mock==3.0.5
 codecov==2.1.10
 coverage==4.5.4
+docker
 requests
 requests_mock
 pytest==5.3.2