You are viewing a plain text version of this content. The canonical link for it is here.
Posted to notifications@libcloud.apache.org by cl...@apache.org on 2021/05/07 01:53:28 UTC
[libcloud] 02/02: Add integration tests for S3
This is an automated email from the ASF dual-hosted git repository.
clewolff pushed a commit to branch storage-s3-integration-tests
in repository https://gitbox.apache.org/repos/asf/libcloud.git
commit 305d680fde4c0398d9cc1a7e31428309fe5b0fbb
Author: Clemens Wolff <cl...@apache.org>
AuthorDate: Thu May 6 21:53:07 2021 -0400
Add integration tests for S3
---
integration/storage/base.py | 8 ++--
integration/storage/requirements.txt | 1 +
integration/storage/test_s3.py | 78 ++++++++++++++++++++++++++++++++++++
3 files changed, 84 insertions(+), 3 deletions(-)
diff --git a/integration/storage/base.py b/integration/storage/base.py
index 902ccd6..dda7d06 100644
--- a/integration/storage/base.py
+++ b/integration/storage/base.py
@@ -98,9 +98,7 @@ class Integration:
container = self.driver.get_container(container_name)
self.assertEqual(container.name, container_name)
- # check that an existing container can't be re-created
- with self.assertRaises(types.ContainerAlreadyExistsError):
- self.driver.create_container(container_name)
+ self.assert_existing_container_cannot_be_recreated(container)
# check that the new container can be listed
containers = self.driver.list_containers()
@@ -151,6 +149,10 @@ class Integration:
blobs = self.driver.list_container_objects(container)
self.assertEqual([blob.name for blob in blobs], [blob_name[::-1]])
    def assert_existing_container_cannot_be_recreated(self, container):
        # Default behavior shared by the integration suite: creating a
        # container that already exists must raise. Extracted as a hook so
        # provider-specific subclasses (e.g. S3) can override it when the
        # backing service does not signal duplicate creation this way.
        with self.assertRaises(types.ContainerAlreadyExistsError):
            self.driver.create_container(container.name)
+
    def assert_file_is_missing(self, container, obj):
        # Default check that an object is gone: deleting it again must raise.
        # NOTE(review): assumes the provider raises on deleting a missing
        # object — providers with idempotent deletes (e.g. S3) override this.
        with self.assertRaises(types.ObjectDoesNotExistError):
            self.driver.delete_object(obj)
diff --git a/integration/storage/requirements.txt b/integration/storage/requirements.txt
index cbdd599..d54d412 100644
--- a/integration/storage/requirements.txt
+++ b/integration/storage/requirements.txt
@@ -1,5 +1,6 @@
azure-identity
azure-mgmt-resource
azure-mgmt-storage
+boto3
docker
requests
diff --git a/integration/storage/test_s3.py b/integration/storage/test_s3.py
new file mode 100644
index 0000000..8276b80
--- /dev/null
+++ b/integration/storage/test_s3.py
@@ -0,0 +1,78 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the 'License'); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import sys
+import unittest
+
+try:
+ import boto3
+except ImportError:
+ boto3 = None
+
+from integration.storage.base import Integration
+from libcloud.storage import types
+
+
class S3Test(Integration.TestBase):
    """Integration test suite run against the live AWS S3 service.

    Credentials are taken from the environment; the whole suite is skipped
    when boto3 or either credential variable is unavailable, so the tests
    degrade gracefully on machines without AWS access.
    """

    provider = 's3'

    @classmethod
    def setUpClass(cls):
        # Skip (not fail) when the optional boto3 dependency is missing.
        if boto3 is None:
            raise unittest.SkipTest('missing boto3 library')

        # NOTE(review): the conventional AWS variable name is
        # AWS_SECRET_ACCESS_KEY; AWS_ACCESS_KEY_SECRET is kept here to match
        # the CI configuration — confirm before renaming.
        config = {
            key: os.getenv(key)
            for key in (
                'AWS_ACCESS_KEY_ID',
                'AWS_ACCESS_KEY_SECRET',
            )
        }

        for key, value in config.items():
            if not value:
                raise unittest.SkipTest('missing environment variable %s' % key)

        cls.account = config['AWS_ACCESS_KEY_ID']
        cls.secret = config['AWS_ACCESS_KEY_SECRET']

    @classmethod
    def tearDownClass(cls):
        # Best-effort cleanup: delete every bucket created by this test run
        # (identified by the shared name prefix), emptying each one first
        # since S3 refuses to delete a non-empty bucket.
        session = boto3.Session(
            aws_access_key_id=cls.account,
            aws_secret_access_key=cls.secret,
        )
        client = session.client('s3')
        # Build the resource from the same session so cleanup uses the same
        # explicit credentials as the client instead of the default
        # credential chain (bare boto3.resource('s3') may have no creds).
        s3 = session.resource('s3')

        buckets = (
            item['Name']
            for item in client.list_buckets()['Buckets']
            if item['Name'].startswith(cls.container_name_prefix)
        )

        for name in buckets:
            s3.Bucket(name).objects.delete()
            # boto3 client methods take keyword arguments only; the
            # positional form delete_bucket(name) raises TypeError.
            client.delete_bucket(Bucket=name)

    def assert_existing_container_cannot_be_recreated(self, container):
        # Overridden to a no-op: for the owning account S3's CreateBucket is
        # idempotent, so libcloud cannot raise ContainerAlreadyExistsError
        # here the way other providers do.
        pass

    def assert_file_is_missing(self, container, obj):
        # S3 DELETE is idempotent (deleting an absent key succeeds), so the
        # base suite's delete-based check does not apply; fetch the object
        # instead, which does raise for a missing key.
        with self.assertRaises(types.ObjectDoesNotExistError):
            self.driver.get_object(container.name, obj.name)
+
+
# Allow running this test module directly; unittest.main() performs test
# discovery in this module and handles process exit itself.
if __name__ == '__main__':
    sys.exit(unittest.main())