Posted to notifications@libcloud.apache.org by an...@apache.org on 2017/01/09 04:53:04 UTC

[44/51] [abbrv] libcloud git commit: linting and unused exports

linting and unused exports


Project: http://git-wip-us.apache.org/repos/asf/libcloud/repo
Commit: http://git-wip-us.apache.org/repos/asf/libcloud/commit/c3ee0f08
Tree: http://git-wip-us.apache.org/repos/asf/libcloud/tree/c3ee0f08
Diff: http://git-wip-us.apache.org/repos/asf/libcloud/diff/c3ee0f08

Branch: refs/heads/trunk
Commit: c3ee0f08a08b89b5e31417c392e1b3b0b1d75bb0
Parents: f5fa8a8
Author: Anthony Shaw <an...@apache.org>
Authored: Mon Jan 9 13:53:53 2017 +1100
Committer: Anthony Shaw <an...@apache.org>
Committed: Mon Jan 9 13:53:53 2017 +1100

----------------------------------------------------------------------
 libcloud/common/base.py                 |  5 ++--
 libcloud/httplib_ssl.py                 |  2 +-
 libcloud/storage/base.py                |  1 -
 libcloud/storage/drivers/azure_blobs.py | 43 +++++++++-------------------
 libcloud/storage/drivers/dummy.py       |  1 +
 libcloud/storage/drivers/oss.py         | 13 ++++-----
 libcloud/storage/drivers/s3.py          | 12 ++++----
 7 files changed, 29 insertions(+), 48 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/libcloud/blob/c3ee0f08/libcloud/common/base.py
----------------------------------------------------------------------
diff --git a/libcloud/common/base.py b/libcloud/common/base.py
index cb2764d..3c1de37 100644
--- a/libcloud/common/base.py
+++ b/libcloud/common/base.py
@@ -541,8 +541,9 @@ class Connection(object):
                      storage API when uploading a file.
 
         :type stream: ``bool``
-        :param stream: True to return an iterator in Response.iter_content and allow
-                    streaming of the response data (for downloading large files)
+        :param stream: True to return an iterator in Response.iter_content
+                    and allow streaming of the response data
+                    (for downloading large files)
 
         :return: An :class:`Response` instance.
         :rtype: :class:`Response` instance
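
For reference, a minimal sketch (not from this commit) of how a driver typically uses the ``stream`` flag documented above: request() is asked for a raw, streamed response and the body is consumed through Response.iter_content, mirroring the s3.py hunk further down. The helper name and object path are illustrative.

    # Sketch only -- mirrors the pattern in the s3.py hunk below; the helper
    # name and object path are illustrative.
    from libcloud.utils.files import read_in_chunks

    def _stream_object(driver, obj_path, chunk_size=None):
        # stream=True keeps the body available as an iterator
        # (Response.iter_content) instead of reading it into memory,
        # which is what makes downloading large objects feasible.
        response = driver.connection.request(obj_path, method='GET',
                                             stream=True, raw=True)
        return read_in_chunks(iterator=response.iter_content,
                              chunk_size=chunk_size)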

http://git-wip-us.apache.org/repos/asf/libcloud/blob/c3ee0f08/libcloud/httplib_ssl.py
----------------------------------------------------------------------
diff --git a/libcloud/httplib_ssl.py b/libcloud/httplib_ssl.py
index e6ab2ba..f47e491 100644
--- a/libcloud/httplib_ssl.py
+++ b/libcloud/httplib_ssl.py
@@ -153,7 +153,7 @@ class LibcloudBaseConnection(object):
                 if len(libcloud.security.CA_CERTS_PATH) > 1:
                     warnings.warn('Only 1 certificate path is supported')
                 self.ca_cert = libcloud.security.CA_CERTS_PATH[0]
-            else:     
+            else:
                 self.ca_cert = libcloud.security.CA_CERTS_PATH
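
For reference, libcloud.security.CA_CERTS_PATH is the module-level setting this block reads. A hedged sketch (not from this commit) of pointing it at a custom CA bundle before any connection is opened; the bundle path is made up:

    # Sketch only; the certificate path is illustrative.
    import libcloud.security

    # A single entry is used as-is; a list with more than one path triggers
    # the warning above and only the first entry is honoured.
    libcloud.security.CA_CERTS_PATH = ['/etc/ssl/certs/internal-ca-bundle.pem']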
 
 

http://git-wip-us.apache.org/repos/asf/libcloud/blob/c3ee0f08/libcloud/storage/base.py
----------------------------------------------------------------------
diff --git a/libcloud/storage/base.py b/libcloud/storage/base.py
index cd210aa..84d8904 100644
--- a/libcloud/storage/base.py
+++ b/libcloud/storage/base.py
@@ -23,7 +23,6 @@ from __future__ import with_statement
 import os.path                          # pylint: disable-msg=W0404
 import hashlib
 from os.path import join as pjoin
-from io import BufferedIOBase
 
 from libcloud.utils.py3 import httplib
 from libcloud.utils.py3 import next

http://git-wip-us.apache.org/repos/asf/libcloud/blob/c3ee0f08/libcloud/storage/drivers/azure_blobs.py
----------------------------------------------------------------------
diff --git a/libcloud/storage/drivers/azure_blobs.py b/libcloud/storage/drivers/azure_blobs.py
index b30d624..497ec48 100644
--- a/libcloud/storage/drivers/azure_blobs.py
+++ b/libcloud/storage/drivers/azure_blobs.py
@@ -43,9 +43,6 @@ from libcloud.storage.types import ContainerDoesNotExistError
 from libcloud.storage.types import ObjectDoesNotExistError
 from libcloud.storage.types import ObjectHashMismatchError
 
-if PY3:
-    from io import FileIO as file
-
 # Desired number of items in each response inside a paginated request
 RESPONSES_PER_REQUEST = 100
 
@@ -764,31 +761,19 @@ class AzureBlobsStorageDriver(StorageDriver):
 
         self._check_values(ex_blob_type, file_size)
 
-        with file(file_path, 'rb') as file_handle:
-            iterator = iter(file_handle)
-
-            # If size is greater than 64MB or type is Page, upload in chunks
-            if ex_blob_type == 'PageBlob' or file_size > AZURE_BLOCK_MAX_SIZE:
-                # For chunked upload of block blobs, the initial size must
-                # be 0.
-                if ex_blob_type == 'BlockBlob':
-                    object_size = None
-
-                object_path = self._get_object_path(container, object_name)
-
-                upload_func = self._upload_in_chunks
-                upload_func_kwargs = {'iterator': iterator,
-                                      'object_path': object_path,
-                                      'blob_type': ex_blob_type,
-                                      'lease': None}
-
-            return self._put_object(container=container,
-                                    object_name=object_name,
-                                    object_size=object_size,
-                                    file_path=file_path, extra=extra,
-                                    verify_hash=verify_hash,
-                                    blob_type=ex_blob_type,
-                                    use_lease=ex_use_lease)
+        # If size is greater than 64MB or type is Page, upload in chunks
+        if ex_blob_type == 'PageBlob' or file_size > AZURE_BLOCK_MAX_SIZE:
+            # For chunked upload of block blobs, the initial size must
+            # be 0.
+            if ex_blob_type == 'BlockBlob':
+                object_size = None
+        return self._put_object(container=container,
+                                object_name=object_name,
+                                object_size=object_size,
+                                file_path=file_path, extra=extra,
+                                verify_hash=verify_hash,
+                                blob_type=ex_blob_type,
+                                use_lease=ex_use_lease)
 
     def upload_object_via_stream(self, iterator, container, object_name,
                                  verify_hash=False, extra=None,
@@ -813,8 +798,6 @@ class AzureBlobsStorageDriver(StorageDriver):
 
         self._check_values(ex_blob_type, ex_page_blob_size)
 
-        object_path = self._get_object_path(container, object_name)
-
         return self._put_object(container=container,
                                 object_name=object_name,
                                 object_size=ex_page_blob_size,

http://git-wip-us.apache.org/repos/asf/libcloud/blob/c3ee0f08/libcloud/storage/drivers/dummy.py
----------------------------------------------------------------------
diff --git a/libcloud/storage/drivers/dummy.py b/libcloud/storage/drivers/dummy.py
index 4065123..eaec2d7 100644
--- a/libcloud/storage/drivers/dummy.py
+++ b/libcloud/storage/drivers/dummy.py
@@ -81,6 +81,7 @@ class DummyIterator(object):
     def __exit__(self, type, value, traceback):
         pass
 
+
 class DummyStorageDriver(StorageDriver):
     """
     Dummy Storage driver.

http://git-wip-us.apache.org/repos/asf/libcloud/blob/c3ee0f08/libcloud/storage/drivers/oss.py
----------------------------------------------------------------------
diff --git a/libcloud/storage/drivers/oss.py b/libcloud/storage/drivers/oss.py
index 24cde67..80b8f66 100644
--- a/libcloud/storage/drivers/oss.py
+++ b/libcloud/storage/drivers/oss.py
@@ -18,7 +18,6 @@
 import base64
 import codecs
 import hmac
-import os
 import time
 import sys
 from hashlib import sha1
@@ -40,14 +39,12 @@ from libcloud.utils.py3 import b
 from libcloud.utils.py3 import tostring
 from libcloud.utils.py3 import PY3
 from libcloud.utils.xml import fixxpath, findtext
-from libcloud.utils.files import guess_file_mime_type, read_in_chunks, \
-    exhaust_iterator
+from libcloud.utils.files import read_in_chunks
 from libcloud.common.types import InvalidCredsError, LibcloudError
 from libcloud.common.base import ConnectionUserAndKey, RawResponse, \
     XmlResponse
 from libcloud.common.types import MalformedResponseError
-from libcloud.storage.base import Object, Container, StorageDriver, \
-    DEFAULT_CONTENT_TYPE
+from libcloud.storage.base import Object, Container, StorageDriver
 from libcloud.storage.types import ContainerError
 from libcloud.storage.types import ContainerIsNotEmptyError
 from libcloud.storage.types import InvalidContainerNameError
@@ -588,9 +585,9 @@ class OSSStorageDriver(StorageDriver):
         name = urlquote(name)
         return name
 
-    def _put_object(self, container, object_name, method='PUT', query_args=None,
-                    extra=None, file_path=None, stream=None,
-                    verify_hash=False):
+    def _put_object(self, container, object_name, method='PUT',
+                    query_args=None, extra=None, file_path=None,
+                    stream=None, verify_hash=False):
         """
         Create an object and upload data using the given function.
         """

http://git-wip-us.apache.org/repos/asf/libcloud/blob/c3ee0f08/libcloud/storage/drivers/s3.py
----------------------------------------------------------------------
diff --git a/libcloud/storage/drivers/s3.py b/libcloud/storage/drivers/s3.py
index 59b9021..72ddcf6 100644
--- a/libcloud/storage/drivers/s3.py
+++ b/libcloud/storage/drivers/s3.py
@@ -16,7 +16,6 @@
 import base64
 import hmac
 import time
-import sys
 
 from hashlib import sha1
 
@@ -417,11 +416,12 @@ class BaseS3StorageDriver(StorageDriver):
         response = self.connection.request(obj_path, method='GET',
                                            stream=True, raw=True)
 
-        return self._get_object(obj=obj, callback=read_in_chunks,
-                                response=response,
-                                callback_kwargs={'iterator': response.iter_content,
-                                                 'chunk_size': chunk_size},
-                                success_status_code=httplib.OK)
+        return self._get_object(
+            obj=obj, callback=read_in_chunks,
+            response=response,
+            callback_kwargs={'iterator': response.iter_content,
+                             'chunk_size': chunk_size},
+            success_status_code=httplib.OK)
 
     def upload_object(self, file_path, container, object_name, extra=None,
                       verify_hash=True, ex_storage_class=None):
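
For reference (not from this commit): the reflowed download_object_as_stream above still returns a lazy generator of byte chunks fed by Response.iter_content. A hedged usage sketch; credentials, bucket and object names are made up:

    # Sketch only; credentials, bucket and object names are illustrative.
    from libcloud.storage.types import Provider
    from libcloud.storage.providers import get_driver

    cls = get_driver(Provider.S3)
    driver = cls('access-key-id', 'secret-key')
    obj = driver.get_object(container_name='my-bucket',
                            object_name='big-dataset.tar.gz')

    # Chunks arrive lazily, so the object never has to fit in memory at once.
    with open('/tmp/big-dataset.tar.gz', 'wb') as out:
        for chunk in driver.download_object_as_stream(obj,
                                                      chunk_size=1024 * 1024):
            out.write(chunk)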