Explorar o código

Merge pull request #28 from VJalili/master

New Features: (1) Upload a file to bucket, (2) list keys with a prefix, (3) check for the existence of key within bucket, and (4) get a URL (with TTL) for a given object.
Enis Afgan hai 9 anos
pai
achega
ace93b0bcb

+ 42 - 2
cloudbridge/cloud/interfaces/resources.py

@@ -2054,6 +2054,26 @@ class BucketObject(CloudResource):
         """
         pass
 
+    @abstractmethod
+    def upload_from_file(self, path):
+        """
+        Stores the contents of the file pointed to by the "path" variable.
+        :param path: Absolute path to the file to be uploaded to S3.
+        :return: void
+        """
+        pass
+
+    @abstractmethod
+    def upload_from_large_file(self, path):
+        """
+        Stores the contents of the large file pointed to by the "path" variable.
+        This function splits the file into smaller chunks, and uploads chunks
+        in turn.
+        :param path: Absolute path to the large file to be uploaded to S3.
+        :return: void
+        """
+        pass
+
     @abstractmethod
     def delete(self):
         """
@@ -2064,6 +2084,17 @@ class BucketObject(CloudResource):
         """
         pass
 
+    @abstractmethod
+    def generate_url(self, expires_in):
+        """
+        Generates a URL to this object. If the object is public, `expires_in`
+        argument is not necessary, but if the object is private, the life time
+        of URL is set using `expires_in` argument.
+        :param expires_in: time to live of the generated URL in seconds.
+        :return: A URL to access the object.
+        """
+        pass
+
 
 class Bucket(PageableObjectMixin, CloudResource):
 
@@ -2103,9 +2134,9 @@ class Bucket(PageableObjectMixin, CloudResource):
         pass
 
     @abstractmethod
-    def list(self, limit=None, marker=None):
+    def list(self, limit=None, marker=None, prefix=None):
         """
-        List all objects within this bucket.
+        List all objects, or those that adhere to the prefix criterion, within this bucket.
 
         :rtype: :class:``.BucketObject``
         :return: List of all available BucketObjects within this bucket.
@@ -2135,3 +2166,12 @@ class Bucket(PageableObjectMixin, CloudResource):
         :return: The newly created bucket object
         """
         pass
+
+    @abstractmethod
+    def exists(self, key):
+        """
+        Determines if an object with given key exists in this bucket.
+        :param key: The key to be searched in the bucket.
+        :return: Boolean: True if the key exists, False if it does not.
+        """
+        pass

+ 53 - 13
cloudbridge/cloud/providers/aws/resources.py

@@ -674,17 +674,17 @@ class AWSSecurityGroup(BaseSecurityGroup):
                  cidr_ip=None, src_group=None):
         for rule in self._security_group.rules:
             if (rule.ip_protocol == ip_protocol and
-               rule.from_port == from_port and
-               rule.to_port == to_port and
-               rule.grants[0].cidr_ip == cidr_ip) or \
-               (rule.grants[0].group_id == src_group.id if src_group and
-               hasattr(rule.grants[0], 'group_id') else False):
+                        rule.from_port == from_port and
+                        rule.to_port == to_port and
+                        rule.grants[0].cidr_ip == cidr_ip) or \
+                    (rule.grants[0].group_id == src_group.id if src_group and
+                        hasattr(rule.grants[0], 'group_id') else False):
                 return AWSSecurityGroupRule(self._provider, rule, self)
         return None
 
     def to_json(self):
-        attr = inspect.getmembers(self, lambda a: not(inspect.isroutine(a)))
-        js = {k: v for(k, v) in attr if not k.startswith('_')}
+        attr = inspect.getmembers(self, lambda a: not (inspect.isroutine(a)))
+        js = {k: v for (k, v) in attr if not k.startswith('_')}
         json_rules = [r.to_json() for r in self.rules]
         js['rules'] = [json.loads(r) for r in json_rules]
         if js.get('network_id'):
@@ -740,8 +740,8 @@ class AWSSecurityGroupRule(BaseSecurityGroupRule):
         return None
 
     def to_json(self):
-        attr = inspect.getmembers(self, lambda a: not(inspect.isroutine(a)))
-        js = {k: v for(k, v) in attr if not k.startswith('_')}
+        attr = inspect.getmembers(self, lambda a: not (inspect.isroutine(a)))
+        js = {k: v for (k, v) in attr if not k.startswith('_')}
         js['group'] = self.group.id if self.group else ''
         js['parent'] = self.parent.id if self.parent else ''
         return json.dumps(js, sort_keys=True)
@@ -809,6 +809,24 @@ class AWSBucketObject(BaseBucketObject):
         """
         self._key.set_contents_from_string(data)
 
+    def upload_from_file(self, path):
+        """
+        Stores the contents of the file pointed to by the "path" variable.
+        :param path: Absolute path to the file to be uploaded to S3.
+        :return: void
+        """
+        self._key.set_contents_from_filename(path)
+
+    def upload_from_large_file(self, path):
+        """
+        Stores the contents of the large file pointed to by the "path" variable.
+        This function splits the file into smaller chunks, and uploads chunks
+        in turn.
+        :param path: Absolute path to the large file to be uploaded to S3.
+        :return: void
+        """
+        raise NotImplementedError("This functionality is not implemented yet.")
+
     def delete(self):
         """
         Delete this object.
@@ -818,6 +836,16 @@ class AWSBucketObject(BaseBucketObject):
         """
         self._key.delete()
 
+    def generate_url(self, expires_in=0):
+        """
+        Generates a URL to this object. If the object is public, `expires_in`
+        argument is not necessary, but if the object is private, the life time
+        of URL is set using `expires_in` argument.
+        :param expires_in: time to live of the generated URL in seconds.
+        :return: The URL to access the object.
+        """
+        return self._key.generate_url(expires_in=expires_in)
+
 
 class AWSBucket(BaseBucket):
 
@@ -841,11 +869,11 @@ class AWSBucket(BaseBucket):
         Retrieve a given object from this bucket.
         """
         key = Key(self._bucket, key)
-        if key.exists():
+        if key and key.exists():
             return AWSBucketObject(self._provider, key)
         return None
 
-    def list(self, limit=None, marker=None):
+    def list(self, limit=None, marker=None, prefix=None):
         """
         List all objects within this bucket.
 
@@ -853,7 +881,8 @@ class AWSBucket(BaseBucket):
         :return: List of all available BucketObjects within this bucket.
         """
         objects = [AWSBucketObject(self._provider, obj)
-                   for obj in self._bucket.list()]
+                   for obj in self._bucket.list(prefix=prefix)]
+
         return ClientPagedResultList(self._provider, objects,
                                      limit=limit, marker=marker)
 
@@ -867,6 +896,17 @@ class AWSBucket(BaseBucket):
         key = Key(self._bucket, name)
         return AWSBucketObject(self._provider, key)
 
+    def exists(self, key):
+        """
+        Determines if an object with given key exists in this bucket.
+        :param key: The key to be searched in the bucket.
+        :return: Boolean: True if the key exists, False if it does not.
+        """
+        key = Key(self._bucket, key)
+        if key and key.exists():
+            return True
+        return False
+
 
 class AWSRegion(BaseRegion):
 
@@ -1100,7 +1140,7 @@ class AWSRouter(BaseRouter):
     def state(self):
         self.refresh()  # Explicitly refresh the local object
         if self._router.attachments and \
-           self._router.attachments[0].state == 'available':
+                        self._router.attachments[0].state == 'available':
             return RouterState.ATTACHED
         return RouterState.DETACHED
 

+ 40 - 1
cloudbridge/cloud/providers/openstack/resources.py

@@ -1062,6 +1062,24 @@ class OpenStackBucketObject(BaseBucketObject):
         self._provider.swift.put_object(self.cbcontainer.name, self.name,
                                         data)
 
+    def upload_from_file(self, path):
+        """
+        Stores the contents of the file pointed to by the "path" variable.
+        :param path: Absolute path to the file to be uploaded to S3.
+        :return: void
+        """
+        raise NotImplementedError("This functionality is not implemented yet.")
+
+    def upload_from_large_file(self, path):
+        """
+        Stores the contents of the large file pointed to by the "path" variable.
+        This function splits the file into smaller chunks, and uploads chunks
+        in turn.
+        :param path: Absolute path to the large file to be uploaded to S3.
+        :return: void
+        """
+        raise NotImplementedError("This functionality is not implemented yet.")
+
     def delete(self):
         """
         Delete this object.
@@ -1077,6 +1095,16 @@ class OpenStackBucketObject(BaseBucketObject):
                 return True
         return False
 
+    def generate_url(self, expires_in=0):
+        """
+        Generates a URL to this object. If the object is public, `expires_in`
+        argument is not necessary, but if the object is private, the life time
+        of URL is set using `expires_in` argument.
+        :param expires_in: time to live of the generated URL in seconds.
+        :return: A URL to access the object.
+        """
+        raise NotImplementedError("This functionality is not implemented yet.")
+
 
 class OpenStackBucket(BaseBucket):
 
@@ -1107,13 +1135,16 @@ class OpenStackBucket(BaseBucket):
         else:
             return None
 
-    def list(self, limit=None, marker=None):
+    def list(self, limit=None, marker=None, prefix=None):
         """
         List all objects within this bucket.
 
         :rtype: BucketObject
         :return: List of all available BucketObjects within this bucket.
         """
+        if prefix is not None:
+            raise NotImplementedError("This functionality is not implemented yet.")
+
         _, object_list = self._provider.swift.get_container(
             self.name, limit=oshelpers.os_result_limit(self._provider, limit),
             marker=marker)
@@ -1134,3 +1165,11 @@ class OpenStackBucket(BaseBucket):
     def create_object(self, object_name):
         self._provider.swift.put_object(self.name, object_name, None)
         return self.get(object_name)
+
+    def exists(self, key):
+        """
+        Determines if an object with given key exists in this bucket.
+        :param key: The key to be searched in the bucket.
+        :return: Boolean: True if the key exists, False if it does not.
+        """
+        raise NotImplementedError("This functionality is not implemented yet.")

+ 31 - 3
test/test_object_store_service.py

@@ -1,6 +1,8 @@
 from datetime import datetime
 from io import BytesIO
 import uuid
+import urllib
+import os
 
 from cloudbridge.cloud.interfaces.resources import BucketObject
 from test.helpers import ProviderTestBase
@@ -82,7 +84,8 @@ class CloudObjectStoreServiceTestCase(ProviderTestBase):
         self.assertEqual([], objects)
 
         with helpers.cleanup_action(lambda: test_bucket.delete()):
-            obj_name = "hello_world.txt"
+            obj_name_prefix = "hello"
+            obj_name = obj_name_prefix + "_world.txt"
             obj = test_bucket.create_object(obj_name)
 
             self.assertTrue(
@@ -132,6 +135,13 @@ class CloudObjectStoreServiceTestCase(ProviderTestBase):
                     isinstance(obj_too, BucketObject),
                     "Did not get object {0} of expected type.".format(obj_too))
 
+                prefix_filtered_list = test_bucket.list(prefix=obj_name_prefix)
+                self.assertTrue(
+                    len(objs) == len(prefix_filtered_list) == 1,
+                    'The number of objects returned by list function, '
+                    'with and without a prefix, are expected to be equal, '
+                    'but its detected otherwise.')
+
             objs = test_bucket.list()
             found_objs = [o for o in objs if o.name == obj_name]
             self.assertTrue(
@@ -141,16 +151,15 @@ class CloudObjectStoreServiceTestCase(ProviderTestBase):
 
     @helpers.skipIfNoService(['object_store'])
     def test_upload_download_bucket_content(self):
-
         name = "cbtestbucketobjs-{0}".format(uuid.uuid4())
         test_bucket = self.provider.object_store.create(name)
 
         with helpers.cleanup_action(lambda: test_bucket.delete()):
             obj_name = "hello_upload_download.txt"
             obj = test_bucket.create_object(obj_name)
+            content = "Hello World. Here's some content."
 
             with helpers.cleanup_action(lambda: obj.delete()):
-                content = b"Hello World. Here's some content."
                 # TODO: Upload and download methods accept different parameter
                 # types. Need to make this consistent - possibly provider
                 # multiple methods like upload_from_file, from_stream etc.
@@ -162,3 +171,22 @@ class CloudObjectStoreServiceTestCase(ProviderTestBase):
                 for data in obj.iter_content():
                     target_stream2.write(data)
                 self.assertEqual(target_stream2.getvalue(), content)
+                url = obj.generate_url(100)
+                urllib.urlretrieve(url, "Downloaded_" + obj.name)
+                with open("Downloaded_" + obj.name) as tmpFile:
+                    for line in tmpFile:
+                        self.assertEqual(line, content)
+                os.remove("Downloaded_" + obj.name)
+
+            obj = test_bucket.create_object(obj_name)
+            with helpers.cleanup_action(lambda: obj.delete):
+                with open("hello_upload_download.txt", "w+") as tmpFile:
+                    tmpFile.write(content)
+                    obj.upload_from_file(tmpFile.name)
+                    target_stream = BytesIO()
+                    obj.save_content(target_stream)
+                    self.assertEqual(target_stream.getvalue(), content)
+                    target_stream2 = BytesIO()
+                    for data in obj.iter_content():
+                        target_stream2.write(data)
+                    self.assertEqual(target_stream2.getvalue(), content)