diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py
index f60df5c6a26c..c1d848bd7f97 100644
--- a/gcloud/storage/bucket.py
+++ b/gcloud/storage/bucket.py
@@ -20,8 +20,9 @@ class _KeyIterator(Iterator):
     :type bucket: :class:`gcloud.storage.bucket.Bucket`
     :param bucket: The bucket from which to list keys.
     """
-    def __init__(self, bucket, connection=None, extra_params=None):
+    def __init__(self, bucket, extra_params=None):
         self.bucket = bucket
+        self.prefixes = ()
         super(_KeyIterator, self).__init__(
             connection=bucket.connection, path=bucket.path + '/o',
             extra_params=extra_params)
@@ -32,6 +33,7 @@ def get_items_from_response(self, response):
         :type response: dict
         :param response: The JSON API response for a page of keys.
         """
+        self.prefixes = tuple(response.get('prefixes', ()))
         for item in response.get('items', []):
             yield Key.from_dict(item, bucket=self.bucket)

@@ -170,6 +172,42 @@ def get_all_keys(self):
         """
         return list(self)

+    def iterator(self, prefix=None, delimiter=None, max_results=None,
+                 versions=None):
+        """Return an iterator used to find keys in the bucket.
+
+        :type prefix: string or None
+        :param prefix: optional prefix used to filter keys.
+
+        :type delimiter: string or None
+        :param delimiter: optional delimiter, used with ``prefix`` to
+                          emulate hierarchy.
+
+        :type max_results: integer or None
+        :param max_results: maximum number of keys to return.
+
+        :type versions: boolean or None
+        :param versions: whether object versions should be returned as
+                         separate keys.
+
+        :rtype: :class:`_KeyIterator`
+        """
+        extra_params = {}
+
+        if prefix is not None:
+            extra_params['prefix'] = prefix
+
+        if delimiter is not None:
+            extra_params['delimiter'] = delimiter
+
+        if max_results is not None:
+            extra_params['maxResults'] = max_results
+
+        if versions is not None:
+            extra_params['versions'] = versions
+
+        return self._iterator_class(self, extra_params=extra_params)
+
     def new_key(self, key):
         """Given path name (or Key), return a :class:`.storage.key.Key` object.
@@ -665,7 +703,6 @@ def make_public(self, recursive=False, future=False):
             doa.save()

         if recursive:
-            iterator = self._iterator_class(self)
-            for key in iterator:
+            for key in self:
                 key.get_acl().all().grant_read()
                 key.save_acl()
diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py
index a8464063eb1f..83a88a35d397 100644
--- a/gcloud/storage/test_bucket.py
+++ b/gcloud/storage/test_bucket.py
@@ -3,6 +3,49 @@
 import unittest2


+class Test__KeyIterator(unittest2.TestCase):
+
+    def _getTargetClass(self):
+        from gcloud.storage.bucket import _KeyIterator
+        return _KeyIterator
+
+    def _makeOne(self, *args, **kw):
+        return self._getTargetClass()(*args, **kw)
+
+    def test_ctor(self):
+        connection = _Connection()
+        bucket = _Bucket(connection)
+        iterator = self._makeOne(bucket)
+        self.assertTrue(iterator.bucket is bucket)
+        self.assertTrue(iterator.connection is connection)
+        self.assertEqual(iterator.path, '%s/o' % bucket.path)
+        self.assertEqual(iterator.page_number, 0)
+        self.assertEqual(iterator.next_page_token, None)
+        self.assertEqual(iterator.prefixes, ())
+
+    def test_get_items_from_response_empty(self):
+        connection = _Connection()
+        bucket = _Bucket(connection)
+        iterator = self._makeOne(bucket)
+        self.assertEqual(list(iterator.get_items_from_response({})), [])
+        self.assertEqual(iterator.prefixes, ())
+
+    def test_get_items_from_response_non_empty(self):
+        from gcloud.storage.key import Key
+        KEY = 'key'
+        response = {'items': [{'name': KEY}], 'prefixes': ['foo']}
+        connection = _Connection()
+        bucket = _Bucket(connection)
+        iterator = self._makeOne(bucket)
+        keys = list(iterator.get_items_from_response(response))
+        self.assertEqual(len(keys), 1)
+        key = keys[0]
+        self.assertTrue(isinstance(key, Key))
+        self.assertTrue(key.connection is connection)
+        self.assertEqual(key.name, KEY)
+        self.assertEqual(iterator.prefixes, ('foo',))
+
+
 class Test_Bucket(unittest2.TestCase):

     def _getTargetClass(self):
@@ -170,6 +213,41 @@ def test_get_all_keys_non_empty(self):
         self.assertEqual(kw['path'], '/b/%s/o' % NAME)
         self.assertEqual(kw['query_params'], {})

+    def test_iterator_defaults(self):
+        NAME = 'name'
+        connection = _Connection({'items': []})
+        bucket = self._makeOne(connection, NAME)
+        iterator = bucket.iterator()
+        keys = list(iterator)
+        self.assertEqual(keys, [])
+        kw, = connection._requested
+        self.assertEqual(kw['method'], 'GET')
+        self.assertEqual(kw['path'], '/b/%s/o' % NAME)
+        self.assertEqual(kw['query_params'], {})
+
+    def test_iterator_explicit(self):
+        NAME = 'name'
+        EXPECTED = {
+            'prefix': 'subfolder',
+            'delimiter': '/',
+            'maxResults': 10,
+            'versions': True,
+        }
+        connection = _Connection({'items': []})
+        bucket = self._makeOne(connection, NAME)
+        iterator = bucket.iterator(
+            prefix='subfolder',
+            delimiter='/',
+            max_results=10,
+            versions=True,
+        )
+        keys = list(iterator)
+        self.assertEqual(keys, [])
+        kw, = connection._requested
+        self.assertEqual(kw['method'], 'GET')
+        self.assertEqual(kw['path'], '/b/%s/o' % NAME)
+        self.assertEqual(kw['query_params'], EXPECTED)
+
     def test_new_key_existing(self):
         from gcloud.storage.key import Key
         existing = Key()
@@ -882,46 +960,6 @@ def get_items_from_response(self, response):
         self.assertEqual(kw[1]['query_params'], {})


-class Test__KeyIterator(unittest2.TestCase):
-
-    def _getTargetClass(self):
-        from gcloud.storage.bucket import _KeyIterator
-        return _KeyIterator
-
-    def _makeOne(self, *args, **kw):
-        return self._getTargetClass()(*args, **kw)
-
-    def test_ctor(self):
-        connection = _Connection()
-        bucket = _Bucket(connection)
-        iterator = self._makeOne(bucket)
-        self.assertTrue(iterator.bucket is bucket)
-        self.assertTrue(iterator.connection is connection)
-        self.assertEqual(iterator.path, '%s/o' % bucket.path)
-        self.assertEqual(iterator.page_number, 0)
-        self.assertEqual(iterator.next_page_token, None)
-
-    def test_get_items_from_response_empty(self):
-        connection = _Connection()
-        bucket = _Bucket(connection)
-        iterator = self._makeOne(bucket)
-        self.assertEqual(list(iterator.get_items_from_response({})), [])
-
-    def test_get_items_from_response_non_empty(self):
-        from gcloud.storage.key import Key
-        KEY = 'key'
-        response = {'items': [{'name': KEY}]}
-        connection = _Connection()
-        bucket = _Bucket(connection)
-        iterator = self._makeOne(bucket)
-        keys = list(iterator.get_items_from_response(response))
-        self.assertEqual(len(keys), 1)
-        key = keys[0]
-        self.assertTrue(isinstance(key, Key))
-        self.assertTrue(key.connection is connection)
-        self.assertEqual(key.name, KEY)
-
-
 class _Connection(object):

     _delete_ok = False
diff --git a/regression/storage.py b/regression/storage.py
index 08775d655425..b205f4fe8586 100644
--- a/regression/storage.py
+++ b/regression/storage.py
@@ -190,12 +190,11 @@ def test_list_files(self):

     def test_paginate_files(self):
         truncation_size = 1
-        extra_params = {'maxResults': len(self.FILENAMES) - truncation_size}
-        iterator = storage.key._KeyIterator(bucket=self.bucket,
-                                            extra_params=extra_params)
+        count = len(self.FILENAMES) - truncation_size
+        iterator = self.bucket.iterator(max_results=count)
         response = iterator.get_next_page_response()
         keys = list(iterator.get_items_from_response(response))
-        self.assertEqual(len(keys), extra_params['maxResults'])
+        self.assertEqual(len(keys), count)
         self.assertEqual(iterator.page_number, 1)
         self.assertTrue(iterator.next_page_token is not None)
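
For reviewers, a minimal usage sketch of the new Bucket.iterator() API is below. The connection setup (storage.get_connection() with project, client email, and key path, plus the bucket and prefix names) is assumed for illustration and is not part of this change; only bucket.iterator() and the iterator's prefixes attribute come from the patch above.

# Sketch only: exercises Bucket.iterator() as added in this patch.
# The project, credentials, bucket, and prefix below are placeholders.
from gcloud import storage

connection = storage.get_connection(
    'my-project', 'service-account@example.com', '/path/to/key.p12')
bucket = connection.get_bucket('my-bucket')

# Emulate a directory listing: keys directly under 'subfolder/',
# using '/' as the delimiter.
iterator = bucket.iterator(prefix='subfolder/', delimiter='/')
for key in iterator:
    print(key.name)

# After a page has been fetched, get_items_from_response() records any
# 'prefixes' from the JSON response (the emulated "subdirectories").
print(iterator.prefixes)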