Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix #26: expose bucket iterator w extra params #359

Merged
merged 5 commits into from
Nov 12, 2014
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 40 additions & 3 deletions gcloud/storage/bucket.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,9 @@ class _KeyIterator(Iterator):
:type bucket: :class:`gcloud.storage.bucket.Bucket`
:param bucket: The bucket from which to list keys.
"""
def __init__(self, bucket, connection=None, extra_params=None):
def __init__(self, bucket, extra_params=None):
self.bucket = bucket
self.prefixes = ()
super(_KeyIterator, self).__init__(
connection=bucket.connection, path=bucket.path + '/o',
extra_params=extra_params)
Expand All @@ -32,6 +33,7 @@ def get_items_from_response(self, response):
:type response: dict
:param response: The JSON API response for a page of keys.
"""
self.prefixes = tuple(response.get('prefixes', ()))
for item in response.get('items', []):
yield Key.from_dict(item, bucket=self.bucket)

Expand Down Expand Up @@ -170,6 +172,42 @@ def get_all_keys(self):
"""
return list(self)

def iterator(self, prefix=None, delimiter=None, max_results=None,
versions=None):
"""Return an iterator used to find keys in the bucket.

:type prefix: string or None
:param prefix: optional prefix used to filter keys.

:type delimiter: string or None
:param delimiter: optional delimiter, used with ``prefix`` to
emulate hierarchy.

:type max_results: integer or None
:param max_results: maximum number of keys to return.

:type versions: boolean or None
:param versions: whether object versions should be returned as
separate keys.

:rtype: :class:`_KeyIterator`
"""
extra_params = {}

if prefix is not None:

This comment was marked as spam.

This comment was marked as spam.

This comment was marked as spam.

extra_params['prefix'] = prefix

if delimiter is not None:
extra_params['delimiter'] = delimiter

if max_results is not None:
extra_params['maxResults'] = max_results

if versions is not None:
extra_params['versions'] = versions

return self._iterator_class(self, extra_params=extra_params)

def new_key(self, key):
"""Given path name (or Key), return a :class:`.storage.key.Key` object.

Expand Down Expand Up @@ -665,7 +703,6 @@ def make_public(self, recursive=False, future=False):
doa.save()

if recursive:
iterator = self._iterator_class(self)
for key in iterator:
for key in self:
key.get_acl().all().grant_read()
key.save_acl()
118 changes: 78 additions & 40 deletions gcloud/storage/test_bucket.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,49 @@
import unittest2


class Test__KeyIterator(unittest2.TestCase):

def _getTargetClass(self):
from gcloud.storage.bucket import _KeyIterator
return _KeyIterator

def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)

def test_ctor(self):
connection = _Connection()
bucket = _Bucket(connection)
iterator = self._makeOne(bucket)
self.assertTrue(iterator.bucket is bucket)
self.assertTrue(iterator.connection is connection)
self.assertEqual(iterator.path, '%s/o' % bucket.path)
self.assertEqual(iterator.page_number, 0)
self.assertEqual(iterator.next_page_token, None)
self.assertEqual(iterator.prefixes, ())

def test_get_items_from_response_empty(self):
connection = _Connection()
bucket = _Bucket(connection)
iterator = self._makeOne(bucket)
self.assertEqual(list(iterator.get_items_from_response({})), [])
self.assertEqual(iterator.prefixes, ())

def test_get_items_from_response_non_empty(self):
from gcloud.storage.key import Key
KEY = 'key'
response = {'items': [{'name': KEY}], 'prefixes': ['foo']}
connection = _Connection()
bucket = _Bucket(connection)
iterator = self._makeOne(bucket)
keys = list(iterator.get_items_from_response(response))
self.assertEqual(len(keys), 1)
key = keys[0]
self.assertTrue(isinstance(key, Key))
self.assertTrue(key.connection is connection)
self.assertEqual(key.name, KEY)
self.assertEqual(iterator.prefixes, ('foo',))


class Test_Bucket(unittest2.TestCase):

def _getTargetClass(self):
Expand Down Expand Up @@ -170,6 +213,41 @@ def test_get_all_keys_non_empty(self):
self.assertEqual(kw['path'], '/b/%s/o' % NAME)
self.assertEqual(kw['query_params'], {})

def test_iterator_defaults(self):
NAME = 'name'
connection = _Connection({'items': []})
bucket = self._makeOne(connection, NAME)
iterator = bucket.iterator()
keys = list(iterator)
self.assertEqual(keys, [])
kw, = connection._requested
self.assertEqual(kw['method'], 'GET')
self.assertEqual(kw['path'], '/b/%s/o' % NAME)
self.assertEqual(kw['query_params'], {})

def test_iterator_explicit(self):
NAME = 'name'
EXPECTED = {
'prefix': 'subfolder',
'delimiter': '/',
'maxResults': 10,
'versions': True,
}
connection = _Connection({'items': []})
bucket = self._makeOne(connection, NAME)
iterator = bucket.iterator(
prefix='subfolder',
delimiter='/',
max_results=10,
versions=True,
)
keys = list(iterator)
self.assertEqual(keys, [])
kw, = connection._requested
self.assertEqual(kw['method'], 'GET')
self.assertEqual(kw['path'], '/b/%s/o' % NAME)
self.assertEqual(kw['query_params'], EXPECTED)

def test_new_key_existing(self):
from gcloud.storage.key import Key
existing = Key()
Expand Down Expand Up @@ -882,46 +960,6 @@ def get_items_from_response(self, response):
self.assertEqual(kw[1]['query_params'], {})


class Test__KeyIterator(unittest2.TestCase):

def _getTargetClass(self):
from gcloud.storage.bucket import _KeyIterator
return _KeyIterator

def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)

def test_ctor(self):
connection = _Connection()
bucket = _Bucket(connection)
iterator = self._makeOne(bucket)
self.assertTrue(iterator.bucket is bucket)
self.assertTrue(iterator.connection is connection)
self.assertEqual(iterator.path, '%s/o' % bucket.path)
self.assertEqual(iterator.page_number, 0)
self.assertEqual(iterator.next_page_token, None)

def test_get_items_from_response_empty(self):
connection = _Connection()
bucket = _Bucket(connection)
iterator = self._makeOne(bucket)
self.assertEqual(list(iterator.get_items_from_response({})), [])

def test_get_items_from_response_non_empty(self):
from gcloud.storage.key import Key
KEY = 'key'
response = {'items': [{'name': KEY}]}
connection = _Connection()
bucket = _Bucket(connection)
iterator = self._makeOne(bucket)
keys = list(iterator.get_items_from_response(response))
self.assertEqual(len(keys), 1)
key = keys[0]
self.assertTrue(isinstance(key, Key))
self.assertTrue(key.connection is connection)
self.assertEqual(key.name, KEY)


class _Connection(object):
_delete_ok = False

Expand Down
7 changes: 3 additions & 4 deletions regression/storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -190,12 +190,11 @@ def test_list_files(self):

def test_paginate_files(self):
truncation_size = 1
extra_params = {'maxResults': len(self.FILENAMES) - truncation_size}
iterator = storage.key._KeyIterator(bucket=self.bucket,
extra_params=extra_params)
count = len(self.FILENAMES) - truncation_size
iterator = self.bucket.iterator(max_results=count)
response = iterator.get_next_page_response()
keys = list(iterator.get_items_from_response(response))
self.assertEqual(len(keys), extra_params['maxResults'])
self.assertEqual(len(keys), count)
self.assertEqual(iterator.page_number, 1)
self.assertTrue(iterator.next_page_token is not None)

Expand Down