From 1b1fb11c0eff8d470933d934ad62bdac6514c9af Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 7 Jul 2015 12:19:10 -0700 Subject: [PATCH] Updating docs, demo and system tests after storage.api deletion. --- README.rst | 3 ++- docs/_components/storage-getting-started.rst | 6 ++--- docs/_components/storage-quickstart.rst | 4 +-- docs/index.rst | 3 ++- gcloud/storage/__init__.py | 4 ++- gcloud/storage/acl.py | 4 +-- gcloud/storage/bucket.py | 28 ++++++++++---------- gcloud/storage/demo/__init__.py | 13 +-------- gcloud/storage/demo/demo.py | 10 +++---- system_tests/storage.py | 13 ++++----- 10 files changed, 41 insertions(+), 47 deletions(-) diff --git a/README.rst b/README.rst index 2496fa291794..a8a4abf5cba7 100644 --- a/README.rst +++ b/README.rst @@ -101,7 +101,8 @@ how to create a bucket. .. code:: python from gcloud import storage - bucket = storage.get_bucket('bucket-id-here') + client = storage.Client() + bucket = client.get_bucket('bucket-id-here') # Then do other things... blob = bucket.get_blob('/remote/path/to/file.txt') print blob.download_as_string() diff --git a/docs/_components/storage-getting-started.rst b/docs/_components/storage-getting-started.rst index ccf9be46b0bc..eb86bf6a49b6 100644 --- a/docs/_components/storage-getting-started.rst +++ b/docs/_components/storage-getting-started.rst @@ -56,7 +56,7 @@ bucket. Let's create a bucket: - >>> bucket = storage.create_bucket('test', project_name, connection=connection) + >>> bucket = client.create_bucket('test') Traceback (most recent call last): File "", line 1, in File "gcloud/storage/connection.py", line 340, in create_bucket @@ -184,8 +184,8 @@ If you have a full bucket, you can delete it this way:: Listing available buckets ------------------------- - >>> for bucket in storage.list_buckets(connection): - ... print bucket.name + >>> for bucket in client.list_buckets(): + ... print bucket.name Managing access control ----------------------- diff --git a/docs/_components/storage-quickstart.rst b/docs/_components/storage-quickstart.rst index 64c300778031..5d5137092dfb 100644 --- a/docs/_components/storage-quickstart.rst +++ b/docs/_components/storage-quickstart.rst @@ -56,9 +56,9 @@ Once you have the connection, you can create buckets and blobs:: >>> from gcloud import storage - >>> storage.list_buckets(connection) + >>> client.list_buckets() [, ...] - >>> bucket = storage.create_bucket('my-new-bucket', connection=connection) + >>> bucket = client.create_bucket('my-new-bucket') >>> print bucket >>> blob = storage.Blob('my-test-file.txt', bucket=bucket) diff --git a/docs/index.rst b/docs/index.rst index 963d2fe1cbce..2d418866a740 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -50,6 +50,7 @@ Cloud Storage .. code-block:: python from gcloud import storage - bucket = storage.get_bucket('') + client = storage.Client() + bucket = client.get_bucket('') blob = storage.Blob('my-test-file.txt', bucket=bucket) blob = blob.upload_contents_from_string('this is test content!') diff --git a/gcloud/storage/__init__.py b/gcloud/storage/__init__.py index 8a2b0c0d1a73..8fa382865df2 100644 --- a/gcloud/storage/__init__.py +++ b/gcloud/storage/__init__.py @@ -17,7 +17,8 @@ You'll typically use these to get started with the API: >>> from gcloud import storage ->>> bucket = storage.get_bucket('bucket-id-here') +>>> client = storage.Client() +>>> bucket = client.get_bucket('bucket-id-here') >>> # Then do other things... 
>>> blob = bucket.get_blob('/remote/path/to/file.txt') >>> print blob.download_as_string() @@ -50,6 +51,7 @@ from gcloud.storage.batch import Batch from gcloud.storage.blob import Blob from gcloud.storage.bucket import Bucket +from gcloud.storage.client import Client from gcloud.storage.connection import SCOPE from gcloud.storage.connection import Connection diff --git a/gcloud/storage/acl.py b/gcloud/storage/acl.py index 45725d12afdc..673aa05d744d 100644 --- a/gcloud/storage/acl.py +++ b/gcloud/storage/acl.py @@ -19,8 +19,8 @@ :func:`gcloud.storage.bucket.Bucket.acl`:: >>> from gcloud import storage - >>> connection = storage.get_connection() - >>> bucket = storage.get_bucket(bucket_name, connection=connection) + >>> client = storage.Client() + >>> bucket = client.get_bucket(bucket_name) >>> acl = bucket.acl Adding and removing permissions can be done with the following methods diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index 02d66d9561a8..a20cbe408d5c 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -194,8 +194,8 @@ def get_blob(self, blob_name, connection=None): This will return None if the blob doesn't exist:: >>> from gcloud import storage - >>> connection = storage.get_connection() - >>> bucket = storage.get_bucket('my-bucket', connection=connection) + >>> client = storage.Client() + >>> bucket = client.get_bucket('my-bucket') >>> print bucket.get_blob('/path/to/blob.txt') >>> print bucket.get_blob('/does-not-exist.txt') @@ -356,8 +356,8 @@ def delete_blob(self, blob_name, connection=None): >>> from gcloud.exceptions import NotFound >>> from gcloud import storage - >>> connection = storage.get_connection() - >>> bucket = storage.get_bucket('my-bucket', connection=connection) + >>> client = storage.Client() + >>> bucket = client.get_bucket('my-bucket') >>> print bucket.list_blobs() [] >>> bucket.delete_blob('my-file.txt') @@ -463,8 +463,8 @@ def upload_file(self, filename, blob_name=None, connection=None): For example:: >>> from gcloud import storage - >>> connection = storage.get_connection() - >>> bucket = storage.get_bucket('my-bucket', connection=connection) + >>> client = storage.Client() + >>> bucket = client.get_bucket('my-bucket') >>> bucket.upload_file('~/my-file.txt', 'remote-text-file.txt') >>> print bucket.list_blobs() [] @@ -473,8 +473,8 @@ def upload_file(self, filename, blob_name=None, connection=None): using the local filename (**not** the complete path):: >>> from gcloud import storage - >>> connection = storage.get_connection() - >>> bucket = storage.get_bucket('my-bucket', connection=connection) + >>> client = storage.Client() + >>> bucket = client.get_bucket('my-bucket') >>> bucket.upload_file('~/my-file.txt') >>> print bucket.list_blobs() [] @@ -510,8 +510,8 @@ def upload_file_object(self, file_obj, blob_name=None, connection=None): For example:: >>> from gcloud import storage - >>> connection = storage.get_connection() - >>> bucket = storage.get_bucket('my-bucket', connection=connection) + >>> client = storage.Client() + >>> bucket = client.get_bucket('my-bucket') >>> bucket.upload_file(open('~/my-file.txt'), 'remote-text-file.txt') >>> print bucket.list_blobs() [] @@ -520,8 +520,8 @@ def upload_file_object(self, file_obj, blob_name=None, connection=None): using the local filename (**not** the complete path):: >>> from gcloud import storage - >>> connection = storage.get_connection() - >>> bucket = storage.get_bucket('my-bucket', connection=connection) + >>> client = storage.Client() + >>> bucket = 
client.get_bucket('my-bucket') >>> bucket.upload_file(open('~/my-file.txt')) >>> print bucket.list_blobs() [] @@ -788,8 +788,8 @@ def configure_website(self, main_page_suffix=None, not_found_page=None): of an index page and a page to use when a blob isn't found:: >>> from gcloud import storage - >>> connection = storage.get_connection() - >>> bucket = storage.get_bucket(bucket_name, connection=connection) + >>> client = storage.Client() + >>> bucket = client.get_bucket(bucket_name) >>> bucket.configure_website('index.html', '404.html') You probably should also make the whole bucket public:: diff --git a/gcloud/storage/demo/__init__.py b/gcloud/storage/demo/__init__.py index 0558e7f8bf6c..a441bc2508b2 100644 --- a/gcloud/storage/demo/__init__.py +++ b/gcloud/storage/demo/__init__.py @@ -13,18 +13,7 @@ # limitations under the License. import os -from gcloud import storage -__all__ = ['create_bucket', 'list_buckets', 'PROJECT_ID'] +__all__ = ['PROJECT_ID'] PROJECT_ID = os.getenv('GCLOUD_TESTS_PROJECT_ID') - - -def list_buckets(connection): - return list(storage.list_buckets(project=PROJECT_ID, - connection=connection)) - - -def create_bucket(bucket_name, connection): - return storage.create_bucket(bucket_name, PROJECT_ID, - connection=connection) diff --git a/gcloud/storage/demo/demo.py b/gcloud/storage/demo/demo.py index 93d8766f4049..ddad71f59dfb 100644 --- a/gcloud/storage/demo/demo.py +++ b/gcloud/storage/demo/demo.py @@ -16,25 +16,25 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Let's start by importing the demo module and getting a connection: +# Let's start by importing the demo module and getting a client: import time from gcloud import storage from gcloud.storage import demo -connection = storage.get_connection() +client = storage.Client(project=demo.PROJECT_ID) # OK, now let's look at all of the buckets... -print(list(demo.list_buckets(connection))) # This might take a second... +print(list(client.list_buckets())) # This might take a second... # Now let's create a new bucket... bucket_name = ("bucket-%s" % time.time()).replace(".", "") # Get rid of dots. print(bucket_name) -bucket = demo.create_bucket(bucket_name, connection) +bucket = client.create_bucket(bucket_name) print(bucket) # Let's look at all of the buckets again... -print(list(demo.list_buckets(connection))) +print(list(client.list_buckets())) # How about we create a new blob inside this bucket. blob = storage.Blob("my-new-file.txt", bucket=bucket) diff --git a/system_tests/storage.py b/system_tests/storage.py index 7ee7b3f3a217..dcfcbd444a3b 100644 --- a/system_tests/storage.py +++ b/system_tests/storage.py @@ -28,6 +28,7 @@ SHARED_BUCKETS = {} _helpers._PROJECT_ENV_VAR_NAME = 'GCLOUD_TESTS_PROJECT_ID' +CLIENT = storage.Client() def setUpModule(): @@ -36,7 +37,7 @@ def setUpModule(): bucket_name = 'new%d' % (1000 * time.time(),) # In the **very** rare case the bucket name is reserved, this # fails with a ConnectionError. 
- SHARED_BUCKETS['test_bucket'] = storage.create_bucket(bucket_name) + SHARED_BUCKETS['test_bucket'] = CLIENT.create_bucket(bucket_name) def tearDownModule(): @@ -57,12 +58,12 @@ def tearDown(self): def test_create_bucket(self): new_bucket_name = 'a-new-bucket' self.assertRaises(exceptions.NotFound, - storage.get_bucket, new_bucket_name) - created = storage.create_bucket(new_bucket_name) + CLIENT.get_bucket, new_bucket_name) + created = CLIENT.create_bucket(new_bucket_name) self.case_buckets_to_delete.append(new_bucket_name) self.assertEqual(created.name, new_bucket_name) - def test_get_buckets(self): + def test_list_buckets(self): buckets_to_create = [ 'new%d' % (1000 * time.time(),), 'newer%d' % (1000 * time.time(),), @@ -70,11 +71,11 @@ def test_get_buckets(self): ] created_buckets = [] for bucket_name in buckets_to_create: - bucket = storage.create_bucket(bucket_name) + bucket = CLIENT.create_bucket(bucket_name) self.case_buckets_to_delete.append(bucket_name) # Retrieve the buckets. - all_buckets = storage.list_buckets() + all_buckets = CLIENT.list_buckets() created_buckets = [bucket for bucket in all_buckets if bucket.name in buckets_to_create] self.assertEqual(len(created_buckets), len(buckets_to_create))
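
For reference, below is a minimal sketch of the client-based workflow that this patch migrates the docs, demo, and system tests to. It is illustrative only and not part of the patch: the bucket name is a placeholder, and it assumes credentials plus a default project are available to ``storage.Client()`` (for example via ``GCLOUD_TESTS_PROJECT_ID``, as the demo and system tests do).

.. code-block:: python

    # Sketch of the post-patch API: a single Client owns the connection and
    # project, replacing the removed module-level helpers such as
    # storage.get_connection(), storage.get_bucket(), and storage.list_buckets().
    import time

    from gcloud import storage

    # Assumes default credentials and project are configured in the environment.
    client = storage.Client()

    # List the buckets visible to this client's project.
    for bucket in client.list_buckets():
        print(bucket.name)

    # Create a uniquely named bucket, as the demo script does.
    bucket_name = ('bucket-%s' % time.time()).replace('.', '')
    bucket = client.create_bucket(bucket_name)

    # Fetch an existing bucket and look up a blob in it (returns None if absent).
    bucket = client.get_bucket(bucket_name)
    print(bucket.get_blob('my-test-file.txt'))

The design change this reflects is that callers now thread one ``Client`` through their code instead of passing ``connection=connection`` into each helper, which is why the ``bucket.py`` docstrings, ``demo.py``, and ``system_tests/storage.py`` hunks above all swap ``storage.get_connection()`` / ``storage.get_bucket(...)`` pairs for a single ``storage.Client()`` instance.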