diff --git a/docs/storage_snippets.py b/docs/storage_snippets.py
new file mode 100644
index 000000000000..a79c8114201d
--- /dev/null
+++ b/docs/storage_snippets.py
@@ -0,0 +1,255 @@
+# Copyright 2016 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Testable usage examples for Google Cloud Storage API wrapper
+
+Each example function takes a ``client`` argument (which must be an instance
+of :class:`google.cloud.storage.client.Client`) and uses it to perform a task
+with the API.
+
+To facilitate running the examples as system tests, each example is also
+passed a ``to_delete`` list; the function adds to the list any objects
+created which need to be deleted during teardown.
+"""
+
+from google.cloud import storage
+
+
+def snippet(func):
+    """Mark ``func`` as a snippet example function."""
+    func._snippet = True
+    return func
+
+
+@snippet
+def storage_get_started(client, to_delete):
+    # [START storage_get_started]
+    client = storage.Client()
+    bucket = client.get_bucket('bucket-id-here')
+    # Then do other things...
+    blob = bucket.get_blob('/remote/path/to/file.txt')
+    assert blob.download_as_string() == 'My old contents!'
+    blob.upload_from_string('New contents!')
+    blob2 = bucket.blob('/remote/path/storage.txt')
+    blob2.upload_from_filename(filename='/local/path.txt')
+    # [END storage_get_started]
+
+    to_delete.append(bucket)
+
+
+@snippet
+def client_bucket_acl(client, to_delete):
+    bucket_name = 'system-test-bucket'
+    bucket = client.bucket(bucket_name)
+    bucket.create()
+
+    # [START client_bucket_acl]
+    client = storage.Client()
+    bucket = client.get_bucket(bucket_name)
+    acl = bucket.acl
+    # [END client_bucket_acl]
+    to_delete.append(bucket)
+
+    # [START acl_user_settings]
+    acl.user('me@example.org').grant_read()
+    acl.all_authenticated().grant_write()
+    # [END acl_user_settings]
+
+    # [START acl_save]
+    acl.save()
+    # [END acl_save]
+
+    # [START acl_revoke_write]
+    acl.all().grant_read().revoke_write()
+    # [END acl_revoke_write]
+
+    # [START acl_save_bucket]
+    bucket.acl.save(acl=acl)
+    # [END acl_save_bucket]
+
+    # [START acl_print]
+    print(list(acl))
+    # [{'role': 'OWNER', 'entity': 'allUsers'}, ...]
+    # [END acl_print]
+
+
+@snippet
+def download_to_file(client, to_delete):
+    # [START download_to_file]
+    from google.cloud.storage import Blob
+
+    client = storage.Client(project='my-project')
+    bucket = client.get_bucket('my-bucket')
+    encryption_key = 'c7f32af42e45e85b9848a6a14dd2a8f6'
+    blob = Blob('secure-data', bucket, encryption_key=encryption_key)
+    with open('/tmp/my-secure-file', 'wb') as file_obj:
+        blob.download_to_file(file_obj)
+    # [END download_to_file]
+
+    to_delete.append(blob)
+
+
+@snippet
+def upload_from_file(client, to_delete):
+    # [START upload_from_file]
+    from google.cloud.storage import Blob
+
+    client = storage.Client(project='my-project')
+    bucket = client.get_bucket('my-bucket')
+    encryption_key = 'aa426195405adee2c8081bb9e7e74b19'
+    blob = Blob('secure-data', bucket, encryption_key=encryption_key)
+    with open('my-file', 'rb') as my_file:
+        blob.upload_from_file(my_file)
+    # [END upload_from_file]
+
+    to_delete.append(blob)
+
+
+@snippet
+def get_blob(client, to_delete):
+    from google.cloud.storage.blob import Blob
+    # [START get_blob]
+    client = storage.Client()
+    bucket = client.get_bucket('my-bucket')
+    assert isinstance(bucket.get_blob('/path/to/blob.txt'), Blob)
+    # <Blob: my-bucket, /path/to/blob.txt>
+    assert not bucket.get_blob('/does-not-exist.txt')
+    # None
+    # [END get_blob]
+
+    to_delete.append(bucket)
+
+
+@snippet
+def delete_blob(client, to_delete):
+    # [START delete_blob]
+    from google.cloud.exceptions import NotFound
+    client = storage.Client()
+    bucket = client.get_bucket('my-bucket')
+    assert isinstance(bucket.list_blobs(), list)
+    # []
+    bucket.delete_blob('my-file.txt')
+    try:
+        bucket.delete_blob('doesnt-exist')
+    except NotFound:
+        pass
+    # [END delete_blob]
+
+    blob = None
+    # [START delete_blobs]
+    bucket.delete_blobs([blob], on_error=lambda blob: None)
+    # [END delete_blobs]
+
+    to_delete.append(bucket)
+
+
+@snippet
+def configure_website(client, to_delete):
+    bucket_name = 'test-bucket'
+    # [START configure_website]
+    client = storage.Client()
+    bucket = client.get_bucket(bucket_name)
+    bucket.configure_website('index.html', '404.html')
+    # [END configure_website]
+
+    # [START make_public]
+    bucket.make_public(recursive=True, future=True)
+    # [END make_public]
+
+    to_delete.append(bucket)
+
+
+@snippet
+def get_bucket(client, to_delete):
+    import google
+    # [START get_bucket]
+    try:
+        bucket = client.get_bucket('my-bucket')
+    except google.cloud.exceptions.NotFound:
+        print('Sorry, that bucket does not exist!')
+    # [END get_bucket]
+    to_delete.append(bucket)
+
+
+@snippet
+def lookup_bucket(client, to_delete):
+    from google.cloud.storage.bucket import Bucket
+    # [START lookup_bucket]
+    bucket = client.lookup_bucket('doesnt-exist')
+    assert not bucket
+    # None
+    bucket = client.lookup_bucket('my-bucket')
+    assert isinstance(bucket, Bucket)
+    # <Bucket: my-bucket>
+    # [END lookup_bucket]
+
+    to_delete.append(bucket)
+
+
+@snippet
+def create_bucket(client, to_delete):
+    from google.cloud.storage import Bucket
+    # [START create_bucket]
+    bucket = client.create_bucket('my-bucket')
+    assert isinstance(bucket, Bucket)
+    # <Bucket: my-bucket>
+    # [END create_bucket]
+
+    to_delete.append(bucket)
+
+
+@snippet
+def list_buckets(client, to_delete):
+    # [START list_buckets]
+    for bucket in client.list_buckets():
+        print(bucket)
+    # [END list_buckets]
+
+    for bucket in client.list_buckets():
+        to_delete.append(bucket)
+
+
+def _line_no(func):
+    code = getattr(func, '__code__', None) or getattr(func, 'func_code')
+    return code.co_firstlineno
+
+
+def _find_examples():
+    funcs = [obj for obj in globals().values()
+             if getattr(obj, '_snippet', False)]
+    for func in sorted(funcs, key=_line_no):
+        yield func
+
+
+def _name_and_doc(func):
+    return func.__name__, func.__doc__
+
+
+def main():
+    client = storage.Client()
+    for example in _find_examples():
+        to_delete = []
+        print('%-25s: %s' % _name_and_doc(example))
+        try:
+            example(client, to_delete)
+        except AssertionError as failure:
+            print(' FAIL: %s' % (failure,))
+        except Exception as error:  # pylint: disable=broad-except
+            print(' ERROR: %r' % (error,))
+        for item in to_delete:
+            item.delete()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/storage/google/cloud/storage/__init__.py b/storage/google/cloud/storage/__init__.py
index c7ad7c8ecf32..10db1a1ce543 100644
--- a/storage/google/cloud/storage/__init__.py
+++ b/storage/google/cloud/storage/__init__.py
@@ -16,15 +16,9 @@
 
 You'll typically use these to get started with the API:
 
->>> from google.cloud import storage
->>> client = storage.Client()
->>> bucket = client.get_bucket('bucket-id-here')
->>> # Then do other things...
->>> blob = bucket.get_blob('/remote/path/to/file.txt')
->>> print(blob.download_as_string())
->>> blob.upload_from_string('New contents!')
->>> blob2 = bucket.blob('/remote/path/storage.txt')
->>> blob2.upload_from_filename(filename='/local/path.txt')
+.. literalinclude:: storage_snippets.py
+  :start-after: [START storage_get_started]
+  :end-before: [END storage_get_started]
 
 The main concepts with this API are:
 
diff --git a/storage/google/cloud/storage/acl.py b/storage/google/cloud/storage/acl.py
index e93e292d9d97..a133ad443dff 100644
--- a/storage/google/cloud/storage/acl.py
+++ b/storage/google/cloud/storage/acl.py
@@ -16,12 +16,12 @@
 
 :class:`google.cloud.storage.bucket.Bucket` has a getting method that creates
 an ACL object under the hood, and you can interact with that using
-:func:`google.cloud.storage.bucket.Bucket.acl`::
+:func:`google.cloud.storage.bucket.Bucket.acl`:
+
+.. literalinclude:: storage_snippets.py
+  :start-after: [START client_bucket_acl]
+  :end-before: [END client_bucket_acl]
 
-  >>> from google.cloud import storage
-  >>> client = storage.Client()
-  >>> bucket = client.get_bucket(bucket_name)
-  >>> acl = bucket.acl
 
 Adding and removing permissions can be done with the following methods
 (in increasing order of granularity):
@@ -47,32 +47,40 @@
   :func:`_ACLEntity.grant_owner` and :func:`_ACLEntity.revoke_owner`
 
 You can use any of these like any other factory method (these happen to
-be :class:`_ACLEntity` factories)::
+be :class:`_ACLEntity` factories):
 
-  >>> acl.user('me@example.org').grant_read()
-  >>> acl.all_authenticated().grant_write()
+.. literalinclude:: storage_snippets.py
+  :start-after: [START acl_user_settings]
+  :end-before: [END acl_user_settings]
 
 You can also chain these ``grant_*`` and ``revoke_*`` methods together
-for brevity::
+for brevity:
 
-  >>> acl.all().grant_read().revoke_write()
+.. literalinclude:: storage_snippets.py
+  :start-after: [START acl_revoke_write]
+  :end-before: [END acl_revoke_write]
 
 After that, you can save any changes you make with the
-:func:`google.cloud.storage.acl.ACL.save` method::
+:func:`google.cloud.storage.acl.ACL.save` method:
 
-  >>> acl.save()
+.. literalinclude:: storage_snippets.py
+  :start-after: [START acl_save]
+  :end-before: [END acl_save]
 
 You can alternatively save any existing :class:`google.cloud.storage.acl.ACL`
 object (whether it was created by a factory method or not) from a
-:class:`google.cloud.storage.bucket.Bucket`::
+:class:`google.cloud.storage.bucket.Bucket`:
 
-  >>> bucket.acl.save(acl=acl)
+.. literalinclude:: storage_snippets.py
+  :start-after: [START acl_save_bucket]
+  :end-before: [END acl_save_bucket]
 
 To get the list of ``entity`` and ``role`` for each unique pair, the
-:class:`ACL` class is iterable::
+:class:`ACL` class is iterable:
 
-  >>> print(list(ACL))
-  [{'role': 'OWNER', 'entity': 'allUsers'}, ...]
+.. literalinclude:: storage_snippets.py
+  :start-after: [START acl_print]
+  :end-before: [END acl_print]
 
 This list of tuples can be used as the ``entity`` and ``role``
 fields when sending metadata for ACLs to the API.
diff --git a/storage/google/cloud/storage/blob.py b/storage/google/cloud/storage/blob.py
index b409bc29afcd..69b9d1f33ff9 100644
--- a/storage/google/cloud/storage/blob.py
+++ b/storage/google/cloud/storage/blob.py
@@ -299,18 +299,11 @@ def download_to_file(self, file_obj, client=None):
         initialized, makes an additional API request to load it.
 
         Downloading a file that has been encrypted with a `customer-supplied`_
-        encryption key::
+        encryption key:
 
-          >>> from google.cloud import storage
-          >>> from google.cloud.storage import Blob
-
-          >>> client = storage.Client(project='my-project')
-          >>> bucket = client.get_bucket('my-bucket')
-          >>> encryption_key = 'aa426195405adee2c8081bb9e7e74b19'
-          >>> blob = Blob('secure-data', bucket,
-          ...             encryption_key=encryption_key)
-          >>> with open('/tmp/my-secure-file', 'wb') as file_obj:
-          >>>     blob.download_to_file(file_obj)
+        .. literalinclude:: storage_snippets.py
+          :start-after: [START download_to_file]
+          :end-before: [END download_to_file]
 
         The ``encryption_key`` should be a str or bytes with a length of at
         least 32.
@@ -418,18 +411,11 @@ def upload_from_file(self, file_obj, rewind=False, size=None,
         `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
         API documents for details.
 
-        Uploading a file with a `customer-supplied`_ encryption key::
-
-          >>> from google.cloud import storage
-          >>> from google.cloud.storage import Blob
+        Uploading a file with a `customer-supplied`_ encryption key:
 
-          >>> client = storage.Client(project='my-project')
-          >>> bucket = client.get_bucket('my-bucket')
-          >>> encryption_key = 'aa426195405adee2c8081bb9e7e74b19'
-          >>> blob = Blob('secure-data', bucket,
-          ...             encryption_key=encryption_key)
-          >>> with open('my-file', 'rb') as my_file:
-          >>>     blob.upload_from_file(my_file)
+        .. literalinclude:: storage_snippets.py
+          :start-after: [START upload_from_file]
+          :end-before: [END upload_from_file]
 
         The ``encryption_key`` should be a str or bytes with a length of at
         least 32.
diff --git a/storage/google/cloud/storage/bucket.py b/storage/google/cloud/storage/bucket.py
index c4705645c427..360370f41fbb 100644
--- a/storage/google/cloud/storage/bucket.py
+++ b/storage/google/cloud/storage/bucket.py
@@ -209,15 +209,11 @@ def path(self):
     def get_blob(self, blob_name, client=None):
         """Get a blob object by name.
 
-        This will return None if the blob doesn't exist::
+        This will return None if the blob doesn't exist:
 
-          >>> from google.cloud import storage
-          >>> client = storage.Client()
-          >>> bucket = client.get_bucket('my-bucket')
-          >>> print(bucket.get_blob('/path/to/blob.txt'))
-          <Blob: my-bucket, /path/to/blob.txt>
-          >>> print(bucket.get_blob('/does-not-exist.txt'))
-          None
+        .. literalinclude:: storage_snippets.py
+          :start-after: [START get_blob]
+          :end-before: [END get_blob]
 
         :type blob_name: str
         :param blob_name: The name of the blob to retrieve.
@@ -372,19 +368,11 @@ def delete_blob(self, blob_name, client=None):
         If the blob isn't found (backend 404), raises a
         :class:`google.cloud.exceptions.NotFound`.
 
-        For example::
+        For example:
 
-          >>> from google.cloud.exceptions import NotFound
-          >>> from google.cloud import storage
-          >>> client = storage.Client()
-          >>> bucket = client.get_bucket('my-bucket')
-          >>> print(bucket.list_blobs())
-          []
-          >>> bucket.delete_blob('my-file.txt')
-          >>> try:
-          ...   bucket.delete_blob('doesnt-exist')
-          ... except NotFound:
-          ...   pass
+        .. literalinclude:: storage_snippets.py
+          :start-after: [START delete_blob]
+          :end-before: [END delete_blob]
 
         :type blob_name: str
         :param blob_name: A blob name to delete.
@@ -396,9 +384,12 @@ def delete_blob(self, blob_name, client=None):
 
         :raises: :class:`google.cloud.exceptions.NotFound` (to suppress
                  the exception, call ``delete_blobs``, passing a no-op
-                 ``on_error`` callback, e.g.::
+                 ``on_error`` callback, e.g.:
+
+        .. literalinclude:: storage_snippets.py
+          :start-after: [START delete_blobs]
+          :end-before: [END delete_blobs]
 
-                 >>> bucket.delete_blobs([blob], on_error=lambda blob: None)
         """
         client = self._require_client(client)
         blob_path = Blob.path_helper(self.path, blob_name)
@@ -752,16 +743,17 @@ def configure_website(self, main_page_suffix=None, not_found_page=None):
         (and to do that, you need to get approved somehow...).
 
         If you want this bucket to host a website, just provide the name
-        of an index page and a page to use when a blob isn't found::
+        of an index page and a page to use when a blob isn't found:
 
-          >>> from google.cloud import storage
-          >>> client = storage.Client()
-          >>> bucket = client.get_bucket(bucket_name)
-          >>> bucket.configure_website('index.html', '404.html')
+        .. literalinclude:: storage_snippets.py
+          :start-after: [START configure_website]
+          :end-before: [END configure_website]
 
-        You probably should also make the whole bucket public::
+        You probably should also make the whole bucket public:
 
-          >>> bucket.make_public(recursive=True, future=True)
+        .. literalinclude:: storage_snippets.py
+          :start-after: [START make_public]
+          :end-before: [END make_public]
 
         This says: "Make the bucket public, and all the stuff already in
         the bucket, and anything else I add to the bucket. Just make it
diff --git a/storage/google/cloud/storage/client.py b/storage/google/cloud/storage/client.py
index f8a64a3dc98c..166f702f309d 100644
--- a/storage/google/cloud/storage/client.py
+++ b/storage/google/cloud/storage/client.py
@@ -149,12 +149,9 @@ def get_bucket(self, bucket_name):
 
         For example:
 
-        .. code-block:: python
-
-          >>> try:
-          >>>   bucket = client.get_bucket('my-bucket')
-          >>> except google.cloud.exceptions.NotFound:
-          >>>   print('Sorry, that bucket does not exist!')
+        .. literalinclude:: storage_snippets.py
+          :start-after: [START get_bucket]
+          :end-before: [END get_bucket]
 
         This implements "storage.buckets.get".
 
@@ -175,14 +172,9 @@ def lookup_bucket(self, bucket_name):
         You can use this if you would rather check for a None value
         than catching an exception:
 
-        .. code-block:: python
-
-          >>> bucket = client.lookup_bucket('doesnt-exist')
-          >>> print(bucket)
-          None
-          >>> bucket = client.lookup_bucket('my-bucket')
-          >>> print(bucket)
-          <Bucket: my-bucket>
+        .. literalinclude:: storage_snippets.py
+          :start-after: [START lookup_bucket]
+          :end-before: [END lookup_bucket]
 
         :type bucket_name: str
         :param bucket_name: The name of the bucket to get.
@@ -200,11 +192,9 @@ def create_bucket(self, bucket_name):
 
         For example:
 
-        .. code-block:: python
-
-          >>> bucket = client.create_bucket('my-bucket')
-          >>> print(bucket)
-          <Bucket: my-bucket>
+        .. literalinclude:: storage_snippets.py
+          :start-after: [START create_bucket]
+          :end-before: [END create_bucket]
 
         This implements "storage.buckets.insert".
 
@@ -228,10 +218,9 @@ def list_buckets(self, max_results=None, page_token=None, prefix=None,
         This will not populate the list of blobs available in each
         bucket.
 
-        .. code-block:: python
-
-          >>> for bucket in client.list_buckets():
-          ...   print(bucket)
+        .. literalinclude:: storage_snippets.py
+          :start-after: [START list_buckets]
+          :end-before: [END list_buckets]
 
         This implements "storage.buckets.list".
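
For reference (not part of the patch itself): a Sphinx ``literalinclude`` directive with ``:start-after:`` and ``:end-before:`` pulls in only the lines between the two marker comments, with the markers omitted. So the directive added to ``storage/google/cloud/storage/__init__.py`` should render roughly as the body of ``storage_get_started`` from ``docs/storage_snippets.py``. A sketch of the expected rendered block, assuming Sphinx resolves the relative ``storage_snippets.py`` path against the ``docs/`` directory:

    # Lines included between [START storage_get_started] and
    # [END storage_get_started] in docs/storage_snippets.py.
    client = storage.Client()
    bucket = client.get_bucket('bucket-id-here')
    # Then do other things...
    blob = bucket.get_blob('/remote/path/to/file.txt')
    assert blob.download_as_string() == 'My old contents!'
    blob.upload_from_string('New contents!')
    blob2 = bucket.blob('/remote/path/storage.txt')
    blob2.upload_from_filename(filename='/local/path.txt')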