255 changes: 255 additions & 0 deletions docs/storage_snippets.py
@@ -0,0 +1,255 @@
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Testable usage examples for Google Cloud Storage API wrapper

Each example function takes a ``client`` argument (which must be an instance
of :class:`google.cloud.storage.client.Client`) and uses it to perform a task
with the API.

To facilitate running the examples as system tests, each example is also
passed a ``to_delete`` list; the function appends to that list any objects it
creates that need to be deleted during teardown.
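
Running this module directly (``python docs/storage_snippets.py``) exercises
every snippet against live resources, so it assumes credentials usable by
``storage.Client()`` are already configured (for example via the
``GOOGLE_APPLICATION_CREDENTIALS`` environment variable) and that the bucket
names referenced by the snippets exist or can be created in the active
project.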
"""

from google.cloud import storage


def snippet(func):
"""Mark ``func`` as a snippet example function."""
func._snippet = True
return func


@snippet
def storage_get_started(client, to_delete):
# [START storage_get_started]
client = storage.Client()
bucket = client.get_bucket('bucket-id-here')
# Then do other things...
blob = bucket.get_blob('/remote/path/to/file.txt')
assert blob.download_as_string() == 'My old contents!'
blob.upload_from_string('New contents!')
blob2 = bucket.blob('/remote/path/storage.txt')
blob2.upload_from_filename(filename='/local/path.txt')
# [END storage_get_started]

to_delete.append(bucket)


@snippet
def client_bucket_acl(client, to_delete):
bucket_name = 'system-test-bucket'
bucket = client.bucket(bucket_name)
bucket.create()

# [START client_bucket_acl]
client = storage.Client()
bucket = client.get_bucket(bucket_name)
acl = bucket.acl
# [END client_bucket_acl]
to_delete.append(bucket)

# [START acl_user_settings]
acl.user('[email protected]').grant_read()
acl.all_authenticated().grant_write()
# [END acl_user_settings]

# [START acl_save]
acl.save()
# [END acl_save]

# [START acl_revoke_write]
acl.all().grant_read().revoke_write()
# [END acl_revoke_write]

# [START acl_save_bucket]
bucket.acl.save(acl=acl)
# [END acl_save_bucket]

# [START acl_print]
print(list(acl))
# [{'role': 'OWNER', 'entity': 'allUsers'}, ...]
# [END acl_print]
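    # Each entry shown by ``list(acl)`` is a plain dict with ``entity`` and
    # ``role`` keys, the same shape the JSON API expects for ACL metadata.
    # As a rough sketch (not exercised by this example), the grants made
    # above could be undone with the matching ``revoke_*`` methods:
    #
    #     acl.user('[email protected]').revoke_read()
    #     acl.all_authenticated().revoke_write()
    #     acl.save()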


@snippet
def download_to_file(client, to_delete):
# [START download_to_file]
from google.cloud.storage import Blob

client = storage.Client(project='my-project')
bucket = client.get_bucket('my-bucket')
encryption_key = 'c7f32af42e45e85b9848a6a14dd2a8f6'
blob = Blob('secure-data', bucket, encryption_key=encryption_key)
with open('/tmp/my-secure-file', 'wb') as file_obj:
blob.download_to_file(file_obj)
# [END download_to_file]

to_delete.append(blob)


@snippet
def upload_from_file(client, to_delete):
# [START upload_from_file]
from google.cloud.storage import Blob

client = storage.Client(project='my-project')
bucket = client.get_bucket('my-bucket')
encryption_key = 'aa426195405adee2c8081bb9e7e74b19'

blob = Blob('secure-data', bucket, encryption_key=encryption_key)
with open('my-file', 'rb') as my_file:
blob.upload_from_file(my_file)
# [END upload_from_file]

to_delete.append(blob)
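    # The hex string above is only an illustration.  Per the ``Blob`` API
    # docstrings, a customer-supplied ``encryption_key`` should be a str or
    # bytes value with a length of at least 32; one way to generate a fresh
    # key (a sketch, not required by the snippet) is:
    #
    #     import os
    #     encryption_key = os.urandom(32)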


@snippet
def get_blob(client, to_delete):
from google.cloud.storage.blob import Blob
# [START get_blob]
client = storage.Client()
bucket = client.get_bucket('my-bucket')
assert isinstance(bucket.get_blob('/path/to/blob.txt'), Blob)
# <Blob: my-bucket, /path/to/blob.txt>
assert not bucket.get_blob('/does-not-exist.txt')
# None

# [END get_blob]

to_delete.append(bucket)


@snippet
def delete_blob(client, to_delete):
# [START delete_blob]
from google.cloud.exceptions import NotFound
client = storage.Client()
bucket = client.get_bucket('my-bucket')
assert isinstance(bucket.list_blobs(), list)
# [<Blob: my-bucket, my-file.txt>]
bucket.delete_blob('my-file.txt')
try:
bucket.delete_blob('doesnt-exist')
except NotFound:
pass
# [END delete_blob]

blob = None
# [START delete_blobs]
bucket.delete_blobs([blob], on_error=lambda blob: None)
# [END delete_blobs]
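    # ``blob`` above stands in for a ``Blob`` object or a blob name; for each
    # item, ``delete_blobs()`` calls ``delete_blob()``, and when ``on_error``
    # is supplied a missing blob is handed to the callback instead of raising
    # ``NotFound``.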

to_delete.append(bucket)


@snippet
def configure_website(client, to_delete):
bucket_name = 'test-bucket'
# [START configure_website]
client = storage.Client()
bucket = client.get_bucket(bucket_name)
bucket.configure_website('index.html', '404.html')
# [END configure_website]

# [START make_public]
bucket.make_public(recursive=True, future=True)
# [END make_public]
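    # ``recursive=True`` also makes the bucket's existing blobs public, and
    # ``future=True`` applies the same default object ACL to blobs uploaded
    # later.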

to_delete.append(bucket)


@snippet
def get_bucket(client, to_delete):
import google
# [START get_bucket]
try:
bucket = client.get_bucket('my-bucket')
except google.cloud.exceptions.NotFound:
print('Sorry, that bucket does not exist!')
# [END get_bucket]
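    # Unlike ``lookup_bucket()`` below, ``get_bucket()`` raises ``NotFound``
    # for a missing bucket instead of returning ``None``.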
to_delete.append(bucket)


@snippet
def lookup_bucket(client, to_delete):
from google.cloud.storage.bucket import Bucket
# [START lookup_bucket]
bucket = client.lookup_bucket('doesnt-exist')
assert not bucket
# None
bucket = client.lookup_bucket('my-bucket')
assert isinstance(bucket, Bucket)
# <Bucket: my-bucket>
# [END lookup_bucket]

to_delete.append(bucket)


@snippet
def create_bucket(client, to_delete):
from google.cloud.storage import Bucket
# [START create_bucket]
bucket = client.create_bucket('my-bucket')
assert isinstance(bucket, Bucket)
# <Bucket: my-bucket>
# [END create_bucket]
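    # Bucket names are global to Cloud Storage, so ``create_bucket()`` raises
    # a ``Conflict`` error if the name is already taken.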

to_delete.append(bucket)


@snippet
def list_buckets(client, to_delete):
# [START list_buckets]
for bucket in client.list_buckets():
print(bucket)
# [END list_buckets]

for bucket in client.list_buckets():
to_delete.append(bucket)
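    # ``list_buckets()`` returns an iterator that pages through results
    # lazily, so the second call above issues a fresh listing request rather
    # than reusing the iterator consumed inside the snippet.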


def _line_no(func):
code = getattr(func, '__code__', None) or getattr(func, 'func_code')
return code.co_firstlineno


def _find_examples():
funcs = [obj for obj in globals().values()
if getattr(obj, '_snippet', False)]
for func in sorted(funcs, key=_line_no):
yield func


def _name_and_doc(func):
return func.__name__, func.__doc__


def main():
client = storage.Client()
for example in _find_examples():
to_delete = []
print('%-25s: %s' % _name_and_doc(example))
try:
example(client, to_delete)
except AssertionError as failure:
print(' FAIL: %s' % (failure,))
except Exception as error: # pylint: disable=broad-except
print(' ERROR: %r' % (error,))
for item in to_delete:
item.delete()


if __name__ == '__main__':
main()
12 changes: 3 additions & 9 deletions storage/google/cloud/storage/__init__.py
@@ -16,15 +16,9 @@

You'll typically use these to get started with the API:

>>> from google.cloud import storage
>>> client = storage.Client()
>>> bucket = client.get_bucket('bucket-id-here')
>>> # Then do other things...
>>> blob = bucket.get_blob('/remote/path/to/file.txt')
>>> print(blob.download_as_string())
>>> blob.upload_from_string('New contents!')
>>> blob2 = bucket.blob('/remote/path/storage.txt')
>>> blob2.upload_from_filename(filename='/local/path.txt')
.. literalinclude:: storage_snippets.py
:start-after: [START storage_get_started]
:end-before: [END storage_get_started]

The main concepts with this API are:

42 changes: 25 additions & 17 deletions storage/google/cloud/storage/acl.py
@@ -16,12 +16,12 @@

:class:`google.cloud.storage.bucket.Bucket` has a getter method that creates
an ACL object under the hood, and you can interact with that using
:func:`google.cloud.storage.bucket.Bucket.acl`::
:func:`google.cloud.storage.bucket.Bucket.acl`:

.. literalinclude:: storage_snippets.py
:start-after: [START client_bucket_acl]
:end-before: [END client_bucket_acl]

>>> from google.cloud import storage
>>> client = storage.Client()
>>> bucket = client.get_bucket(bucket_name)
>>> acl = bucket.acl

Adding and removing permissions can be done with the following methods
(in increasing order of granularity):
@@ -47,32 +47,40 @@
:func:`_ACLEntity.grant_owner` and :func:`_ACLEntity.revoke_owner`

You can use any of these like any other factory method (these happen to
be :class:`_ACLEntity` factories)::
be :class:`_ACLEntity` factories):

>>> acl.user('[email protected]').grant_read()
>>> acl.all_authenticated().grant_write()
.. literalinclude:: storage_snippets.py
:start-after: [START acl_user_settings]
:end-before: [END acl_user_settings]

You can also chain these ``grant_*`` and ``revoke_*`` methods together
for brevity::
for brevity:

>>> acl.all().grant_read().revoke_write()
.. literalinclude:: storage_snippets.py
:start-after: [START acl_revoke_write]
:end-before: [END acl_revoke_write]

After that, you can save any changes you make with the
:func:`google.cloud.storage.acl.ACL.save` method::
:func:`google.cloud.storage.acl.ACL.save` method:

>>> acl.save()
.. literalinclude:: storage_snippets.py
:start-after: [START acl_save]
:end-before: [END acl_save]

You can alternatively save any existing :class:`google.cloud.storage.acl.ACL`
object (whether it was created by a factory method or not) from a
:class:`google.cloud.storage.bucket.Bucket`::
:class:`google.cloud.storage.bucket.Bucket`:

>>> bucket.acl.save(acl=acl)
.. literalinclude:: storage_snippets.py
:start-after: [START acl_save_bucket]
:end-before: [END acl_save_bucket]

To get the list of ``entity`` and ``role`` for each unique pair, the
:class:`ACL` class is iterable::
:class:`ACL` class is iterable:

>>> print(list(ACL))
[{'role': 'OWNER', 'entity': 'allUsers'}, ...]
.. literalinclude:: storage_snippets.py
:start-after: [START acl_print]
:end-before: [END acl_print]

This list of entries can be used as the ``entity`` and ``role`` fields
when sending metadata for ACLs to the API.
30 changes: 8 additions & 22 deletions storage/google/cloud/storage/blob.py
@@ -299,18 +299,11 @@ def download_to_file(self, file_obj, client=None):
initialized, makes an additional API request to load it.

Downloading a file that has been encrypted with a `customer-supplied`_
encryption key::
encryption key:

>>> from google.cloud import storage
>>> from google.cloud.storage import Blob

>>> client = storage.Client(project='my-project')
>>> bucket = client.get_bucket('my-bucket')
>>> encryption_key = 'aa426195405adee2c8081bb9e7e74b19'
>>> blob = Blob('secure-data', bucket,
... encryption_key=encryption_key)
>>> with open('/tmp/my-secure-file', 'wb') as file_obj:
>>> blob.download_to_file(file_obj)
.. literalinclude:: storage_snippets.py
:start-after: [START download_to_file]
:end-before: [END download_to_file]

The ``encryption_key`` should be a str or bytes with a length of at
least 32.
@@ -418,18 +411,11 @@ def upload_from_file(self, file_obj, rewind=False, size=None,
`lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
API documents for details.

Uploading a file with a `customer-supplied`_ encryption key::

>>> from google.cloud import storage
>>> from google.cloud.storage import Blob
Uploading a file with a `customer-supplied`_ encryption key:

>>> client = storage.Client(project='my-project')
>>> bucket = client.get_bucket('my-bucket')
>>> encryption_key = 'aa426195405adee2c8081bb9e7e74b19'
>>> blob = Blob('secure-data', bucket,
... encryption_key=encryption_key)
>>> with open('my-file', 'rb') as my_file:
>>> blob.upload_from_file(my_file)
.. literalinclude:: storage_snippets.py
:start-after: [START upload_from_file]
:end-before: [END upload_from_file]

The ``encryption_key`` should be a str or bytes with a length of at
least 32.