Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 9 additions & 3 deletions google/cloud/storage/fileio.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,7 @@ class BlobReader(io.BufferedIOBase):
configuration changes for Retry objects such as delays and deadlines
are respected.

:type download_kwargs: dict
:param download_kwargs:
Keyword arguments to pass to the underlying API calls.
The following arguments are supported:
Expand All @@ -98,9 +99,10 @@ class BlobReader(io.BufferedIOBase):
- ``if_metageneration_match``
- ``if_metageneration_not_match``
- ``timeout``
- ``raw_download``

Note that download_kwargs are also applied to blob.reload(), if a reload
is needed during seek().
Note that download_kwargs (excluding ``raw_download``) are also applied to blob.reload(),
if a reload is needed during seek().
"""

def __init__(self, blob, chunk_size=None, retry=DEFAULT_RETRY, **download_kwargs):
Expand Down Expand Up @@ -175,7 +177,10 @@ def seek(self, pos, whence=0):
self._checkClosed() # Raises ValueError if closed.

if self._blob.size is None:
self._blob.reload(**self._download_kwargs)
reload_kwargs = {
k: v for k, v in self._download_kwargs.items() if k != "raw_download"
}
self._blob.reload(**reload_kwargs)

initial_offset = self._pos + self._buffer.tell()

Expand Down Expand Up @@ -272,6 +277,7 @@ class BlobWriter(io.BufferedIOBase):
configuration changes for Retry objects such as delays and deadlines
are respected.

:type upload_kwargs: dict
:param upload_kwargs:
Keyword arguments to pass to the underlying API
calls. The following arguments are supported:
Expand Down
23 changes: 23 additions & 0 deletions tests/system/test_fileio.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,3 +116,26 @@ def test_blobwriter_exit(
blobs_to_delete.append(blob)
# blob should have been uploaded
assert blob.exists()


def test_blobreader_w_raw_download(
    shared_bucket,
    blobs_to_delete,
    file_data,
):
    """System test: BlobReader read/seek work when ``raw_download=True``."""
    info = file_data["big"]
    blob = shared_bucket.blob("LargeFile")

    # Upload the fixture file so there is something to read back.
    with open(info["path"], "rb") as source:
        with blob.open("wb", chunk_size=256 * 1024, if_generation_match=0) as writer:
            writer.write(source.read())
    blobs_to_delete.append(blob)

    # Read it back through BlobReader with raw_download enabled; seek must
    # not break (it may trigger a blob.reload(), which must not receive
    # the raw_download kwarg), and the content must round-trip unchanged.
    with open(info["path"], "rb") as source:
        with blob.open("rb", chunk_size=256 * 1024, raw_download=True) as reader:
            reader.seek(0)
            source.seek(0)
            assert source.read() == reader.read()
            # At EOF, subsequent reads return empty bytes rather than raising.
            assert reader.read() == b""