Changes from all commits
21 commits
0452b94
feat(samples): add samples for appendable objects writes and reads
chandra-siri Jan 13, 2026
45992f1
Merge branch 'main' into samples
chandra-siri Jan 16, 2026
c55e7ac
Merge branch 'main' of github.com:googleapis/python-storage into samples
chandra-siri Jan 16, 2026
438fe94
fix lint issues
chandra-siri Jan 16, 2026
f762fe8
Merge branch 'samples' of github.com:googleapis/python-storage into s…
chandra-siri Jan 16, 2026
f880cf0
Apply suggestion from @gemini-code-assist[bot]
chandra-siri Jan 16, 2026
925c3ab
encapsulate open-close in try-finally block
chandra-siri Jan 16, 2026
756968b
encapsulate open-close in try-finally block
chandra-siri Jan 16, 2026
98b3e22
encapsulate open-close in try-finally block
chandra-siri Jan 16, 2026
92576ad
encapsulate open-close in try-finally block
chandra-siri Jan 16, 2026
2943eb1
gemini-code-assist suggestions
chandra-siri Jan 16, 2026
d4f0d2f
gemini-code-assist suggestions
chandra-siri Jan 16, 2026
c9a4d19
Refactor comments and enhance error handling in storage functions
chandra-siri Jan 16, 2026
adee129
Enhance appendable object handling and improve documentation in sampl…
chandra-siri Jan 16, 2026
4d92ca2
Refactor code formatting for consistency in sample snippets
chandra-siri Jan 16, 2026
0d352e3
rename rapid/ to zonal_buckets/
chandra-siri Jan 16, 2026
3ebcd7f
Add system tests for zonal buckets samples
chandra-siri Jan 16, 2026
bc6d940
Merge branch 'main' of github.com:googleapis/python-storage into samples
chandra-siri Jan 16, 2026
d577408
feat(AsyncWriteObjectStream): enhance open method docstring to clarif…
chandra-siri Jan 16, 2026
4fc99f4
style: format function signatures for better readability in zonal buc…
chandra-siri Jan 16, 2026
1e473f6
test: add pytest command for zonal snippets tests in CI script
chandra-siri Jan 16, 2026
1 change: 1 addition & 0 deletions cloudbuild/run_zonal_tests.sh
@@ -26,3 +26,4 @@ export RUN_ZONAL_SYSTEM_TESTS=True
CURRENT_ULIMIT=$(ulimit -n)
echo '--- Running Zonal tests on VM with ulimit set to ---' $CURRENT_ULIMIT
pytest -vv -s --log-format='%(asctime)s %(levelname)s %(message)s' --log-date-format='%H:%M:%S' tests/system/test_zonal.py
pytest -vv -s --log-format='%(asctime)s %(levelname)s %(message)s' --log-date-format='%H:%M:%S' samples/snippets/zonal_buckets/zonal_snippets_test.py
@@ -101,8 +101,16 @@ def __init__(
self.object_resource: Optional[_storage_v2.Object] = None

async def open(self) -> None:
"""Opening an object for write , should do it's state lookup
to know what's the persisted size is.
"""
Opens the bidi-gRPC connection to write to the object.

This method sends an initial request to start the stream and receives
the first response containing metadata and a write handle.

:rtype: None
:raises ValueError: If the stream is already open.
:raises google.api_core.exceptions.FailedPrecondition:
        if `generation_number` is 0 and the object already exists.
"""
if self._is_stream_open:
raise ValueError("Stream is already open")
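Since `open()` raises `FailedPrecondition` when `generation_number` is 0 and the object already exists, callers can catch that case explicitly. A minimal sketch, assuming the `AsyncAppendableObjectWriter` API shown in the samples below; the error-handling pattern itself is illustrative and not part of this diff:

from google.api_core import exceptions
from google.cloud.storage._experimental.asyncio.async_appendable_object_writer import (
    AsyncAppendableObjectWriter,
)
from google.cloud.storage._experimental.asyncio.async_grpc_client import AsyncGrpcClient


async def create_if_absent(bucket_name, object_name):
    """Create a new appendable object, reporting a clear error if it exists."""
    writer = AsyncAppendableObjectWriter(
        client=AsyncGrpcClient().grpc_client,
        bucket_name=bucket_name,
        object_name=object_name,
        generation=0,  # 0 means "create new"; open() fails if the object exists.
    )
    try:
        await writer.open()
    except exceptions.FailedPrecondition:
        print(f"{object_name} already exists; not overwriting.")
        return
    await writer.append(b"hello")
    await writer.close()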
@@ -0,0 +1,77 @@
#!/usr/bin/env python

# Copyright 2026 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import asyncio

from google.cloud.storage._experimental.asyncio.async_appendable_object_writer import (
AsyncAppendableObjectWriter,
)
from google.cloud.storage._experimental.asyncio.async_grpc_client import AsyncGrpcClient


# [START storage_create_and_write_appendable_object]


async def storage_create_and_write_appendable_object(
bucket_name, object_name, grpc_client=None
):
"""Uploads an appendable object to zonal bucket.

grpc_client: an existing grpc_client to use, this is only for testing.
"""

if grpc_client is None:
grpc_client = AsyncGrpcClient().grpc_client
writer = AsyncAppendableObjectWriter(
client=grpc_client,
bucket_name=bucket_name,
object_name=object_name,
        generation=0,  # raises `FailedPrecondition` if the object already exists.
)
# This creates a new appendable object of size 0 and opens it for appending.
await writer.open()

    # Append data to the object. You can call `.append` as many times as
    # needed; each call appends data to the end of the object.
await writer.append(b"Some data")

# Once all appends are done, close the gRPC bidirectional stream.
await writer.close()

    print(
        f"Appendable object {object_name} created with a size of "
        f"{writer.persisted_size} bytes."
    )


# [END storage_create_and_write_appendable_object]

if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
)
parser.add_argument("--bucket_name", help="Your Cloud Storage bucket name.")
parser.add_argument("--object_name", help="Your Cloud Storage object name.")

args = parser.parse_args()

asyncio.run(
storage_create_and_write_appendable_object(
bucket_name=args.bucket_name,
object_name=args.object_name,
)
)
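The comment in the snippet above notes that `.append` may be called repeatedly. A minimal sketch of streaming a local file into a new appendable object in fixed-size chunks, built only from the writer methods the snippet already uses; the chunk size and file handling are illustrative assumptions:

from google.cloud.storage._experimental.asyncio.async_appendable_object_writer import (
    AsyncAppendableObjectWriter,
)
from google.cloud.storage._experimental.asyncio.async_grpc_client import AsyncGrpcClient

CHUNK_SIZE = 1024 * 1024  # 1 MiB per append; an arbitrary choice.


async def append_file_in_chunks(bucket_name, object_name, path):
    """Stream a local file into a new appendable object, one chunk at a time."""
    writer = AsyncAppendableObjectWriter(
        client=AsyncGrpcClient().grpc_client,
        bucket_name=bucket_name,
        object_name=object_name,
        generation=0,
    )
    await writer.open()
    try:
        with open(path, "rb") as f:
            while chunk := f.read(CHUNK_SIZE):
                await writer.append(chunk)  # each call appends to the end
    finally:
        await writer.close()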
@@ -0,0 +1,76 @@
#!/usr/bin/env python

# Copyright 2026 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import asyncio

from google.cloud.storage._experimental.asyncio.async_appendable_object_writer import (
AsyncAppendableObjectWriter,
)
from google.cloud.storage._experimental.asyncio.async_grpc_client import AsyncGrpcClient


# [START storage_finalize_appendable_object_upload]
async def storage_finalize_appendable_object_upload(
bucket_name, object_name, grpc_client=None
):
"""Creates, writes to, and finalizes an appendable object.

    grpc_client: an existing gRPC client to use; provided only for testing.
"""

if grpc_client is None:
grpc_client = AsyncGrpcClient().grpc_client
writer = AsyncAppendableObjectWriter(
client=grpc_client,
bucket_name=bucket_name,
object_name=object_name,
        generation=0,  # raises `FailedPrecondition` if the object already exists.
)
# This creates a new appendable object of size 0 and opens it for appending.
await writer.open()

# Appends data to the object.
await writer.append(b"Some data")

    # Finalize the appendable object.
    # NOTE: once finalized, no more data can be appended to the object.
    # If you don't want to finalize, simply close the writer instead, as shown
    # in the `storage_create_and_write_appendable_object.py` snippet.
object_resource = await writer.finalize()

print(f"Appendable object {object_name} created and finalized.")
print("Object Metadata:")
print(object_resource)


# [END storage_finalize_appendable_object_upload]

if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
)
parser.add_argument("--bucket_name", help="Your Cloud Storage bucket name.")
parser.add_argument("--object_name", help="Your Cloud Storage object name.")

args = parser.parse_args()

asyncio.run(
storage_finalize_appendable_object_upload(
bucket_name=args.bucket_name,
object_name=args.object_name,
)
)
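`finalize()` returns the object's metadata. A small sketch of reading individual fields from the returned resource, assuming it is a `google.storage.v2` `Object` message with the standard `name`, `generation`, and `size` fields:

def describe_finalized_object(object_resource):
    """Print selected fields from the metadata returned by `finalize()`."""
    print(f"name: {object_resource.name}")
    print(f"generation: {object_resource.generation}")
    print(f"size: {object_resource.size} bytes")  # immutable once finalized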
@@ -0,0 +1,85 @@
#!/usr/bin/env python

# Copyright 2026 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Downloads a range of bytes from multiple objects concurrently."""
import argparse
import asyncio
from io import BytesIO

from google.cloud.storage._experimental.asyncio.async_grpc_client import (
AsyncGrpcClient,
)
from google.cloud.storage._experimental.asyncio.async_multi_range_downloader import (
AsyncMultiRangeDownloader,
)


# [START storage_open_multiple_objects_ranged_read]
async def storage_open_multiple_objects_ranged_read(
bucket_name, object_names, grpc_client=None
):
"""Downloads a range of bytes from multiple objects concurrently.

    grpc_client: an existing gRPC client to use; provided only for testing.
"""
if grpc_client is None:
grpc_client = AsyncGrpcClient().grpc_client

async def _download_range(object_name):
"""Helper coroutine to download a range from a single object."""
mrd = AsyncMultiRangeDownloader(grpc_client, bucket_name, object_name)
try:
            # Open the object; the downloader always opens in read mode.
await mrd.open()

            # Download the first 100 bytes of each object.
start_byte = 0
size = 100

            # The requested range is downloaded into this buffer; you may
            # provide your own buffer or any writable file-like object.
output_buffer = BytesIO()
await mrd.download_ranges([(start_byte, size, output_buffer)])
finally:
if mrd.is_stream_open:
await mrd.close()

        # The downloaded size can differ from the requested size when the
        # object is smaller; at most, data up to the end of the object is
        # downloaded.
downloaded_size = output_buffer.getbuffer().nbytes
print(f"Downloaded {downloaded_size} bytes from {object_name}")

download_tasks = [_download_range(name) for name in object_names]
await asyncio.gather(*download_tasks)


# [END storage_open_multiple_objects_ranged_read]


if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
)
parser.add_argument("--bucket_name", help="Your Cloud Storage bucket name.")
parser.add_argument(
"--object_names", nargs="+", help="Your Cloud Storage object name(s)."
)

args = parser.parse_args()

asyncio.run(
storage_open_multiple_objects_ranged_read(args.bucket_name, args.object_names)
)
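When downloading from many objects at once, one failure need not cancel the rest. A variant of the `asyncio.gather` call above, sketched under the assumption that `download_range` is a coroutine like the `_download_range` helper in the snippet; `return_exceptions=True` is plain asyncio, not an API of this library:

import asyncio


async def download_all(object_names, download_range):
    """Run one download per object; report failures without cancelling the rest."""
    results = await asyncio.gather(
        *(download_range(name) for name in object_names),
        return_exceptions=True,  # collect exceptions instead of raising the first
    )
    for name, result in zip(object_names, results):
        if isinstance(result, Exception):
            print(f"Download of {name} failed: {result}")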
@@ -0,0 +1,85 @@
#!/usr/bin/env python

# Copyright 2026 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import asyncio
from io import BytesIO

from google.cloud.storage._experimental.asyncio.async_grpc_client import AsyncGrpcClient
from google.cloud.storage._experimental.asyncio.async_multi_range_downloader import (
AsyncMultiRangeDownloader,
)


# [START storage_open_object_multiple_ranged_read]
async def storage_open_object_multiple_ranged_read(
bucket_name, object_name, grpc_client=None
):
"""Downloads multiple ranges of bytes from a single object into different buffers.

    grpc_client: an existing gRPC client to use; provided only for testing.
"""
if grpc_client is None:
grpc_client = AsyncGrpcClient().grpc_client

mrd = AsyncMultiRangeDownloader(grpc_client, bucket_name, object_name)

try:
        # Open the object; the downloader always opens in read mode.
await mrd.open()

# Specify four different buffers to download ranges into.
buffers = [BytesIO(), BytesIO(), BytesIO(), BytesIO()]

        # Define the ranges to download. Each range is a tuple of
        # (start_byte, size, buffer). Every range here downloads 10 bytes from
        # a different starting position. The start bytes are arbitrary for this
        # example, so the object must be large enough to cover them. Any start
        # byte between 0 and the object size is valid; if start_byte exceeds
        # the object size, the downloader raises an error.
ranges = [
(0, 10, buffers[0]),
(20, 10, buffers[1]),
(40, 10, buffers[2]),
(60, 10, buffers[3]),
]

await mrd.download_ranges(ranges)

    finally:
        if mrd.is_stream_open:
            await mrd.close()

# Print the downloaded content from each buffer.
for i, output_buffer in enumerate(buffers):
downloaded_size = output_buffer.getbuffer().nbytes
        print(
            f"Downloaded {downloaded_size} bytes into buffer {i + 1} "
            f"from start byte {ranges[i][0]}: {output_buffer.getvalue()}"
        )


# [END storage_open_object_multiple_ranged_read]

if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
)
parser.add_argument("--bucket_name", help="Your Cloud Storage bucket name.")
parser.add_argument("--object_name", help="Your Cloud Storage object name.")

args = parser.parse_args()

asyncio.run(
storage_open_object_multiple_ranged_read(args.bucket_name, args.object_name)
)
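The ranges in the snippet above are hard-coded. A small helper that builds the same `(start_byte, size, buffer)` tuples programmatically, using only the tuple shape `download_ranges` already accepts; the count, size, and stride values are illustrative:

from io import BytesIO


def make_strided_ranges(count=4, size=10, stride=20):
    """Build (start_byte, size, buffer) tuples at evenly spaced offsets."""
    return [(i * stride, size, BytesIO()) for i in range(count)]


# make_strided_ranges() reproduces the hard-coded list above:
# start bytes 0, 20, 40, 60, each reading 10 bytes into its own buffer.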