From 11f6024a4fd0a66e8cdcc6c89c3d33534892386d Mon Sep 17 00:00:00 2001
From: cojenco
Date: Tue, 6 Jun 2023 15:57:00 -0700
Subject: [PATCH] docs: add clarification to batch module (#1045)

* docs: add clarification to batch module

* clarify constraints with batch

* update docs
---
 google/cloud/storage/batch.py             | 22 +++++++++++++++++++++-
 google/cloud/storage/blob.py              |  4 ++++
 google/cloud/storage/bucket.py            | 13 ++++++++++---
 samples/snippets/storage_batch_request.py |  9 ++++++++-
 4 files changed, 43 insertions(+), 5 deletions(-)

diff --git a/google/cloud/storage/batch.py b/google/cloud/storage/batch.py
index 54ef55cd3..03a27fc23 100644
--- a/google/cloud/storage/batch.py
+++ b/google/cloud/storage/batch.py
@@ -13,7 +13,21 @@
 # limitations under the License.
 """Batch updates / deletes of storage buckets / blobs.
 
-See https://cloud.google.com/storage/docs/json_api/v1/how-tos/batch
+A batch request is a single standard HTTP request containing multiple Cloud Storage JSON API calls.
+Within this main HTTP request, there are multiple parts, each of which contains a nested HTTP request.
+The body of each part is itself a complete HTTP request, with its own verb, URL, headers, and body.
+
+Note that Cloud Storage does not support batch operations for uploading or downloading.
+Additionally, the current batch design does not support library methods whose return values
+depend on the response payload. See the official `Sending Batch Requests guide <https://cloud.google.com/storage/docs/batch>`_ for more details.
+
+Examples of methods that are compatible with the Batch module:
+``blob.patch()``
+``blob.update()``
+``blob.delete()``
+``bucket.delete_blob()``
+``bucket.patch()``
+``bucket.update()``
 """
 from email.encoders import encode_noop
 from email.generator import Generator
@@ -131,6 +145,12 @@ def content(self):
 class Batch(Connection):
     """Proxy an underlying connection, batching up change operations.
 
+    .. warning::
+
+        Cloud Storage does not support batch operations for uploading or downloading.
+        Additionally, the current batch design does not support library methods whose
+        return values depend on the response payload.
+
     :type client: :class:`google.cloud.storage.client.Client`
     :param client: The client to use for making connections.
 
diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py
index 8a3f61c72..0d663e775 100644
--- a/google/cloud/storage/blob.py
+++ b/google/cloud/storage/blob.py
@@ -3450,6 +3450,10 @@ def rewrite(
         If :attr:`user_project` is set on the bucket, bills the API request
         to that project.
 
+        .. note::
+
+            ``rewrite`` is not supported in a ``Batch`` context.
+
         :type source: :class:`Blob`
         :param source: blob whose contents will be rewritten into this blob.
 
diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py
index ea52f3b5e..0f615f843 100644
--- a/google/cloud/storage/bucket.py
+++ b/google/cloud/storage/bucket.py
@@ -1482,7 +1482,8 @@ def delete(
         If ``force=True`` and the bucket contains more than 256 objects / blobs
         this will cowardly refuse to delete the objects (or the bucket). This
         is to prevent accidental bucket deletion and to prevent extremely long
-        runtime of this method.
+        runtime of this method. Also note that ``force=True`` is not supported
+        in a ``Batch`` context.
 
         If :attr:`user_project` is set, bills the API request to that project.
 
@@ -1675,6 +1676,7 @@ def delete_blobs(
             Called once for each blob raising
             :class:`~google.cloud.exceptions.NotFound`;
             otherwise, the exception is propagated.
+            Note that ``on_error`` is not supported in a ``Batch`` context.
 
         :type client: :class:`~google.cloud.storage.client.Client`
         :param client: (Optional) The client to use. If not passed, falls back
@@ -1801,6 +1803,8 @@ def copy_blob(
 
         :param preserve_acl: DEPRECATED. This argument is not functional!
             (Optional) Copies ACL from old blob to new blob. Default: True.
+            Note that ``preserve_acl`` is not supported in a
+            ``Batch`` context.
 
         :type source_generation: long
         :param source_generation: (Optional) The generation of the blob to be
@@ -1932,8 +1936,11 @@ def rename_blob(
             old blob. This means that with very large objects renaming
             could be a very (temporarily) costly or a very slow operation.
             If you need more control over the copy and deletion, instead
-            use `google.cloud.storage.blob.Blob.copy_to` and
-            `google.cloud.storage.blob.Blob.delete` directly.
+            use ``google.cloud.storage.blob.Blob.copy_to`` and
+            ``google.cloud.storage.blob.Blob.delete`` directly.
+
+            Also note that this method is not fully supported in a
+            ``Batch`` context.
 
         :type blob: :class:`google.cloud.storage.blob.Blob`
         :param blob: The blob to be renamed.
diff --git a/samples/snippets/storage_batch_request.py b/samples/snippets/storage_batch_request.py
index 863fc09cd..7fe11fb1c 100644
--- a/samples/snippets/storage_batch_request.py
+++ b/samples/snippets/storage_batch_request.py
@@ -28,7 +28,14 @@
 
 
 def batch_request(bucket_name, prefix=None):
-    """Use a batch request to patch a list of objects with the given prefix in a bucket."""
+    """
+    Use a batch request to patch a list of objects with the given prefix in a bucket.
+
+    Note that Cloud Storage does not support batch operations for uploading or downloading.
+    Additionally, the current batch design does not support library methods whose return values
+    depend on the response payload.
+    See https://cloud.google.com/python/docs/reference/storage/latest/google.cloud.storage.batch
+    """
    # The ID of your GCS bucket
    # bucket_name = "my-bucket"
    # The prefix of the object paths
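
For reference, the usage pattern these docstring changes describe looks roughly like the sketch below. It is an illustration only, not part of the patch: the bucket name "my-bucket", the "dir/" prefix, and the metadata key are placeholders, and the objects are assumed to already exist.

from google.cloud import storage

client = storage.Client()

# List the target objects first: listing depends on the response payload,
# so it must happen outside of the batch context.
blobs = list(client.list_blobs("my-bucket", prefix="dir/"))

# Calls issued inside the context manager are buffered and sent to Cloud
# Storage as a single multipart HTTP request when the block exits.
with client.batch():
    for blob in blobs:
        blob.metadata = {"reviewed": "true"}
        blob.patch()  # compatible: patch / update / delete style calls

# Uploads, downloads, and methods whose return values depend on the response
# payload (such as blob.rewrite()) are not supported inside a batch.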