From a784001bf3ceac2faa44e179c64137799f1843e6 Mon Sep 17 00:00:00 2001 From: Margubur Rahman Date: Wed, 11 Feb 2026 10:20:39 +0000 Subject: [PATCH 1/4] feat(storage): add abstracts for blob and bucket --- google/cloud/storage/abstracts/base_blob.py | 243 +++++++++++ google/cloud/storage/abstracts/base_bucket.py | 391 ++++++++++++++++++ google/cloud/storage/blob.py | 3 +- google/cloud/storage/bucket.py | 3 +- 4 files changed, 638 insertions(+), 2 deletions(-) create mode 100644 google/cloud/storage/abstracts/base_blob.py create mode 100644 google/cloud/storage/abstracts/base_bucket.py diff --git a/google/cloud/storage/abstracts/base_blob.py b/google/cloud/storage/abstracts/base_blob.py new file mode 100644 index 000000000..b2a2bc07c --- /dev/null +++ b/google/cloud/storage/abstracts/base_blob.py @@ -0,0 +1,243 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""The abstract for python-storage Blob.""" + +import abc + + +class BaseBlob(abc.ABC): + """The abstract for python-storage Blob""" + + @property + @abc.abstractmethod + def encryption_key(self): + """Retrieve the customer-supplied encryption key for the object.""" + raise NotImplementedError("Not Yet Implemented") + + @encryption_key.setter + @abc.abstractmethod + def encryption_key(self, value): + """Set the blob's encryption key.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def chunk_size(self): + """Get the blob's default chunk size.""" + raise NotImplementedError("Not Yet Implemented") + + @chunk_size.setter + @abc.abstractmethod + def chunk_size(self, value): + """Set the blob's default chunk size.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def metadata(self): + """Retrieve arbitrary/application specific metadata for the object.""" + raise NotImplementedError("Not Yet Implemented") + + @metadata.setter + @abc.abstractmethod + def metadata(self, value): + """Update arbitrary/application specific metadata for the object.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def kms_key_name(self): + """Resource name of Cloud KMS key used to encrypt the blob's contents.""" + raise NotImplementedError("Not Yet Implemented") + + @kms_key_name.setter + @abc.abstractmethod + def kms_key_name(self, value): + """Set KMS encryption key for object.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def custom_time(self): + """Retrieve the custom time for the object.""" + raise NotImplementedError("Not Yet Implemented") + + @custom_time.setter + @abc.abstractmethod + def custom_time(self, value): + """Set the custom time for the object.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def bucket(self): + """Bucket which contains the object.""" + raise 
NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def acl(self): + """Create our ACL on demand.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def path(self): + """Getter property for the URL path to this Blob.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def client(self): + """The client bound to this blob.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def user_project(self): + """Project ID billed for API requests made via this blob.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def public_url(self): + """The public URL for this blob.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def component_count(self): + """Number of underlying components that make up this object.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def etag(self): + """Retrieve the ETag for the object.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def generation(self): + """Retrieve the generation for the object.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def id(self): + """Retrieve the ID for the object.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def media_link(self): + """Retrieve the media download URI for the object.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def metageneration(self): + """Retrieve the metageneration for the object.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def owner(self): + """Retrieve info about the owner of the object.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def 
retention_expiration_time(self): + """Retrieve timestamp at which the object's retention period expires.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def self_link(self): + """Retrieve the URI for the object.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def size(self): + """Size of the object, in bytes.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def time_deleted(self): + """Retrieve the timestamp at which the object was deleted.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def time_created(self): + """Retrieve the timestamp at which the object was created.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def updated(self): + """Retrieve the timestamp at which the object was updated.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def retention(self): + """Retrieve the retention configuration for this object.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def soft_delete_time(self): + """If this object has been soft-deleted, returns the time at which it became soft-deleted.""" + raise NotImplementedError("Not Yet Implemented") + + @property + @abc.abstractmethod + def hard_delete_time(self): + """If this object has been soft-deleted, returns the time at which it will be permanently deleted.""" + raise NotImplementedError("Not Yet Implemented") + + @abc.abstractmethod + def reload( + self, + client=None, + projection="noAcl", + if_etag_match=None, + if_etag_not_match=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=None, + retry=None, + soft_deleted=None, + ): + raise NotImplementedError("Not Yet Implemented.") + + @abc.abstractmethod + def open( + self, + 
mode="r", + chunk_size=None, + ignore_flush=None, + encoding=None, + errors=None, + newline=None, + **kwargs, + ): + """Create a file handler for file-like I/O to or from this blob.""" + raise NotImplementedError("Not Yet Implemented") diff --git a/google/cloud/storage/abstracts/base_bucket.py b/google/cloud/storage/abstracts/base_bucket.py new file mode 100644 index 000000000..3910feb1d --- /dev/null +++ b/google/cloud/storage/abstracts/base_bucket.py @@ -0,0 +1,391 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""The abstract for python-storage Bucket.""" + +import abc + + +class BaseBucket(abc.ABC): + """The abstract for python-storage Bucket""" + + @property + @abc.abstractmethod + def rpo(self): + """Get the RPO (Recovery Point Objective) of this bucket""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def retention_period(self): + """Retrieve or set the retention period for items in the bucket.""" + raise NotImplementedError("Not yet Implemented") + + @retention_period.setter + @abc.abstractmethod + def retention_period(self, value): + """Set the retention period for items in the bucket.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def storage_class(self): + """Retrieve or set the storage class for the bucket.""" + raise NotImplementedError("Not yet Implemented") + + @storage_class.setter + @abc.abstractmethod + def storage_class(self, value): + """Set the storage class for the bucket.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def versioning_enabled(self): + """Is versioning enabled for this bucket?""" + raise NotImplementedError("Not yet Implemented") + + @versioning_enabled.setter + @abc.abstractmethod + def versioning_enabled(self, value): + """Enable versioning for this bucket.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def requester_pays(self): + """Does the requester pay for API requests for this bucket?""" + raise NotImplementedError("Not yet Implemented") + + @requester_pays.setter + @abc.abstractmethod + def requester_pays(self, value): + """Update whether requester pays for API requests for this bucket.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def autoclass_enabled(self): + """Whether Autoclass is enabled for this bucket.""" + raise NotImplementedError("Not yet Implemented") + + @autoclass_enabled.setter + 
@abc.abstractmethod + def autoclass_enabled(self, value): + """Enable or disable Autoclass at the bucket-level.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def autoclass_terminal_storage_class(self): + """The storage class that objects in an Autoclass bucket eventually transition to if + they are not read for a certain length of time. Valid values are NEARLINE and ARCHIVE. + """ + raise NotImplementedError("Not yet Implemented") + + @autoclass_terminal_storage_class.setter + @abc.abstractmethod + def autoclass_terminal_storage_class(self, value): + """The storage class that objects in an Autoclass bucket eventually transition to if + they are not read for a certain length of time. Valid values are NEARLINE and ARCHIVE. + """ + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def hierarchical_namespace_enabled(self): + """Whether hierarchical namespace is enabled for this bucket.""" + raise NotImplementedError("Not yet Implemented") + + @hierarchical_namespace_enabled.setter + @abc.abstractmethod + def hierarchical_namespace_enabled(self, value): + """Enable or disable hierarchical namespace at the bucket-level.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def cors(self): + """Retrieve or set CORS policies configured for this bucket.""" + raise NotImplementedError("Not yet Implemented") + + @cors.setter + @abc.abstractmethod + def cors(self, entries): + """Set CORS policies configured for this bucket.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def default_kms_key_name(self): + """Retrieve / set default KMS encryption key for objects in the bucket.""" + raise NotImplementedError("Not yet Implemented") + + @default_kms_key_name.setter + @abc.abstractmethod + def default_kms_key_name(self, value): + """Set default KMS encryption key for objects in the bucket.""" + raise 
NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def labels(self): + """Retrieve or set labels assigned to this bucket.""" + raise NotImplementedError("Not yet Implemented") + + @labels.setter + @abc.abstractmethod + def labels(self, mapping): + """Set labels assigned to this bucket.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def ip_filter(self): + """Retrieve or set the IP Filter configuration for this bucket.""" + raise NotImplementedError("Not yet Implemented") + + @ip_filter.setter + @abc.abstractmethod + def ip_filter(self, value): + """Retrieve or set the IP Filter configuration for this bucket.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def lifecycle_rules(self): + """Retrieve or set lifecycle rules configured for this bucket.""" + raise NotImplementedError("Not yet Implemented") + + @lifecycle_rules.setter + @abc.abstractmethod + def lifecycle_rules(self, rules): + """Set lifecycle rules configured for this bucket.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def location(self): + """Retrieve location configured for this bucket.""" + raise NotImplementedError("Not yet Implemented") + + @location.setter + @abc.abstractmethod + def location(self, value): + """(Deprecated) Set `Bucket.location`""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def generation(self): + """Retrieve the generation for the bucket.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def soft_delete_time(self): + """If this bucket has been soft-deleted, returns the time at which it became soft-deleted.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def hard_delete_time(self): + """If this bucket has been soft-deleted, returns the time at which it will be permanently deleted.""" + raise 
NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def autoclass_terminal_storage_class_update_time(self): + """The time at which the Autoclass terminal_storage_class field was last updated for this bucket""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def object_retention_mode(self): + """Retrieve the object retention mode set on the bucket.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def user_project(self): + """Project ID to be billed for API requests made via this bucket.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def autoclass_toggle_time(self): + """Retrieve the toggle time when Autoclass was last enabled or disabled for the bucket.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def time_created(self): + """Retrieve the timestamp at which the bucket was created.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def updated(self): + """Retrieve the timestamp at which the bucket was last updated.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def acl(self): + """Create our ACL on demand.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def default_object_acl(self): + """Create our defaultObjectACL on demand.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def etag(self): + """Retrieve the ETag for the bucket.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def id(self): + """Retrieve the ID for the bucket.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def iam_configuration(self): + """Retrieve IAM configuration for this bucket.""" + raise NotImplementedError("Not yet Implemented") + + 
@property + @abc.abstractmethod + def soft_delete_policy(self): + """Retrieve the soft delete policy for this bucket.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def data_locations(self): + """Retrieve the list of regional locations for custom dual-region buckets.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def location_type(self): + """Retrieve the location type for the bucket.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def path(self): + """The URL path to this bucket.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def metageneration(self): + """Retrieve the metageneration for the bucket.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def owner(self): + """Retrieve info about the owner of the bucket.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def project_number(self): + """Retrieve the number of the project to which the bucket is assigned.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def retention_policy_effective_time(self): + """Retrieve the effective time of the bucket's retention policy.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def retention_policy_locked(self): + """Retrieve whether the bucket's retention policy is locked.""" + raise NotImplementedError("Not yet Implemented") + + @property + @abc.abstractmethod + def self_link(self): + """Retrieve the URI for the bucket.""" + raise NotImplementedError("Not yet Implemented") + + @abc.abstractmethod + def reload( + self, + client=None, + projection="noAcl", + timeout=None, + if_etag_match=None, + if_etag_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + retry=None, + soft_deleted=None, + ): + """Load 
the bucket metadata into bucket instance.""" + raise NotImplementedError("Not Implemented Yet") + + @abc.abstractmethod + def patch( + self, + client=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=None, + retry=None, + ): + """Patch the bucket metadata into bucket instance.""" + raise NotImplementedError("Not Implemented Yet") + + @abc.abstractmethod + def blob( + self, + blob_name, + chunk_size=None, + encryption_key=None, + kms_key_name=None, + generation=None, + ): + """Factory constructor for blob object.""" + raise NotImplementedError("Not Implemented Yet") + + @abc.abstractmethod + def get_blob( + self, + blob_name, + client=None, + encryption_key=None, + generation=None, + if_etag_match=None, + if_etag_not_match=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=None, + retry=None, + soft_deleted=None, + **kwargs, + ): + """Get a blob object by name.""" + raise NotImplementedError("Not Implemented Yet") diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index 746334d1c..ba168c298 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -80,6 +80,7 @@ from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED from google.cloud.storage.fileio import BlobReader from google.cloud.storage.fileio import BlobWriter +from google.cloud.storage.abstracts.base_blob import BaseBlob _DEFAULT_CONTENT_TYPE = "application/octet-stream" @@ -150,7 +151,7 @@ _logger = logging.getLogger(__name__) -class Blob(_PropertyMixin): +class Blob(_PropertyMixin, BaseBlob): """A wrapper around Cloud Storage's concept of an ``Object``. 
:type name: str diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index 1621f879e..73fd95f90 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -64,6 +64,7 @@ from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED from google.cloud.storage.retry import DEFAULT_RETRY_IF_ETAG_IN_JSON from google.cloud.storage.retry import DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED +from google.cloud.storage.abstracts.base_bucket import BaseBucket _UBLA_BPO_ENABLED_MESSAGE = ( @@ -621,7 +622,7 @@ def bucket_policy_only_locked_time(self): return self.uniform_bucket_level_access_locked_time -class Bucket(_PropertyMixin): +class Bucket(_PropertyMixin, BaseBucket): """A class representing a Bucket on Cloud Storage. :type client: :class:`google.cloud.storage.client.Client` From 821e77a69a25bb2c88177cb4bcc02190d6d8e62c Mon Sep 17 00:00:00 2001 From: Margubur Rahman Date: Thu, 12 Feb 2026 05:08:42 +0000 Subject: [PATCH 2/4] Add tests for abstracts --- tests/unit/abstracts/__init__.py | 0 tests/unit/abstracts/test_base_blob.py | 69 ++++++++++++++++++ tests/unit/abstracts/test_base_bucket.py | 90 ++++++++++++++++++++++++ 3 files changed, 159 insertions(+) create mode 100644 tests/unit/abstracts/__init__.py create mode 100644 tests/unit/abstracts/test_base_blob.py create mode 100644 tests/unit/abstracts/test_base_bucket.py diff --git a/tests/unit/abstracts/__init__.py b/tests/unit/abstracts/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit/abstracts/test_base_blob.py b/tests/unit/abstracts/test_base_blob.py new file mode 100644 index 000000000..281c2de55 --- /dev/null +++ b/tests/unit/abstracts/test_base_blob.py @@ -0,0 +1,69 @@ +import pytest +from unittest import mock + +from google.cloud.storage.abstracts.base_blob import BaseBlob + + +@pytest.fixture +def base_blob(): + # Temporarily remove abstract methods restriction to allow direct instantiation + with 
mock.patch.object(BaseBlob, "__abstractmethods__", set()): + yield BaseBlob() + + +# Properties that have both getters and setters +READ_WRITE_PROPS = [ + "encryption_key", + "chunk_size", + "metadata", + "kms_key_name", + "custom_time", +] + +# Properties that only have getters +READ_ONLY_PROPS = [ + "bucket", + "acl", + "path", + "client", + "user_project", + "public_url", + "component_count", + "etag", + "generation", + "id", + "media_link", + "metageneration", + "owner", + "retention_expiration_time", + "self_link", + "size", + "time_deleted", + "time_created", + "updated", + "retention", + "soft_delete_time", + "hard_delete_time", +] + + +@pytest.mark.parametrize("prop", READ_WRITE_PROPS + READ_ONLY_PROPS) +def test_property_getters(base_blob, prop): + with pytest.raises(NotImplementedError, match="Not Yet Implemented"): + getattr(base_blob, prop) + + +@pytest.mark.parametrize("prop", READ_WRITE_PROPS) +def test_property_setters(base_blob, prop): + with pytest.raises(NotImplementedError, match="Not Yet Implemented"): + setattr(base_blob, prop, "dummy_value") + + +def test_reload(base_blob): + with pytest.raises(NotImplementedError, match="Not Yet Implemented"): + base_blob.reload() + + +def test_open(base_blob): + with pytest.raises(NotImplementedError, match="Not Yet Implemented"): + base_blob.open() diff --git a/tests/unit/abstracts/test_base_bucket.py b/tests/unit/abstracts/test_base_bucket.py new file mode 100644 index 000000000..2383d9979 --- /dev/null +++ b/tests/unit/abstracts/test_base_bucket.py @@ -0,0 +1,90 @@ +import pytest +from unittest import mock + +from google.cloud.storage.abstracts.base_bucket import BaseBucket + + +@pytest.fixture +def base_bucket(): + # Temporarily remove abstract methods restriction to allow direct instantiation + with mock.patch.object(BaseBucket, "__abstractmethods__", set()): + yield BaseBucket() + + +# Properties that have both getters and setters +READ_WRITE_PROPS = [ + "retention_period", + "storage_class", + 
"versioning_enabled", + "requester_pays", + "autoclass_enabled", + "autoclass_terminal_storage_class", + "hierarchical_namespace_enabled", + "cors", + "default_kms_key_name", + "labels", + "ip_filter", + "lifecycle_rules", + "location", +] + +# Properties that only have getters +READ_ONLY_PROPS = [ + "rpo", + "generation", + "soft_delete_time", + "hard_delete_time", + "autoclass_terminal_storage_class_update_time", + "object_retention_mode", + "user_project", + "autoclass_toggle_time", + "time_created", + "updated", + "acl", + "default_object_acl", + "etag", + "id", + "iam_configuration", + "soft_delete_policy", + "data_locations", + "location_type", + "path", + "metageneration", + "owner", + "project_number", + "retention_policy_effective_time", + "retention_policy_locked", + "self_link", +] + + +@pytest.mark.parametrize("prop", READ_WRITE_PROPS + READ_ONLY_PROPS) +def test_property_getters(base_bucket, prop): + with pytest.raises(NotImplementedError): + getattr(base_bucket, prop) + + +@pytest.mark.parametrize("prop", READ_WRITE_PROPS) +def test_property_setters(base_bucket, prop): + with pytest.raises(NotImplementedError): + setattr(base_bucket, prop, "dummy_value") + + +def test_reload(base_bucket): + with pytest.raises(NotImplementedError): + base_bucket.reload() + + +def test_patch(base_bucket): + with pytest.raises(NotImplementedError): + base_bucket.patch() + + +def test_blob(base_bucket): + with pytest.raises(NotImplementedError): + base_bucket.blob("dummy_blob_name") + + +def test_get_blob(base_bucket): + with pytest.raises(NotImplementedError): + base_bucket.get_blob("dummy_blob_name") From 66d626858ee6a9181f61f6127420ab6233139c1c Mon Sep 17 00:00:00 2001 From: Chandra Shekhar Sirimala Date: Wed, 11 Feb 2026 20:32:57 +0530 Subject: [PATCH 3/4] chore: remove python 3.9 support. (#1748) chore: remove python 3.9 support. 
Details in b/483015736 --- .github/sync-repo-settings.yaml | 2 +- .kokoro/presubmit/{system-3.9.cfg => system-3.10.cfg} | 2 +- .librarian/generator-input/noxfile.py | 7 ++----- .librarian/generator-input/setup.py | 3 --- noxfile.py | 7 +------ setup.py | 5 +---- 6 files changed, 6 insertions(+), 20 deletions(-) rename .kokoro/presubmit/{system-3.9.cfg => system-3.10.cfg} (91%) diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml index 19c1d0ba4..073e7d995 100644 --- a/.github/sync-repo-settings.yaml +++ b/.github/sync-repo-settings.yaml @@ -10,5 +10,5 @@ branchProtectionRules: - 'Kokoro' - 'cla/google' - 'Kokoro system-3.14' - - 'Kokoro system-3.9' + - 'Kokoro system-3.10' - 'OwlBot Post Processor' diff --git a/.kokoro/presubmit/system-3.9.cfg b/.kokoro/presubmit/system-3.10.cfg similarity index 91% rename from .kokoro/presubmit/system-3.9.cfg rename to .kokoro/presubmit/system-3.10.cfg index d21467d02..26958ac2a 100644 --- a/.kokoro/presubmit/system-3.9.cfg +++ b/.kokoro/presubmit/system-3.10.cfg @@ -3,7 +3,7 @@ # Only run this nox session. env_vars: { key: "NOX_SESSION" - value: "system-3.9" + value: "system-3.10" } # Credentials needed to test universe domain. 
diff --git a/.librarian/generator-input/noxfile.py b/.librarian/generator-input/noxfile.py index ca527decd..c9ada0739 100644 --- a/.librarian/generator-input/noxfile.py +++ b/.librarian/generator-input/noxfile.py @@ -27,8 +27,8 @@ BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.14" -SYSTEM_TEST_PYTHON_VERSIONS = ["3.9", "3.14"] -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.10", "3.14"] +UNIT_TEST_PYTHON_VERSIONS = ["3.10", "3.11", "3.12", "3.13", "3.14"] CONFORMANCE_TEST_PYTHON_VERSIONS = ["3.12"] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -44,9 +44,6 @@ "lint", "lint_setup_py", "system", - # TODO(https://github.com/googleapis/python-storage/issues/1499): - # Remove or restore testing for Python 3.7/3.8 - "unit-3.9", "unit-3.10", "unit-3.11", "unit-3.12", diff --git a/.librarian/generator-input/setup.py b/.librarian/generator-input/setup.py index 89971aa33..294e63892 100644 --- a/.librarian/generator-input/setup.py +++ b/.librarian/generator-input/setup.py @@ -87,9 +87,6 @@ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", diff --git a/noxfile.py b/noxfile.py index 14dfb29d0..4c2b70193 100644 --- a/noxfile.py +++ b/noxfile.py @@ -17,7 +17,6 @@ from __future__ import absolute_import import os import pathlib -import re import shutil import nox @@ -27,9 +26,8 @@ BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.14" -SYSTEM_TEST_PYTHON_VERSIONS = ["3.9", "3.14"] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.10", "3.14"] UNIT_TEST_PYTHON_VERSIONS = [ - "3.9", 
"3.10", "3.11", "3.12", @@ -51,9 +49,6 @@ "lint", "lint_setup_py", "system", - # TODO(https://github.com/googleapis/python-storage/issues/1499): - # Remove or restore testing for Python 3.7/3.8 - "unit-3.9", "unit-3.10", "unit-3.11", "unit-3.12", diff --git a/setup.py b/setup.py index b45053856..d3215cff6 100644 --- a/setup.py +++ b/setup.py @@ -99,9 +99,6 @@ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", @@ -114,7 +111,7 @@ packages=packages, install_requires=dependencies, extras_require=extras, - python_requires=">=3.7", + python_requires=">=3.10", include_package_data=True, zip_safe=False, ) From dde3d1d31d420279c09d6f07ca61cac991870357 Mon Sep 17 00:00:00 2001 From: Margubur Rahman Date: Tue, 17 Feb 2026 08:17:09 +0000 Subject: [PATCH 4/4] address comments and linters --- .../_experimental/asyncio/async_client.py | 26 ++++-- .../_experimental/asyncio/async_creds.py | 10 +- .../_experimental/asyncio/async_helpers.py | 9 +- .../cloud/storage/_media/requests/download.py | 1 - google/cloud/storage/abstracts/base_client.py | 7 +- google/cloud/storage/client.py | 8 +- tests/unit/asyncio/test_async_client.py | 91 ++++++++++++------- tests/unit/asyncio/test_async_creds.py | 41 +++++---- tests/unit/asyncio/test_async_helpers.py | 59 ++++++------ tests/unit/test_blob.py | 13 ++- tests/unit/test_bucket.py | 5 + 11 files changed, 159 insertions(+), 111 deletions(-) diff --git a/google/cloud/storage/_experimental/asyncio/async_client.py b/google/cloud/storage/_experimental/asyncio/async_client.py index bd8817a09..3c4fddbca 100644 --- a/google/cloud/storage/_experimental/asyncio/async_client.py +++ 
b/google/cloud/storage/_experimental/asyncio/async_client.py @@ -16,10 +16,14 @@ import functools -from google.cloud.storage._experimental.asyncio.async_helpers import ASYNC_DEFAULT_TIMEOUT +from google.cloud.storage._experimental.asyncio.async_helpers import ( + ASYNC_DEFAULT_TIMEOUT, +) from google.cloud.storage._experimental.asyncio.async_helpers import ASYNC_DEFAULT_RETRY from google.cloud.storage._experimental.asyncio.async_helpers import AsyncHTTPIterator -from google.cloud.storage._experimental.asyncio.async_helpers import _do_nothing_page_start +from google.cloud.storage._experimental.asyncio.async_helpers import ( + _do_nothing_page_start, +) from google.cloud.storage._opentelemetry_tracing import create_trace_span from google.cloud.storage._experimental.asyncio.async_creds import AsyncCredsWrapper from google.cloud.storage.abstracts.base_client import BaseClient @@ -28,6 +32,7 @@ try: from google.auth.aio.transport import sessions + AsyncSession = sessions.AsyncAuthorizedSession _AIO_AVAILABLE = True except ImportError: @@ -70,12 +75,16 @@ def __init__( client_info=client_info, client_options=client_options, extra_headers=extra_headers, - api_key=api_key + api_key=api_key, ) - self.credentials = AsyncCredsWrapper(self._credentials) # self._credential is synchronous. - self._connection = AsyncConnection(self, **self.connection_kw_args) # adapter for async communication + self.credentials = AsyncCredsWrapper( + self._credentials + ) # self._credential is synchronous. 
+ self._connection = AsyncConnection( + self, **self.connection_kw_args + ) # adapter for async communication self._async_http_internal = _async_http - self._async_http_passed_by_user = (_async_http is not None) + self._async_http_passed_by_user = _async_http is not None @property def async_http(self): @@ -86,7 +95,10 @@ def async_http(self): async def close(self): """Close the session, if it exists""" - if self._async_http_internal is not None and not self._async_http_passed_by_user: + if ( + self._async_http_internal is not None + and not self._async_http_passed_by_user + ): await self._async_http_internal.close() async def _get_resource( diff --git a/google/cloud/storage/_experimental/asyncio/async_creds.py b/google/cloud/storage/_experimental/asyncio/async_creds.py index 2fb899b19..e2abc3316 100644 --- a/google/cloud/storage/_experimental/asyncio/async_creds.py +++ b/google/cloud/storage/_experimental/asyncio/async_creds.py @@ -5,21 +5,23 @@ try: from google.auth.aio import credentials as aio_creds_module + BaseCredentials = aio_creds_module.Credentials _AIO_AVAILABLE = True except ImportError: BaseCredentials = object _AIO_AVAILABLE = False + class AsyncCredsWrapper(BaseCredentials): """Wraps synchronous Google Auth credentials to provide an asynchronous interface. Args: - sync_creds (google.auth.credentials.Credentials): The synchronous credentials + sync_creds (google.auth.credentials.Credentials): The synchronous credentials instance to wrap. Raises: - ImportError: If instantiated in an environment where 'google.auth.aio' + ImportError: If instantiated in an environment where 'google.auth.aio' is not available. 
""" @@ -36,9 +38,7 @@ def __init__(self, sync_creds): async def refresh(self, request): """Refreshes the access token.""" loop = asyncio.get_running_loop() - await loop.run_in_executor( - None, self.creds.refresh, Request() - ) + await loop.run_in_executor(None, self.creds.refresh, Request()) @property def valid(self): diff --git a/google/cloud/storage/_experimental/asyncio/async_helpers.py b/google/cloud/storage/_experimental/asyncio/async_helpers.py index bfebfaafa..4a7d78732 100644 --- a/google/cloud/storage/_experimental/asyncio/async_helpers.py +++ b/google/cloud/storage/_experimental/asyncio/async_helpers.py @@ -24,6 +24,7 @@ async def _do_nothing_page_start(iterator, page, response): # pylint: disable=unused-argument pass + class AsyncHTTPIterator(AsyncIterator): """A generic class for iterating through HTTP/JSON API list responses asynchronously. @@ -32,7 +33,7 @@ class AsyncHTTPIterator(AsyncIterator): api_request (Callable): The **async** function to use to make API requests. This must be an awaitable. path (str): The method path to query for the list of items. - item_to_value (Callable[AsyncIterator, Any]): Callable to convert an item + item_to_value (Callable[AsyncIterator, Any]): Callable to convert an item from the type in the JSON response into a native object. items_key (str): The key in the API response where the list of items can be found. @@ -40,7 +41,7 @@ class AsyncHTTPIterator(AsyncIterator): page_size (int): The maximum number of results to fetch per page. max_results (int): The maximum number of results to fetch. extra_params (dict): Extra query string parameters for the API call. - page_start (Callable): Callable to provide special behavior after a new page + page_start (Callable): Callable to provide special behavior after a new page is created. next_token (str): The name of the field used in the response for page tokens. 
""" @@ -137,6 +138,4 @@ def _get_query_params(self): async def _get_next_page_response(self): """Requests the next page from the path provided asynchronously.""" params = self._get_query_params() - return await self.api_request( - method="GET", path=self.path, query_params=params - ) + return await self.api_request(method="GET", path=self.path, query_params=params) diff --git a/google/cloud/storage/_media/requests/download.py b/google/cloud/storage/_media/requests/download.py index 13e049bd3..c5686fcb7 100644 --- a/google/cloud/storage/_media/requests/download.py +++ b/google/cloud/storage/_media/requests/download.py @@ -774,6 +774,5 @@ def flush(self): def has_unconsumed_tail(self) -> bool: return self._decoder.has_unconsumed_tail - else: # pragma: NO COVER _BrotliDecoder = None # type: ignore # pragma: NO COVER diff --git a/google/cloud/storage/abstracts/base_client.py b/google/cloud/storage/abstracts/base_client.py index c2030cb89..ce89a8bec 100644 --- a/google/cloud/storage/abstracts/base_client.py +++ b/google/cloud/storage/abstracts/base_client.py @@ -30,6 +30,7 @@ marker = object() + class BaseClient(ClientWithProject, ABC): """Abstract class for python-storage Client""" @@ -248,7 +249,7 @@ def _connection(self, value): """ if self._base_connection is not None: raise ValueError("Connection already set on client") - self._base_connection = value + self._base_connection = value @property def _use_client_cert(self): @@ -260,9 +261,7 @@ def _use_client_cert(self): if hasattr(mtls, "should_use_client_cert"): use_client_cert = mtls.should_use_client_cert() else: - use_client_cert = ( - os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE") == "true" - ) + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE") == "true" return use_client_cert def _push_batch(self, batch): diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py index afa0b3a4a..4a2c623e9 100644 --- a/google/cloud/storage/client.py +++ b/google/cloud/storage/client.py @@ -50,6 
+50,7 @@ _marker = base_client.marker + def _buckets_page_start(iterator, page, response): """Grab unreachable buckets after a :class:`~google.cloud.iterator.Page` started.""" unreachable = response.get("unreachable", []) @@ -139,15 +140,16 @@ def __init__( client_options=client_options, use_auth_w_custom_endpoint=use_auth_w_custom_endpoint, extra_headers=extra_headers, - api_key=api_key + api_key=api_key, ) # Pass extra_headers to Connection - connection = Connection(self, **self.connection_kw_args) # connection_kw_args would always be set in base class + connection = Connection( + self, **self.connection_kw_args + ) # connection_kw_args would always be set in base class connection.extra_headers = extra_headers self._connection = connection - def get_service_account_email( self, project=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY ): diff --git a/tests/unit/asyncio/test_async_client.py b/tests/unit/asyncio/test_async_client.py index 64481a0d4..e7e232425 100644 --- a/tests/unit/asyncio/test_async_client.py +++ b/tests/unit/asyncio/test_async_client.py @@ -31,7 +31,7 @@ def _make_credentials(): @pytest.mark.skipif( sys.version_info < (3, 10), - reason="Async Client requires Python 3.10+ due to google-auth-library asyncio support" + reason="Async Client requires Python 3.10+ due to google-auth-library asyncio support", ) class TestAsyncClient: @staticmethod @@ -46,7 +46,9 @@ def test_ctor_defaults(self): credentials = _make_credentials() # We mock AsyncConnection to prevent network logic during init - with mock.patch("google.cloud.storage._experimental.asyncio.async_client.AsyncConnection") as MockConn: + with mock.patch( + "google.cloud.storage._experimental.asyncio.async_client.AsyncConnection" + ) as MockConn: client = self._make_one(project=PROJECT, credentials=credentials) assert client.project == PROJECT @@ -66,21 +68,26 @@ def test_ctor_mtls_raises_error(self): credentials = _make_credentials() # Simulate environment where mTLS is enabled - with 
mock.patch("google.cloud.storage.abstracts.base_client.BaseClient._use_client_cert", new_callable=mock.PropertyMock) as mock_mtls: + with mock.patch( + "google.cloud.storage.abstracts.base_client.BaseClient._use_client_cert", + new_callable=mock.PropertyMock, + ) as mock_mtls: mock_mtls.return_value = True - with pytest.raises(ValueError, match="Async Client currently do not support mTLS"): + with pytest.raises( + ValueError, match="Async Client currently do not support mTLS" + ): self._make_one(credentials=credentials) def test_ctor_w_async_http_passed(self): credentials = _make_credentials() async_http = mock.Mock() - with mock.patch("google.cloud.storage._experimental.asyncio.async_client.AsyncConnection"): + with mock.patch( + "google.cloud.storage._experimental.asyncio.async_client.AsyncConnection" + ): client = self._make_one( - project="PROJECT", - credentials=credentials, - _async_http=async_http + project="PROJECT", credentials=credentials, _async_http=async_http ) assert client._async_http_internal is async_http @@ -88,13 +95,17 @@ def test_ctor_w_async_http_passed(self): def test_async_http_property_creates_session(self): credentials = _make_credentials() - with mock.patch("google.cloud.storage._experimental.asyncio.async_client.AsyncConnection"): + with mock.patch( + "google.cloud.storage._experimental.asyncio.async_client.AsyncConnection" + ): client = self._make_one(project="PROJECT", credentials=credentials) assert client._async_http_internal is None # Mock the auth session class - with mock.patch("google.cloud.storage._experimental.asyncio.async_client.AsyncSession") as MockSession: + with mock.patch( + "google.cloud.storage._experimental.asyncio.async_client.AsyncSession" + ) as MockSession: session = client.async_http assert session is MockSession.return_value @@ -102,12 +113,14 @@ def test_async_http_property_creates_session(self): # Should be initialized with the AsyncCredsWrapper, not the raw credentials MockSession.assert_called_once() 
call_kwargs = MockSession.call_args[1] - assert call_kwargs['credentials'] == client.credentials + assert call_kwargs["credentials"] == client.credentials @pytest.mark.asyncio async def test_close_manages_session_lifecycle(self): credentials = _make_credentials() - with mock.patch("google.cloud.storage._experimental.asyncio.async_client.AsyncConnection"): + with mock.patch( + "google.cloud.storage._experimental.asyncio.async_client.AsyncConnection" + ): client = self._make_one(project="PROJECT", credentials=credentials) # 1. Internal session created by client -> Client closes it @@ -123,11 +136,11 @@ async def test_close_ignores_user_session(self): credentials = _make_credentials() user_session = mock.AsyncMock() - with mock.patch("google.cloud.storage._experimental.asyncio.async_client.AsyncConnection"): + with mock.patch( + "google.cloud.storage._experimental.asyncio.async_client.AsyncConnection" + ): client = self._make_one( - project="PROJECT", - credentials=credentials, - _async_http=user_session + project="PROJECT", credentials=credentials, _async_http=user_session ) # 2. 
External session passed by user -> Client DOES NOT close it @@ -140,12 +153,13 @@ async def test_get_resource(self): query_params = {"foo": "bar"} credentials = _make_credentials() - with mock.patch("google.cloud.storage._experimental.asyncio.async_client.AsyncConnection"): + with mock.patch( + "google.cloud.storage._experimental.asyncio.async_client.AsyncConnection" + ): client = self._make_one(project="PROJECT", credentials=credentials) # Mock the connection's api_request - client._connection.api_request = mock.AsyncMock( - return_value="response") + client._connection.api_request = mock.AsyncMock(return_value="response") result = await client._get_resource(path, query_params=query_params) @@ -157,7 +171,7 @@ async def test_get_resource(self): headers=None, timeout=mock.ANY, retry=mock.ANY, - _target_object=None + _target_object=None, ) @pytest.mark.asyncio @@ -166,14 +180,13 @@ async def test_list_resource(self): item_to_value = mock.Mock() credentials = _make_credentials() - with mock.patch("google.cloud.storage._experimental.asyncio.async_client.AsyncConnection"): + with mock.patch( + "google.cloud.storage._experimental.asyncio.async_client.AsyncConnection" + ): client = self._make_one(project="PROJECT", credentials=credentials) iterator = client._list_resource( - path=path, - item_to_value=item_to_value, - max_results=10, - page_token="token" + path=path, item_to_value=item_to_value, max_results=10, page_token="token" ) assert isinstance(iterator, AsyncHTTPIterator) @@ -186,7 +199,9 @@ async def test_patch_resource(self): data = {"key": "val"} credentials = _make_credentials() - with mock.patch("google.cloud.storage._experimental.asyncio.async_client.AsyncConnection"): + with mock.patch( + "google.cloud.storage._experimental.asyncio.async_client.AsyncConnection" + ): client = self._make_one(project="PROJECT", credentials=credentials) client._connection.api_request = mock.AsyncMock() @@ -201,7 +216,7 @@ async def test_patch_resource(self): headers=None, 
timeout=mock.ANY, retry=None, - _target_object=None + _target_object=None, ) @pytest.mark.asyncio @@ -210,7 +225,9 @@ async def test_put_resource(self): data = b"bytes" credentials = _make_credentials() - with mock.patch("google.cloud.storage._experimental.asyncio.async_client.AsyncConnection"): + with mock.patch( + "google.cloud.storage._experimental.asyncio.async_client.AsyncConnection" + ): client = self._make_one(project="PROJECT", credentials=credentials) client._connection.api_request = mock.AsyncMock() @@ -225,7 +242,7 @@ async def test_put_resource(self): headers=None, timeout=mock.ANY, retry=None, - _target_object=None + _target_object=None, ) @pytest.mark.asyncio @@ -234,7 +251,9 @@ async def test_post_resource(self): data = {"source": []} credentials = _make_credentials() - with mock.patch("google.cloud.storage._experimental.asyncio.async_client.AsyncConnection"): + with mock.patch( + "google.cloud.storage._experimental.asyncio.async_client.AsyncConnection" + ): client = self._make_one(project="PROJECT", credentials=credentials) client._connection.api_request = mock.AsyncMock() @@ -249,7 +268,7 @@ async def test_post_resource(self): headers=None, timeout=mock.ANY, retry=None, - _target_object=None + _target_object=None, ) @pytest.mark.asyncio @@ -257,7 +276,9 @@ async def test_delete_resource(self): path = "/b/bucket" credentials = _make_credentials() - with mock.patch("google.cloud.storage._experimental.asyncio.async_client.AsyncConnection"): + with mock.patch( + "google.cloud.storage._experimental.asyncio.async_client.AsyncConnection" + ): client = self._make_one(project="PROJECT", credentials=credentials) client._connection.api_request = mock.AsyncMock() @@ -271,12 +292,14 @@ async def test_delete_resource(self): headers=None, timeout=mock.ANY, retry=mock.ANY, - _target_object=None + _target_object=None, ) def test_bucket_not_implemented(self): credentials = _make_credentials() - with 
mock.patch("google.cloud.storage._experimental.asyncio.async_client.AsyncConnection"): + with mock.patch( + "google.cloud.storage._experimental.asyncio.async_client.AsyncConnection" + ): client = self._make_one(project="PROJECT", credentials=credentials) with pytest.raises(NotImplementedError): diff --git a/tests/unit/asyncio/test_async_creds.py b/tests/unit/asyncio/test_async_creds.py index 0a45bca5d..3dad11fd0 100644 --- a/tests/unit/asyncio/test_async_creds.py +++ b/tests/unit/asyncio/test_async_creds.py @@ -4,28 +4,30 @@ from google.auth import credentials as google_creds from google.cloud.storage._experimental.asyncio import async_creds + @pytest.fixture def mock_aio_modules(): """Patches sys.modules to simulate google.auth.aio existence.""" mock_creds_module = unittest.mock.MagicMock() # We must set the base class to object so our wrapper can inherit safely in tests - mock_creds_module.Credentials = object - + mock_creds_module.Credentials = object + modules = { - 'google.auth.aio': unittest.mock.MagicMock(), - 'google.auth.aio.credentials': mock_creds_module, + "google.auth.aio": unittest.mock.MagicMock(), + "google.auth.aio.credentials": mock_creds_module, } - + with unittest.mock.patch.dict(sys.modules, modules): # We also need to manually flip the flag in the module to True for the test context # because the module was likely already imported with the flag set to False/True # depending on the real environment. - with unittest.mock.patch.object(async_creds, '_AIO_AVAILABLE', True): + with unittest.mock.patch.object(async_creds, "_AIO_AVAILABLE", True): # We also need to ensure BaseCredentials in the module points to our mock # if we want strictly correct inheritance, though duck typing usually suffices. 
- with unittest.mock.patch.object(async_creds, 'BaseCredentials', object): + with unittest.mock.patch.object(async_creds, "BaseCredentials", object): yield + @pytest.fixture def mock_sync_creds(): """Creates a mock of the synchronous Google Credentials object.""" @@ -33,14 +35,15 @@ def mock_sync_creds(): type(creds).valid = unittest.mock.PropertyMock(return_value=True) return creds + @pytest.fixture def async_wrapper(mock_aio_modules, mock_sync_creds): """Instantiates the wrapper with the mock credentials.""" # This instantiation would raise ImportError if mock_aio_modules didn't set _AIO_AVAILABLE=True return async_creds.AsyncCredsWrapper(mock_sync_creds) + class TestAsyncCredsWrapper: - @pytest.mark.asyncio async def test_init_sets_attributes(self, async_wrapper, mock_sync_creds): """Test that the wrapper initializes correctly.""" @@ -51,19 +54,19 @@ async def test_valid_property_delegates(self, async_wrapper, mock_sync_creds): """Test that the .valid property maps to the sync creds .valid property.""" type(mock_sync_creds).valid = unittest.mock.PropertyMock(return_value=True) assert async_wrapper.valid is True - + type(mock_sync_creds).valid = unittest.mock.PropertyMock(return_value=False) assert async_wrapper.valid is False @pytest.mark.asyncio async def test_refresh_offloads_to_executor(self, async_wrapper, mock_sync_creds): - """Test that refresh() gets the running loop and calls sync refresh in executor.""" - with unittest.mock.patch('asyncio.get_running_loop') as mock_get_loop: + """Test that refresh() gets the running loop and calls sync refresh in executor.""" + with unittest.mock.patch("asyncio.get_running_loop") as mock_get_loop: mock_loop = unittest.mock.AsyncMock() mock_get_loop.return_value = mock_loop - + await async_wrapper.refresh(None) - + mock_loop.run_in_executor.assert_called_once() args, _ = mock_loop.run_in_executor.call_args assert args[1] == mock_sync_creds.refresh @@ -72,10 +75,10 @@ async def test_refresh_offloads_to_executor(self, 
async_wrapper, mock_sync_creds async def test_before_request_valid_creds(self, async_wrapper, mock_sync_creds): """Test before_request when credentials are ALREADY valid.""" type(mock_sync_creds).valid = unittest.mock.PropertyMock(return_value=True) - + headers = {} await async_wrapper.before_request(None, "GET", "http://example.com", headers) - + mock_sync_creds.apply.assert_called_once_with(headers) mock_sync_creds.before_request.assert_not_called() @@ -83,12 +86,12 @@ async def test_before_request_valid_creds(self, async_wrapper, mock_sync_creds): async def test_before_request_invalid_creds(self, async_wrapper, mock_sync_creds): """Test before_request when credentials are INVALID (refresh path).""" type(mock_sync_creds).valid = unittest.mock.PropertyMock(return_value=False) - + headers = {} method = "GET" url = "http://example.com" - with unittest.mock.patch('asyncio.get_running_loop') as mock_get_loop: + with unittest.mock.patch("asyncio.get_running_loop") as mock_get_loop: mock_loop = unittest.mock.AsyncMock() mock_get_loop.return_value = mock_loop @@ -101,8 +104,8 @@ async def test_before_request_invalid_creds(self, async_wrapper, mock_sync_creds def test_missing_aio_raises_error(self, mock_sync_creds): """Ensure ImportError is raised if _AIO_AVAILABLE is False.""" # We manually simulate the environment where AIO is missing - with unittest.mock.patch.object(async_creds, '_AIO_AVAILABLE', False): + with unittest.mock.patch.object(async_creds, "_AIO_AVAILABLE", False): with pytest.raises(ImportError) as excinfo: async_creds.AsyncCredsWrapper(mock_sync_creds) - + assert "Failed to import 'google.auth.aio'" in str(excinfo.value) diff --git a/tests/unit/asyncio/test_async_helpers.py b/tests/unit/asyncio/test_async_helpers.py index 58ebbea31..d125f2b57 100644 --- a/tests/unit/asyncio/test_async_helpers.py +++ b/tests/unit/asyncio/test_async_helpers.py @@ -27,7 +27,6 @@ async def _safe_anext(iterator): class TestAsyncHTTPIterator: - def _make_one(self, *args, **kw): 
return AsyncHTTPIterator(*args, **kw) @@ -35,11 +34,9 @@ def _make_one(self, *args, **kw): async def test_iterate_items_single_page(self): """Test simple iteration over one page of results.""" client = mock.Mock() - api_request = mock.AsyncMock() - api_request.return_value = { - "items": ["a", "b"] - } - + api_request = mock.AsyncMock() + api_request.return_value = {"items": ["a", "b"]} + iterator = self._make_one( client=client, api_request=api_request, @@ -53,11 +50,9 @@ async def test_iterate_items_single_page(self): assert results == ["A", "B"] assert iterator.num_results == 2 - assert iterator.page_number == 1 + assert iterator.page_number == 1 api_request.assert_awaited_once_with( - method="GET", - path="/path", - query_params={} + method="GET", path="/path", query_params={} ) @pytest.mark.asyncio @@ -65,14 +60,14 @@ async def test_iterate_items_multiple_pages(self): """Test pagination flow passes tokens correctly.""" client = mock.Mock() api_request = mock.AsyncMock() - + # Setup Response: 2 Pages api_request.side_effect = [ - {"items": ["1", "2"], "nextPageToken": "token-A"}, # Page 1 - {"items": ["3"], "nextPageToken": "token-B"}, # Page 2 - {"items": []} # Page 3 (Empty/End) + {"items": ["1", "2"], "nextPageToken": "token-A"}, # Page 1 + {"items": ["3"], "nextPageToken": "token-B"}, # Page 2 + {"items": []}, # Page 3 (Empty/End) ] - + iterator = self._make_one( client=client, api_request=api_request, @@ -84,7 +79,7 @@ async def test_iterate_items_multiple_pages(self): assert results == [1, 2, 3] assert api_request.call_count == 3 - + calls = api_request.call_args_list assert calls[0].kwargs["query_params"] == {} assert calls[1].kwargs["query_params"] == {"pageToken": "token-A"} @@ -95,12 +90,12 @@ async def test_iterate_pages_public_property(self): """Test the .pages property which yields Page objects instead of items.""" client = mock.Mock() api_request = mock.AsyncMock() - + api_request.side_effect = [ {"items": ["a"], "nextPageToken": "next"}, - 
{"items": ["b"]} + {"items": ["b"]}, ] - + iterator = self._make_one( client=client, api_request=api_request, @@ -115,7 +110,7 @@ async def test_iterate_pages_public_property(self): assert len(pages) == 2 assert list(pages[0]) == ["a"] - assert list(pages[1]) == ["b"] + assert list(pages[1]) == ["b"] assert iterator.page_number == 2 @pytest.mark.asyncio @@ -123,7 +118,7 @@ async def test_max_results_limits_requests(self): """Test that max_results alters the request parameters dynamically.""" client = mock.Mock() api_request = mock.AsyncMock() - + # Setup: We want 5 items total. # Page 1 returns 3 items. # Page 2 *should* only be asked for 2 items. @@ -131,24 +126,24 @@ async def test_max_results_limits_requests(self): {"items": ["a", "b", "c"], "nextPageToken": "t1"}, {"items": ["d", "e"], "nextPageToken": "t2"}, ] - + iterator = self._make_one( client=client, api_request=api_request, path="/path", item_to_value=lambda _, x: x, - max_results=5 # <--- Limit set here + max_results=5, # <--- Limit set here ) results = [i async for i in iterator] assert len(results) == 5 assert results == ["a", "b", "c", "d", "e"] - + # Verify Request 1: Asked for max 5 call1_params = api_request.call_args_list[0].kwargs["query_params"] assert call1_params["maxResults"] == 5 - + # Verify Request 2: Asked for max 2 (5 - 3 already fetched) call2_params = api_request.call_args_list[1].kwargs["query_params"] assert call2_params["maxResults"] == 2 @@ -159,15 +154,15 @@ async def test_extra_params_passthrough(self): """Test that extra_params are merged into every request.""" client = mock.Mock() api_request = mock.AsyncMock(return_value={"items": []}) - + custom_params = {"projection": "full", "delimiter": "/"} - + iterator = self._make_one( client=client, api_request=api_request, path="/path", item_to_value=mock.Mock(), - extra_params=custom_params # <--- Input + extra_params=custom_params, # <--- Input ) # Trigger a request @@ -183,13 +178,13 @@ async def 
test_page_size_configuration(self): """Test that page_size is sent as maxResults if no global max_results is set.""" client = mock.Mock() api_request = mock.AsyncMock(return_value={"items": []}) - + iterator = self._make_one( client=client, api_request=api_request, path="/path", item_to_value=mock.Mock(), - page_size=50 # <--- User preference + page_size=50, # <--- User preference ) await _safe_anext(iterator) @@ -210,7 +205,7 @@ async def test_page_start_callback(self): api_request=api_request, path="/path", item_to_value=lambda _, x: x, - page_start=callback + page_start=callback, ) # Run iteration @@ -258,7 +253,7 @@ async def test_error_if_iterated_twice(self): # First Start async for _ in iterator: pass - + # Second Start (Should Fail) with pytest.raises(ValueError, match="Iterator has already started"): async for _ in iterator: diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index cbf53b398..a02f347ba 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -34,6 +34,7 @@ from google.cloud.storage._helpers import _DEFAULT_UNIVERSE_DOMAIN from google.cloud.storage._helpers import _NOW from google.cloud.storage._helpers import _UTC +from google.cloud.storage.abstracts.base_blob import BaseBlob from google.cloud.storage.exceptions import DataCorruption from google.cloud.storage.exceptions import InvalidResponse from google.cloud.storage.retry import DEFAULT_RETRY @@ -71,6 +72,10 @@ def _make_client(*args, **kw): kw["api_endpoint"] = kw.get("api_endpoint") or _get_default_storage_base_url() return mock.create_autospec(Client, instance=True, **kw) + def test_inherit_from_base_blob(self): + blob = self._make_one("random-blob", bucket=_Bucket()) + self.assertEqual(isinstance(blob, BaseBlob), True) + def test_ctor_wo_encryption_key(self): BLOB_NAME = "blob-name" bucket = _Bucket() @@ -3064,7 +3069,13 @@ def _make_resumable_transport( fake_response2 = self._mock_requests_response( http.client.PERMANENT_REDIRECT, headers2 ) - json_body = 
json.dumps({"size": str(total_bytes), "md5Hash": md5_checksum_value, "crc32c": crc32c_checksum_value}) + json_body = json.dumps( + { + "size": str(total_bytes), + "md5Hash": md5_checksum_value, + "crc32c": crc32c_checksum_value, + } + ) if data_corruption: fake_response3 = DataCorruption(None) else: diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py index 850e89d04..3e15bc409 100644 --- a/tests/unit/test_bucket.py +++ b/tests/unit/test_bucket.py @@ -27,6 +27,7 @@ from google.cloud.storage.constants import PUBLIC_ACCESS_PREVENTION_INHERITED from google.cloud.storage.constants import PUBLIC_ACCESS_PREVENTION_UNSPECIFIED from google.cloud.storage.constants import RPO_DEFAULT +from google.cloud.storage.abstracts.base_bucket import BaseBucket from google.cloud.storage.constants import RPO_ASYNC_TURBO from google.cloud.storage._helpers import _NOW from google.cloud.storage._helpers import _UTC @@ -624,6 +625,10 @@ def _make_one( bucket._properties = {**bucket._properties, **properties} return bucket + def test_inherit_from_base_bucket(self): + bucket = self._make_one(name="random-bucket") + self.assertEqual(isinstance(bucket, BaseBucket), True) + def test_ctor_w_invalid_name(self): NAME = "#invalid" with self.assertRaises(ValueError):