diff --git a/docs/snippets.py b/docs/snippets.py
index 89f92a20b..85233fa0a 100644
--- a/docs/snippets.py
+++ b/docs/snippets.py
@@ -260,9 +260,7 @@ def policy_document(client):
# Generate an upload form using the form fields.
policy_fields = "".join(
- '<input type="hidden" name="{key}" value="{value}">'.format(
- key=key, value=value
- )
+ f'<input type="hidden" name="{key}" value="{value}">'
for key, value in policy.items()
)
@@ -301,13 +299,13 @@ def main():
client = storage.Client()
for example in _find_examples():
to_delete = []
- print("%-25s: %s" % _name_and_doc(example))
+ print("{:<25}: {}".format(*_name_and_doc(example)))
try:
example(client, to_delete)
except AssertionError as failure:
- print(" FAIL: %s" % (failure,))
+ print(f" FAIL: {failure}")
except Exception as error: # pylint: disable=broad-except
- print(" ERROR: %r" % (error,))
+ print(f" ERROR: {error!r}")
for item in to_delete:
item.delete()
diff --git a/google/cloud/storage/_helpers.py b/google/cloud/storage/_helpers.py
index cc85525d8..282d9bcfb 100644
--- a/google/cloud/storage/_helpers.py
+++ b/google/cloud/storage/_helpers.py
@@ -519,14 +519,12 @@ def _raise_if_more_than_one_set(**kwargs):
:raises: :class:`~ValueError` containing the fields that were set
"""
if sum(arg is not None for arg in kwargs.values()) > 1:
- escaped_keys = ["'%s'" % name for name in kwargs.keys()]
+ escaped_keys = [f"'{name}'" for name in kwargs.keys()]
keys_but_last = ", ".join(escaped_keys[:-1])
last_key = escaped_keys[-1]
- msg = "Pass at most one of {keys_but_last} and {last_key}".format(
- keys_but_last=keys_but_last, last_key=last_key
- )
+ msg = f"Pass at most one of {keys_but_last} and {last_key}"
raise ValueError(msg)
@@ -548,7 +546,7 @@ def _bucket_bound_hostname_url(host, scheme=None):
if url_parts.scheme and url_parts.netloc:
return host
- return "{scheme}://{host}/".format(scheme=scheme, host=host)
+ return f"{scheme}://{host}/"
def _api_core_retry_to_resumable_media_retry(retry, num_retries=None):
diff --git a/google/cloud/storage/_http.py b/google/cloud/storage/_http.py
index 9b29f6280..3a739bba6 100644
--- a/google/cloud/storage/_http.py
+++ b/google/cloud/storage/_http.py
@@ -48,9 +48,9 @@ def __init__(self, client, client_info=None, api_endpoint=None):
# TODO: When metrics all use gccl, this should be removed #9552
if self._client_info.user_agent is None: # pragma: no branch
self._client_info.user_agent = ""
- agent_version = "gcloud-python/{}".format(__version__)
+ agent_version = f"gcloud-python/{__version__}"
if agent_version not in self._client_info.user_agent:
- self._client_info.user_agent += " {} ".format(agent_version)
+ self._client_info.user_agent += f" {agent_version} "
API_VERSION = "v1"
"""The version of the API, used in building the API call's URL."""
diff --git a/google/cloud/storage/_signing.py b/google/cloud/storage/_signing.py
index 837ef6211..2c4215802 100644
--- a/google/cloud/storage/_signing.py
+++ b/google/cloud/storage/_signing.py
@@ -158,7 +158,7 @@ def get_expiration_seconds_v4(expiration):
if seconds > SEVEN_DAYS:
raise ValueError(
- "Max allowed expiration interval is seven days {}".format(SEVEN_DAYS)
+ f"Max allowed expiration interval is seven days {SEVEN_DAYS}"
)
return seconds
@@ -252,7 +252,7 @@ def canonicalize_v2(method, resource, query_parameters, headers):
for key, value in query_parameters.items()
)
encoded_qp = urllib.parse.urlencode(normalized_qp)
- canonical_resource = "{}?{}".format(resource, encoded_qp)
+ canonical_resource = f"{resource}?{encoded_qp}"
return _Canonical(method, canonical_resource, normalized_qp, headers)
@@ -550,8 +550,8 @@ def generate_signed_url_v4(
ensure_signed_credentials(credentials)
client_email = credentials.signer_email
- credential_scope = "{}/auto/storage/goog4_request".format(datestamp)
- credential = "{}/{}".format(client_email, credential_scope)
+ credential_scope = f"{datestamp}/auto/storage/goog4_request"
+ credential = f"{client_email}/{credential_scope}"
if headers is None:
headers = {}
@@ -689,7 +689,7 @@ def _sign_message(message, access_token, service_account_email):
if response.status != http.client.OK:
raise exceptions.TransportError(
- "Error calling the IAM signBytes API: {}".format(response.data)
+ f"Error calling the IAM signBytes API: {response.data}"
)
data = json.loads(response.data.decode("utf-8"))
@@ -706,7 +706,7 @@ def _url_encode(query_params):
:returns: URL encoded query params.
"""
params = [
- "{}={}".format(_quote_param(name), _quote_param(value))
+ f"{_quote_param(name)}={_quote_param(value)}"
for name, value in query_params.items()
]
diff --git a/google/cloud/storage/acl.py b/google/cloud/storage/acl.py
index ef2bca356..e876c152c 100644
--- a/google/cloud/storage/acl.py
+++ b/google/cloud/storage/acl.py
@@ -120,9 +120,7 @@ def __str__(self):
return "{acl.type}-{acl.identifier}".format(acl=self)
def __repr__(self):
- return "<ACL Entity: {acl} ({roles})>".format(
- acl=self, roles=", ".join(self.roles)
- )
+ return f"<ACL Entity: {self} ({', '.join(self.roles)})>"
def get_roles(self):
"""Get the list of roles permitted by this entity.
@@ -242,7 +240,7 @@ def validate_predefined(cls, predefined):
"""
predefined = cls.PREDEFINED_XML_ACLS.get(predefined, predefined)
if predefined and predefined not in cls.PREDEFINED_JSON_ACLS:
- raise ValueError("Invalid predefined ACL: %s" % (predefined,))
+ raise ValueError(f"Invalid predefined ACL: {predefined}")
return predefined
def reset(self):
@@ -285,7 +283,7 @@ def entity_from_dict(self, entity_dict):
entity = self.entity(entity_type=entity_type, identifier=identifier)
if not isinstance(entity, _ACLEntity):
- raise ValueError("Invalid dictionary: %s" % entity_dict)
+ raise ValueError(f"Invalid dictionary: {entity_dict}")
entity.grant(role)
return entity
@@ -770,7 +768,7 @@ def client(self):
@property
def reload_path(self):
"""Compute the path for GET API requests for this ACL."""
- return "%s/%s" % (self.bucket.path, self._URL_PATH_ELEM)
+ return f"{self.bucket.path}/{self._URL_PATH_ELEM}"
@property
def save_path(self):
@@ -809,7 +807,7 @@ def client(self):
@property
def reload_path(self):
"""Compute the path for GET API requests for this ACL."""
- return "%s/acl" % self.blob.path
+ return f"{self.blob.path}/acl"
@property
def save_path(self):
diff --git a/google/cloud/storage/batch.py b/google/cloud/storage/batch.py
index cbc93397f..a1b4cbfdc 100644
--- a/google/cloud/storage/batch.py
+++ b/google/cloud/storage/batch.py
@@ -57,9 +57,9 @@ def __init__(self, method, uri, headers, body):
headers["Content-Length"] = len(body)
if body is None:
body = ""
- lines = ["%s %s HTTP/1.1" % (method, uri)]
+ lines = [f"{method} {uri} HTTP/1.1"]
lines.extend(
- ["%s: %s" % (key, value) for key, value in sorted(headers.items())]
+ [f"{key}: {value}" for key, value in sorted(headers.items())]
)
lines.append("")
lines.append(body)
@@ -86,7 +86,7 @@ def get(key, default=None):
:raises: :class:`KeyError` always since the future is intended to fail
as a dictionary.
"""
- raise KeyError("Cannot get(%r, default=%r) on a future" % (key, default))
+ raise KeyError(f"Cannot get({key!r}, default={default!r}) on a future")
def __getitem__(self, key):
"""Stand-in for dict[key].
@@ -97,7 +97,7 @@ def __getitem__(self, key):
:raises: :class:`KeyError` always since the future is intended to fail
as a dictionary.
"""
- raise KeyError("Cannot get item %r from a future" % (key,))
+ raise KeyError(f"Cannot get item {key!r} from a future")
def __setitem__(self, key, value):
"""Stand-in for dict[key] = value.
@@ -111,7 +111,7 @@ def __setitem__(self, key, value):
:raises: :class:`KeyError` always since the future is intended to fail
as a dictionary.
"""
- raise KeyError("Cannot set %r -> %r on a future" % (key, value))
+ raise KeyError(f"Cannot set {key!r} -> {value!r} on a future")
class _FutureResponse(requests.Response):
@@ -257,7 +257,7 @@ def finish(self):
"""
headers, body, timeout = self._prepare_batch_request()
- url = "%s/batch/storage/v1" % self.API_BASE_URL
+ url = f"{self.API_BASE_URL}/batch/storage/v1"
# Use the private ``_base_connection`` rather than the property
# ``_connection``, since the property may be this
@@ -332,7 +332,7 @@ def _unpack_batch_response(response):
subresponse = requests.Response()
subresponse.request = requests.Request(
- method="BATCH", url="contentid://{}".format(content_id)
+ method="BATCH", url=f"contentid://{content_id}"
).prepare()
subresponse.status_code = int(status)
subresponse.headers.update(msg_headers)
diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py
index 8a2b5861c..a89430a49 100644
--- a/google/cloud/storage/blob.py
+++ b/google/cloud/storage/blob.py
@@ -312,7 +312,7 @@ def __repr__(self):
else:
bucket_name = None
- return "<Blob: %s, %s, %s>" % (bucket_name, self.name, self.generation)
+ return f"<Blob: {bucket_name}, {self.name}, {self.generation}>"
@property
def path(self):
@@ -575,20 +575,16 @@ def generate_signed_url(
quoted_name = _quote(self.name, safe=b"/~")
if virtual_hosted_style:
- api_access_endpoint = "https://{bucket_name}.storage.googleapis.com".format(
- bucket_name=self.bucket.name
- )
+ api_access_endpoint = f"https://{self.bucket.name}.storage.googleapis.com"
elif bucket_bound_hostname:
api_access_endpoint = _bucket_bound_hostname_url(
bucket_bound_hostname, scheme
)
else:
- resource = "/{bucket_name}/{quoted_name}".format(
- bucket_name=self.bucket.name, quoted_name=quoted_name
- )
+ resource = f"/{self.bucket.name}/{quoted_name}"
if virtual_hosted_style or bucket_bound_hostname:
- resource = "/{quoted_name}".format(quoted_name=quoted_name)
+ resource = f"/{quoted_name}"
if credentials is None:
client = self._require_client(client)
@@ -840,7 +836,7 @@ def _get_download_url(
hostname = _get_host_name(client._connection)
base_url = _DOWNLOAD_URL_TEMPLATE.format(hostname=hostname, path=self.path)
if self.generation is not None:
- name_value_pairs.append(("generation", "{:d}".format(self.generation)))
+ name_value_pairs.append(("generation", f"{self.generation:d}"))
else:
base_url = self.media_link
@@ -3095,7 +3091,7 @@ def get_iam_policy(
query_params["optionsRequestedPolicyVersion"] = requested_policy_version
info = client._get_resource(
- "%s/iam" % (self.path,),
+ f"{self.path}/iam",
query_params=query_params,
timeout=timeout,
retry=retry,
@@ -3151,7 +3147,7 @@ def set_iam_policy(
if self.user_project is not None:
query_params["userProject"] = self.user_project
- path = "{}/iam".format(self.path)
+ path = f"{self.path}/iam"
resource = policy.to_api_repr()
resource["resourceId"] = self.path
info = client._put_resource(
@@ -3207,7 +3203,7 @@ def test_iam_permissions(
if self.user_project is not None:
query_params["userProject"] = self.user_project
- path = "%s/iam/testPermissions" % (self.path,)
+ path = f"{self.path}/iam/testPermissions"
resp = client._get_resource(
path,
query_params=query_params,
@@ -3462,7 +3458,7 @@ def compose(
)
api_response = client._post_resource(
- "{}/compose".format(self.path),
+ f"{self.path}/compose",
request,
query_params=query_params,
timeout=timeout,
@@ -3595,7 +3591,7 @@ def rewrite(
if_source_metageneration_not_match=if_source_metageneration_not_match,
)
- path = "{}/rewriteTo{}".format(source.path, self.path)
+ path = f"{source.path}/rewriteTo{self.path}"
api_response = client._post_resource(
path,
self._properties,
@@ -3712,7 +3708,7 @@ def update_storage_class(
(Optional) How to retry the RPC. See: :ref:`configuring_retries`
"""
if new_class not in self.STORAGE_CLASSES:
- raise ValueError("Invalid storage class: %s" % (new_class,))
+ raise ValueError(f"Invalid storage class: {new_class}")
# Update current blob's storage class prior to rewrite
self._patch_property("storageClass", new_class)
@@ -4448,9 +4444,7 @@ def _raise_from_invalid_response(error):
else:
error_message = str(error)
- message = "{method} {url}: {error}".format(
- method=response.request.method, url=response.request.url, error=error_message
- )
+ message = f"{response.request.method} {response.request.url}: {error_message}"
raise exceptions.from_http_status(response.status_code, message, response=response)
diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py
index be99ad141..2448ba4e7 100644
--- a/google/cloud/storage/bucket.py
+++ b/google/cloud/storage/bucket.py
@@ -647,7 +647,7 @@ def __init__(self, client, name=None, user_project=None):
self._user_project = user_project
def __repr__(self):
- return "<Bucket: %s>" % (self.name,)
+ return f"<Bucket: {self.name}>"
@property
def client(self):
@@ -2600,7 +2600,7 @@ def storage_class(self, value):
:attr:`~google.cloud.storage.constants.DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS`,
"""
if value not in self.STORAGE_CLASSES:
- raise ValueError("Invalid storage class: %s" % (value,))
+ raise ValueError(f"Invalid storage class: {value}")
self._patch_property("storageClass", value)
@property
@@ -2801,7 +2801,7 @@ def get_iam_policy(
query_params["optionsRequestedPolicyVersion"] = requested_policy_version
info = client._get_resource(
- "%s/iam" % (self.path,),
+ f"{self.path}/iam",
query_params=query_params,
timeout=timeout,
retry=retry,
@@ -2850,7 +2850,7 @@ def set_iam_policy(
if self.user_project is not None:
query_params["userProject"] = self.user_project
- path = "{}/iam".format(self.path)
+ path = f"{self.path}/iam"
resource = policy.to_api_repr()
resource["resourceId"] = self.path
@@ -2902,7 +2902,7 @@ def test_iam_permissions(
if self.user_project is not None:
query_params["userProject"] = self.user_project
- path = "%s/iam/testPermissions" % (self.path,)
+ path = f"{self.path}/iam/testPermissions"
resp = client._get_resource(
path,
query_params=query_params,
@@ -3207,7 +3207,7 @@ def lock_retention_policy(
if self.user_project is not None:
query_params["userProject"] = self.user_project
- path = "/b/{}/lockRetentionPolicy".format(self.name)
+ path = f"/b/{self.name}/lockRetentionPolicy"
api_response = client._post_resource(
path,
None,
@@ -3341,15 +3341,13 @@ def generate_signed_url(
raise ValueError("'version' must be either 'v2' or 'v4'")
if virtual_hosted_style:
- api_access_endpoint = "https://{bucket_name}.storage.googleapis.com".format(
- bucket_name=self.name
- )
+ api_access_endpoint = f"https://{self.name}.storage.googleapis.com"
elif bucket_bound_hostname:
api_access_endpoint = _bucket_bound_hostname_url(
bucket_bound_hostname, scheme
)
else:
- resource = "/{bucket_name}".format(bucket_name=self.name)
+ resource = f"/{self.name}"
if virtual_hosted_style or bucket_bound_hostname:
resource = "/"
@@ -3390,5 +3388,5 @@ def _raise_if_len_differs(expected_len, **generation_match_args):
for name, value in generation_match_args.items():
if value is not None and len(value) != expected_len:
raise ValueError(
- "'{}' length must be the same as 'blobs' length".format(name)
+ f"'{name}' length must be the same as 'blobs' length"
)
diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py
index 8b63a0198..f905e1be0 100644
--- a/google/cloud/storage/client.py
+++ b/google/cloud/storage/client.py
@@ -272,7 +272,7 @@ def get_service_account_email(
if project is None:
project = self.project
- path = "/projects/%s/serviceAccount" % (project,)
+ path = f"/projects/{project}/serviceAccount"
api_response = self._get_resource(path, timeout=timeout, retry=retry)
return api_response["email_address"]
@@ -1471,7 +1471,7 @@ def create_hmac_key(
if project_id is None:
project_id = self.project
- path = "/projects/{}/hmacKeys".format(project_id)
+ path = f"/projects/{project_id}/hmacKeys"
qs_params = {"serviceAccountEmail": service_account_email}
if user_project is not None:
@@ -1537,7 +1537,7 @@ def list_hmac_keys(
if project_id is None:
project_id = self.project
- path = "/projects/{}/hmacKeys".format(project_id)
+ path = f"/projects/{project_id}/hmacKeys"
extra_params = {}
if service_account_email is not None:
@@ -1747,11 +1747,11 @@ def generate_signed_post_policy_v4(
)
# designate URL
if virtual_hosted_style:
- url = "https://{}.storage.googleapis.com/".format(bucket_name)
+ url = f"https://{bucket_name}.storage.googleapis.com/"
elif bucket_bound_hostname:
url = _bucket_bound_hostname_url(bucket_bound_hostname, scheme)
else:
- url = "https://storage.googleapis.com/{}/".format(bucket_name)
+ url = f"https://storage.googleapis.com/{bucket_name}/"
return {"url": url, "fields": policy_fields}
diff --git a/google/cloud/storage/fileio.py b/google/cloud/storage/fileio.py
index e05663675..bb1dcc598 100644
--- a/google/cloud/storage/fileio.py
+++ b/google/cloud/storage/fileio.py
@@ -106,7 +106,7 @@ def __init__(self, blob, chunk_size=None, retry=DEFAULT_RETRY, **download_kwargs
for kwarg in download_kwargs:
if kwarg not in VALID_DOWNLOAD_KWARGS:
raise ValueError(
- "BlobReader does not support keyword argument {}.".format(kwarg)
+ f"BlobReader does not support keyword argument {kwarg}."
)
self._blob = blob
@@ -304,7 +304,7 @@ def __init__(
for kwarg in upload_kwargs:
if kwarg not in VALID_UPLOAD_KWARGS:
raise ValueError(
- "BlobWriter does not support keyword argument {}.".format(kwarg)
+ f"BlobWriter does not support keyword argument {kwarg}."
)
self._blob = blob
self._buffer = SlidingBuffer()
diff --git a/google/cloud/storage/hmac_key.py b/google/cloud/storage/hmac_key.py
index 1636aaba4..944bc7f87 100644
--- a/google/cloud/storage/hmac_key.py
+++ b/google/cloud/storage/hmac_key.py
@@ -133,9 +133,7 @@ def state(self):
def state(self, value):
if value not in self._SETTABLE_STATES:
raise ValueError(
- "State may only be set to one of: {}".format(
- ", ".join(self._SETTABLE_STATES)
- )
+ f"State may only be set to one of: {', '.join(self._SETTABLE_STATES)}"
)
self._properties["state"] = value
@@ -177,7 +175,7 @@ def path(self):
if project is None:
project = self._client.project
- return "/projects/{}/hmacKeys/{}".format(project, self.access_id)
+ return f"/projects/{project}/hmacKeys/{self.access_id}"
@property
def user_project(self):
diff --git a/google/cloud/storage/notification.py b/google/cloud/storage/notification.py
index 0cdb87fa8..f7e72e710 100644
--- a/google/cloud/storage/notification.py
+++ b/google/cloud/storage/notification.py
@@ -202,9 +202,7 @@ def client(self):
@property
def path(self):
"""The URL path for this notification."""
- return "/b/{}/notificationConfigs/{}".format(
- self.bucket.name, self.notification_id
- )
+ return f"/b/{self.bucket.name}/notificationConfigs/{self.notification_id}"
def _require_client(self, client):
"""Check client or verify over-ride.
@@ -254,7 +252,7 @@ def create(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=None):
"""
if self.notification_id is not None:
raise ValueError(
- "Notification already exists w/ id: {}".format(self.notification_id)
+ f"Notification already exists w/ id: {self.notification_id}"
)
client = self._require_client(client)
@@ -263,7 +261,7 @@ def create(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=None):
if self.bucket.user_project is not None:
query_params["userProject"] = self.bucket.user_project
- path = "/b/{}/notificationConfigs".format(self.bucket.name)
+ path = f"/b/{self.bucket.name}/notificationConfigs"
properties = self._properties.copy()
if self.topic_name is None:
diff --git a/samples/snippets/acl_test.py b/samples/snippets/acl_test.py
index 91856d816..eecee522b 100644
--- a/samples/snippets/acl_test.py
+++ b/samples/snippets/acl_test.py
@@ -46,7 +46,7 @@ def test_bucket():
os.environ["GOOGLE_CLOUD_PROJECT"] = os.environ["MAIN_GOOGLE_CLOUD_PROJECT"]
bucket = None
while bucket is None or bucket.exists():
- bucket_name = "acl-test-{}".format(uuid.uuid4())
+ bucket_name = f"acl-test-{uuid.uuid4()}"
bucket = storage.Client().bucket(bucket_name)
bucket.create()
yield bucket
@@ -59,7 +59,7 @@ def test_bucket():
def test_blob(test_bucket):
"""Yields a blob that is deleted after the test completes."""
bucket = test_bucket
- blob = bucket.blob("storage_acl_test_sigil-{}".format(uuid.uuid4()))
+ blob = bucket.blob(f"storage_acl_test_sigil-{uuid.uuid4()}")
blob.upload_from_string("Hello, is it me you're looking for?")
yield blob
diff --git a/samples/snippets/bucket_lock_test.py b/samples/snippets/bucket_lock_test.py
index 67d4ec685..9b7b4fa2a 100644
--- a/samples/snippets/bucket_lock_test.py
+++ b/samples/snippets/bucket_lock_test.py
@@ -42,7 +42,7 @@ def bucket():
"""Yields a bucket that is deleted after the test completes."""
bucket = None
while bucket is None or bucket.exists():
- bucket_name = "bucket-lock-{}".format(uuid.uuid4())
+ bucket_name = f"bucket-lock-{uuid.uuid4()}"
bucket = storage.Client().bucket(bucket_name)
bucket.create()
yield bucket
@@ -61,7 +61,7 @@ def test_retention_policy_no_lock(bucket, capsys):
storage_get_retention_policy.get_retention_policy(bucket.name)
out, _ = capsys.readouterr()
- assert "Retention Policy for {}".format(bucket.name) in out
+ assert f"Retention Policy for {bucket.name}" in out
assert "Retention Period: 5" in out
assert "Effective Time: " in out
assert "Retention Policy is locked" not in out
@@ -100,11 +100,11 @@ def test_enable_disable_bucket_default_event_based_hold(bucket, capsys):
)
out, _ = capsys.readouterr()
assert (
- "Default event-based hold is not enabled for {}".format(bucket.name)
+ f"Default event-based hold is not enabled for {bucket.name}"
in out
)
assert (
- "Default event-based hold is enabled for {}".format(bucket.name)
+ f"Default event-based hold is enabled for {bucket.name}"
not in out
)
@@ -120,7 +120,7 @@ def test_enable_disable_bucket_default_event_based_hold(bucket, capsys):
)
out, _ = capsys.readouterr()
assert (
- "Default event-based hold is enabled for {}".format(bucket.name) in out
+ f"Default event-based hold is enabled for {bucket.name}" in out
)
# Changes to the bucket will be readable immediately after writing,
diff --git a/samples/snippets/encryption_test.py b/samples/snippets/encryption_test.py
index 6c2377e0f..536c5d334 100644
--- a/samples/snippets/encryption_test.py
+++ b/samples/snippets/encryption_test.py
@@ -62,7 +62,7 @@ def test_upload_encrypted_blob():
def test_blob():
"""Provides a pre-existing blob in the test bucket."""
bucket = storage.Client().bucket(BUCKET)
- blob_name = "test_blob_{}".format(uuid.uuid4().hex)
+ blob_name = f"test_blob_{uuid.uuid4().hex}"
blob = Blob(
blob_name,
bucket,
@@ -81,7 +81,7 @@ def test_blob():
blob.delete()
except NotFound as e:
# For the case that the rotation succeeded.
- print("Ignoring 404, detail: {}".format(e))
+ print(f"Ignoring 404, detail: {e}")
blob = Blob(
blob_name,
bucket,
diff --git a/samples/snippets/fileio_test.py b/samples/snippets/fileio_test.py
index cf98ce1ab..b8a4b8272 100644
--- a/samples/snippets/fileio_test.py
+++ b/samples/snippets/fileio_test.py
@@ -19,14 +19,14 @@
def test_fileio_write_read(bucket, capsys):
- blob_name = "test-fileio-{}".format(uuid.uuid4())
+ blob_name = f"test-fileio-{uuid.uuid4()}"
storage_fileio_write_read.write_read(bucket.name, blob_name)
out, _ = capsys.readouterr()
assert "Hello world" in out
def test_fileio_pandas(bucket, capsys):
- blob_name = "test-fileio-{}".format(uuid.uuid4())
+ blob_name = f"test-fileio-{uuid.uuid4()}"
storage_fileio_pandas.pandas_write(bucket.name, blob_name)
out, _ = capsys.readouterr()
assert f"Wrote csv with pandas with name {blob_name} from bucket {bucket.name}." in out
diff --git a/samples/snippets/iam_test.py b/samples/snippets/iam_test.py
index edeb8427d..7700b6c6a 100644
--- a/samples/snippets/iam_test.py
+++ b/samples/snippets/iam_test.py
@@ -42,7 +42,7 @@ def bucket():
bucket = None
while bucket is None or bucket.exists():
storage_client = storage.Client()
- bucket_name = "test-iam-{}".format(uuid.uuid4())
+ bucket_name = f"test-iam-{uuid.uuid4()}"
bucket = storage_client.bucket(bucket_name)
bucket.iam_configuration.uniform_bucket_level_access_enabled = True
storage_client.create_bucket(bucket)
@@ -60,7 +60,7 @@ def public_bucket():
bucket = None
while bucket is None or bucket.exists():
storage_client = storage.Client()
- bucket_name = "test-iam-{}".format(uuid.uuid4())
+ bucket_name = f"test-iam-{uuid.uuid4()}"
bucket = storage_client.bucket(bucket_name)
bucket.iam_configuration.uniform_bucket_level_access_enabled = True
storage_client.create_bucket(bucket)
diff --git a/samples/snippets/notification_polling.py b/samples/snippets/notification_polling.py
index 34fd8cc3e..d005d8c21 100644
--- a/samples/snippets/notification_polling.py
+++ b/samples/snippets/notification_polling.py
@@ -76,9 +76,7 @@ def summarize(message):
)
if "overwroteGeneration" in attributes:
- description += "\tOverwrote generation: %s\n" % (
- attributes["overwroteGeneration"]
- )
+ description += f"\tOverwrote generation: {attributes['overwroteGeneration']}\n"
if "overwrittenByGeneration" in attributes:
description += "\tOverwritten by generation: %s\n" % (
attributes["overwrittenByGeneration"]
@@ -110,14 +108,14 @@ def poll_notifications(project, subscription_name):
)
def callback(message):
- print("Received message:\n{}".format(summarize(message)))
+ print(f"Received message:\n{summarize(message)}")
message.ack()
subscriber.subscribe(subscription_path, callback=callback)
# The subscriber is non-blocking, so we must keep the main thread from
# exiting to allow it to process messages in the background.
- print("Listening for messages on {}".format(subscription_path))
+ print(f"Listening for messages on {subscription_path}")
while True:
time.sleep(60)
diff --git a/samples/snippets/notification_test.py b/samples/snippets/notification_test.py
index 13553c844..a2fdbe3ef 100644
--- a/samples/snippets/notification_test.py
+++ b/samples/snippets/notification_test.py
@@ -55,7 +55,7 @@ def _notification_topic(storage_client, publisher_client):
binding = policy.bindings.add()
binding.role = "roles/pubsub.publisher"
binding.members.append(
- "serviceAccount:{}".format(storage_client.get_service_account_email())
+ f"serviceAccount:{storage_client.get_service_account_email()}"
)
publisher_client.set_iam_policy(request={"resource": topic_path, "policy": policy})
diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py
index 38bb0a572..9f1cc8fb1 100644
--- a/samples/snippets/noxfile.py
+++ b/samples/snippets/noxfile.py
@@ -66,7 +66,7 @@
sys.path.append(".")
from noxfile_config import TEST_CONFIG_OVERRIDE
except ImportError as e:
- print("No user noxfile_config found: detail: {}".format(e))
+ print(f"No user noxfile_config found: detail: {e}")
TEST_CONFIG_OVERRIDE = {}
# Update the TEST_CONFIG with the user supplied values.
@@ -266,7 +266,7 @@ def py(session: nox.sessions.Session) -> None:
_session_tests(session)
else:
session.skip(
- "SKIPPED: {} tests are disabled for this sample.".format(session.python)
+ f"SKIPPED: {session.python} tests are disabled for this sample."
)
diff --git a/samples/snippets/quickstart.py b/samples/snippets/quickstart.py
index 578e50753..54148b1fb 100644
--- a/samples/snippets/quickstart.py
+++ b/samples/snippets/quickstart.py
@@ -29,7 +29,7 @@ def run_quickstart():
# Creates the new bucket
bucket = storage_client.create_bucket(bucket_name)
- print("Bucket {} created.".format(bucket.name))
+ print(f"Bucket {bucket.name} created.")
# [END storage_quickstart]
diff --git a/samples/snippets/requester_pays_test.py b/samples/snippets/requester_pays_test.py
index 9a178edb0..cf8c2d097 100644
--- a/samples/snippets/requester_pays_test.py
+++ b/samples/snippets/requester_pays_test.py
@@ -34,19 +34,19 @@
def test_enable_requester_pays(capsys):
storage_enable_requester_pays.enable_requester_pays(BUCKET)
out, _ = capsys.readouterr()
- assert "Requester Pays has been enabled for {}".format(BUCKET) in out
+ assert f"Requester Pays has been enabled for {BUCKET}" in out
def test_disable_requester_pays(capsys):
storage_disable_requester_pays.disable_requester_pays(BUCKET)
out, _ = capsys.readouterr()
- assert "Requester Pays has been disabled for {}".format(BUCKET) in out
+ assert f"Requester Pays has been disabled for {BUCKET}" in out
def test_get_requester_pays_status(capsys):
storage_get_requester_pays_status.get_requester_pays_status(BUCKET)
out, _ = capsys.readouterr()
- assert "Requester Pays is disabled for {}".format(BUCKET) in out
+ assert f"Requester Pays is disabled for {BUCKET}" in out
@pytest.fixture
diff --git a/samples/snippets/rpo_test.py b/samples/snippets/rpo_test.py
index d084710a9..f1f16e7fb 100644
--- a/samples/snippets/rpo_test.py
+++ b/samples/snippets/rpo_test.py
@@ -28,7 +28,7 @@ def dual_region_bucket():
"""Yields a dual region bucket that is deleted after the test completes."""
bucket = None
while bucket is None or bucket.exists():
- bucket_name = "bucket-lock-{}".format(uuid.uuid4())
+ bucket_name = f"bucket-lock-{uuid.uuid4()}"
bucket = storage.Client().bucket(bucket_name)
bucket.location = "NAM4"
bucket.create()
@@ -55,7 +55,7 @@ def test_set_rpo_default(dual_region_bucket, capsys):
def test_create_bucket_turbo_replication(capsys):
- bucket_name = "test-rpo-{}".format(uuid.uuid4())
+ bucket_name = f"test-rpo-{uuid.uuid4()}"
storage_create_bucket_turbo_replication.create_bucket_turbo_replication(bucket_name)
out, _ = capsys.readouterr()
assert f"{bucket_name} created with RPO ASYNC_TURBO in NAM4." in out
diff --git a/samples/snippets/snippets_test.py b/samples/snippets/snippets_test.py
index 7a5a3a64f..bdd8c528e 100644
--- a/samples/snippets/snippets_test.py
+++ b/samples/snippets/snippets_test.py
@@ -111,7 +111,7 @@ def test_bucket():
"""Yields a bucket that is deleted after the test completes."""
bucket = None
while bucket is None or bucket.exists():
- bucket_name = "storage-snippets-test-{}".format(uuid.uuid4())
+ bucket_name = f"storage-snippets-test-{uuid.uuid4()}"
bucket = storage.Client().bucket(bucket_name)
bucket.create()
yield bucket
@@ -127,7 +127,7 @@ def test_public_bucket():
bucket = None
while bucket is None or bucket.exists():
storage_client = storage.Client()
- bucket_name = "storage-snippets-test-{}".format(uuid.uuid4())
+ bucket_name = f"storage-snippets-test-{uuid.uuid4()}"
bucket = storage_client.bucket(bucket_name)
storage_client.create_bucket(bucket)
yield bucket
@@ -140,7 +140,7 @@ def test_public_bucket():
def test_blob(test_bucket):
"""Yields a blob that is deleted after the test completes."""
bucket = test_bucket
- blob = bucket.blob("storage_snippets_test_sigil-{}".format(uuid.uuid4()))
+ blob = bucket.blob(f"storage_snippets_test_sigil-{uuid.uuid4()}")
blob.upload_from_string("Hello, is it me you're looking for?")
yield blob
@@ -149,7 +149,7 @@ def test_blob(test_bucket):
def test_public_blob(test_public_bucket):
"""Yields a blob that is deleted after the test completes."""
bucket = test_public_bucket
- blob = bucket.blob("storage_snippets_test_sigil-{}".format(uuid.uuid4()))
+ blob = bucket.blob(f"storage_snippets_test_sigil-{uuid.uuid4()}")
blob.upload_from_string("Hello, is it me you're looking for?")
yield blob
@@ -159,7 +159,7 @@ def test_bucket_create():
"""Yields a bucket object that is deleted after the test completes."""
bucket = None
while bucket is None or bucket.exists():
- bucket_name = "storage-snippets-test-{}".format(uuid.uuid4())
+ bucket_name = f"storage-snippets-test-{uuid.uuid4()}"
bucket = storage.Client().bucket(bucket_name)
yield bucket
bucket.delete(force=True)
@@ -217,7 +217,7 @@ def test_upload_blob_from_stream(test_bucket, capsys):
)
out, _ = capsys.readouterr()
- assert "Stream data uploaded to {}".format("test_upload_blob") in out
+ assert "Stream data uploaded to test_upload_blob" in out
def test_upload_blob_with_kms(test_bucket):
@@ -339,7 +339,7 @@ def test_generate_signed_policy_v4(test_bucket, capsys):
blob_name = "storage_snippets_test_form"
short_name = storage_generate_signed_post_policy_v4
form = short_name.generate_signed_post_policy_v4(test_bucket.name, blob_name)
- assert "name='key' value='{}'".format(blob_name) in form
+ assert f"name='key' value='{blob_name}'" in form
assert "name='x-goog-signature'" in form
assert "name='x-goog-date'" in form
assert "name='x-goog-credential'" in form
@@ -355,7 +355,7 @@ def test_rename_blob(test_blob):
try:
bucket.delete_blob("test_rename_blob")
except google.cloud.exceptions.exceptions.NotFound:
- print("test_rename_blob not found in bucket {}".format(bucket.name))
+ print(f"test_rename_blob not found in bucket {bucket.name}")
storage_rename_file.rename_blob(bucket.name, test_blob.name, "test_rename_blob")
@@ -370,7 +370,7 @@ def test_move_blob(test_bucket_create, test_blob):
try:
test_bucket_create.delete_blob("test_move_blob")
except google.cloud.exceptions.NotFound:
- print("test_move_blob not found in bucket {}".format(test_bucket_create.name))
+ print(f"test_move_blob not found in bucket {test_bucket_create.name}")
storage_move_file.move_blob(
bucket.name, test_blob.name, test_bucket_create.name, "test_move_blob"
@@ -551,7 +551,7 @@ def test_change_file_storage_class(test_blob, capsys):
test_blob.bucket.name, test_blob.name
)
out, _ = capsys.readouterr()
- assert "Blob {} in bucket {}". format(blob.name, blob.bucket.name) in out
+ assert f"Blob {blob.name} in bucket {blob.bucket.name}" in out
assert blob.storage_class == 'NEARLINE'
diff --git a/samples/snippets/storage_activate_hmac_key.py b/samples/snippets/storage_activate_hmac_key.py
index e77cd8066..d3960eb62 100644
--- a/samples/snippets/storage_activate_hmac_key.py
+++ b/samples/snippets/storage_activate_hmac_key.py
@@ -36,14 +36,14 @@ def activate_key(access_id, project_id):
hmac_key.update()
print("The HMAC key metadata is:")
- print("Service Account Email: {}".format(hmac_key.service_account_email))
- print("Key ID: {}".format(hmac_key.id))
- print("Access ID: {}".format(hmac_key.access_id))
- print("Project ID: {}".format(hmac_key.project))
- print("State: {}".format(hmac_key.state))
- print("Created At: {}".format(hmac_key.time_created))
- print("Updated At: {}".format(hmac_key.updated))
- print("Etag: {}".format(hmac_key.etag))
+ print(f"Service Account Email: {hmac_key.service_account_email}")
+ print(f"Key ID: {hmac_key.id}")
+ print(f"Access ID: {hmac_key.access_id}")
+ print(f"Project ID: {hmac_key.project}")
+ print(f"State: {hmac_key.state}")
+ print(f"Created At: {hmac_key.time_created}")
+ print(f"Updated At: {hmac_key.updated}")
+ print(f"Etag: {hmac_key.etag}")
return hmac_key
diff --git a/samples/snippets/storage_add_bucket_conditional_iam_binding.py b/samples/snippets/storage_add_bucket_conditional_iam_binding.py
index ddc0fc028..d09f528cf 100644
--- a/samples/snippets/storage_add_bucket_conditional_iam_binding.py
+++ b/samples/snippets/storage_add_bucket_conditional_iam_binding.py
@@ -53,15 +53,15 @@ def add_bucket_conditional_iam_binding(
bucket.set_iam_policy(policy)
- print("Added the following member(s) with role {} to {}:".format(role, bucket_name))
+ print(f"Added the following member(s) with role {role} to {bucket_name}:")
for member in members:
- print(" {}".format(member))
+ print(f" {member}")
print("with condition:")
- print(" Title: {}".format(title))
- print(" Description: {}".format(description))
- print(" Expression: {}".format(expression))
+ print(f" Title: {title}")
+ print(f" Description: {description}")
+ print(f" Expression: {expression}")
# [END storage_add_bucket_conditional_iam_binding]
diff --git a/samples/snippets/storage_add_bucket_iam_member.py b/samples/snippets/storage_add_bucket_iam_member.py
index 727f18483..0d610eae7 100644
--- a/samples/snippets/storage_add_bucket_iam_member.py
+++ b/samples/snippets/storage_add_bucket_iam_member.py
@@ -35,7 +35,7 @@ def add_bucket_iam_member(bucket_name, role, member):
bucket.set_iam_policy(policy)
- print("Added {} with role {} to {}.".format(member, role, bucket_name))
+ print(f"Added {member} with role {role} to {bucket_name}.")
# [END storage_add_bucket_iam_member]
diff --git a/samples/snippets/storage_add_bucket_label.py b/samples/snippets/storage_add_bucket_label.py
index 8ae8fe1f4..9c6fcff7a 100644
--- a/samples/snippets/storage_add_bucket_label.py
+++ b/samples/snippets/storage_add_bucket_label.py
@@ -36,7 +36,7 @@ def add_bucket_label(bucket_name):
bucket.labels = labels
bucket.patch()
- print("Updated labels on {}.".format(bucket.name))
+ print(f"Updated labels on {bucket.name}.")
pprint.pprint(bucket.labels)
diff --git a/samples/snippets/storage_add_bucket_owner.py b/samples/snippets/storage_add_bucket_owner.py
index acdb60dc5..bac1f3f64 100644
--- a/samples/snippets/storage_add_bucket_owner.py
+++ b/samples/snippets/storage_add_bucket_owner.py
@@ -40,9 +40,7 @@ def add_bucket_owner(bucket_name, user_email):
bucket.acl.save()
print(
- "Added user {} as an owner on bucket {}.".format(
- user_email, bucket_name
- )
+ f"Added user {user_email} as an owner on bucket {bucket_name}."
)
diff --git a/samples/snippets/storage_bucket_delete_default_kms_key.py b/samples/snippets/storage_bucket_delete_default_kms_key.py
index 3df23767d..0db293756 100644
--- a/samples/snippets/storage_bucket_delete_default_kms_key.py
+++ b/samples/snippets/storage_bucket_delete_default_kms_key.py
@@ -30,7 +30,7 @@ def bucket_delete_default_kms_key(bucket_name):
bucket.default_kms_key_name = None
bucket.patch()
- print("Default KMS key was removed from {}".format(bucket.name))
+ print(f"Default KMS key was removed from {bucket.name}")
return bucket
diff --git a/samples/snippets/storage_change_default_storage_class.py b/samples/snippets/storage_change_default_storage_class.py
index 8a72719ba..5d2f924ad 100644
--- a/samples/snippets/storage_change_default_storage_class.py
+++ b/samples/snippets/storage_change_default_storage_class.py
@@ -31,7 +31,7 @@ def change_default_storage_class(bucket_name):
bucket.storage_class = constants.COLDLINE_STORAGE_CLASS
bucket.patch()
- print("Default storage class for bucket {} has been set to {}".format(bucket_name, bucket.storage_class))
+ print(f"Default storage class for bucket {bucket_name} has been set to {bucket.storage_class}")
return bucket
diff --git a/samples/snippets/storage_configure_retries.py b/samples/snippets/storage_configure_retries.py
index 9543111b3..ef1e422b6 100644
--- a/samples/snippets/storage_configure_retries.py
+++ b/samples/snippets/storage_configure_retries.py
@@ -53,7 +53,7 @@ def configure_retries(bucket_name, blob_name):
)
blob.delete(retry=modified_retry)
- print("Blob {} deleted with a customized retry strategy.".format(blob_name))
+ print(f"Blob {blob_name} deleted with a customized retry strategy.")
# [END storage_configure_retries]
diff --git a/samples/snippets/storage_cors_configuration.py b/samples/snippets/storage_cors_configuration.py
index 3d2595a9d..2c5dd2428 100644
--- a/samples/snippets/storage_cors_configuration.py
+++ b/samples/snippets/storage_cors_configuration.py
@@ -38,7 +38,7 @@ def cors_configuration(bucket_name):
]
bucket.patch()
- print("Set CORS policies for bucket {} is {}".format(bucket.name, bucket.cors))
+ print(f"Set CORS policies for bucket {bucket.name} is {bucket.cors}")
return bucket
diff --git a/samples/snippets/storage_create_bucket.py b/samples/snippets/storage_create_bucket.py
index aaee9e234..c95f32f56 100644
--- a/samples/snippets/storage_create_bucket.py
+++ b/samples/snippets/storage_create_bucket.py
@@ -28,7 +28,7 @@ def create_bucket(bucket_name):
bucket = storage_client.create_bucket(bucket_name)
- print("Bucket {} created".format(bucket.name))
+ print(f"Bucket {bucket.name} created")
# [END storage_create_bucket]
diff --git a/samples/snippets/storage_create_hmac_key.py b/samples/snippets/storage_create_hmac_key.py
index 27a418c39..d845738b7 100644
--- a/samples/snippets/storage_create_hmac_key.py
+++ b/samples/snippets/storage_create_hmac_key.py
@@ -33,17 +33,17 @@ def create_key(project_id, service_account_email):
service_account_email=service_account_email, project_id=project_id
)
- print("The base64 encoded secret is {}".format(secret))
+ print(f"The base64 encoded secret is {secret}")
print("Do not miss that secret, there is no API to recover it.")
print("The HMAC key metadata is:")
- print("Service Account Email: {}".format(hmac_key.service_account_email))
- print("Key ID: {}".format(hmac_key.id))
- print("Access ID: {}".format(hmac_key.access_id))
- print("Project ID: {}".format(hmac_key.project))
- print("State: {}".format(hmac_key.state))
- print("Created At: {}".format(hmac_key.time_created))
- print("Updated At: {}".format(hmac_key.updated))
- print("Etag: {}".format(hmac_key.etag))
+ print(f"Service Account Email: {hmac_key.service_account_email}")
+ print(f"Key ID: {hmac_key.id}")
+ print(f"Access ID: {hmac_key.access_id}")
+ print(f"Project ID: {hmac_key.project}")
+ print(f"State: {hmac_key.state}")
+ print(f"Created At: {hmac_key.time_created}")
+ print(f"Updated At: {hmac_key.updated}")
+ print(f"Etag: {hmac_key.etag}")
return hmac_key
diff --git a/samples/snippets/storage_deactivate_hmac_key.py b/samples/snippets/storage_deactivate_hmac_key.py
index 389efb998..007f7b5a5 100644
--- a/samples/snippets/storage_deactivate_hmac_key.py
+++ b/samples/snippets/storage_deactivate_hmac_key.py
@@ -37,14 +37,14 @@ def deactivate_key(access_id, project_id):
print("The HMAC key is now inactive.")
print("The HMAC key metadata is:")
- print("Service Account Email: {}".format(hmac_key.service_account_email))
- print("Key ID: {}".format(hmac_key.id))
- print("Access ID: {}".format(hmac_key.access_id))
- print("Project ID: {}".format(hmac_key.project))
- print("State: {}".format(hmac_key.state))
- print("Created At: {}".format(hmac_key.time_created))
- print("Updated At: {}".format(hmac_key.updated))
- print("Etag: {}".format(hmac_key.etag))
+ print(f"Service Account Email: {hmac_key.service_account_email}")
+ print(f"Key ID: {hmac_key.id}")
+ print(f"Access ID: {hmac_key.access_id}")
+ print(f"Project ID: {hmac_key.project}")
+ print(f"State: {hmac_key.state}")
+ print(f"Created At: {hmac_key.time_created}")
+ print(f"Updated At: {hmac_key.updated}")
+ print(f"Etag: {hmac_key.etag}")
return hmac_key
diff --git a/samples/snippets/storage_delete_bucket.py b/samples/snippets/storage_delete_bucket.py
index b3e264c74..b12c06636 100644
--- a/samples/snippets/storage_delete_bucket.py
+++ b/samples/snippets/storage_delete_bucket.py
@@ -29,7 +29,7 @@ def delete_bucket(bucket_name):
bucket = storage_client.get_bucket(bucket_name)
bucket.delete()
- print("Bucket {} deleted".format(bucket.name))
+ print(f"Bucket {bucket.name} deleted")
# [END storage_delete_bucket]
diff --git a/samples/snippets/storage_delete_file.py b/samples/snippets/storage_delete_file.py
index 1105f3725..b2997c86b 100644
--- a/samples/snippets/storage_delete_file.py
+++ b/samples/snippets/storage_delete_file.py
@@ -31,7 +31,7 @@ def delete_blob(bucket_name, blob_name):
blob = bucket.blob(blob_name)
blob.delete()
- print("Blob {} deleted.".format(blob_name))
+ print(f"Blob {blob_name} deleted.")
# [END storage_delete_file]
diff --git a/samples/snippets/storage_delete_file_archived_generation.py b/samples/snippets/storage_delete_file_archived_generation.py
index 4e4909001..ff02bca23 100644
--- a/samples/snippets/storage_delete_file_archived_generation.py
+++ b/samples/snippets/storage_delete_file_archived_generation.py
@@ -31,9 +31,7 @@ def delete_file_archived_generation(bucket_name, blob_name, generation):
bucket = storage_client.get_bucket(bucket_name)
bucket.delete_blob(blob_name, generation=generation)
print(
- "Generation {} of blob {} was deleted from {}".format(
- generation, blob_name, bucket_name
- )
+ f"Generation {generation} of blob {blob_name} was deleted from {bucket_name}"
)
diff --git a/samples/snippets/storage_disable_bucket_lifecycle_management.py b/samples/snippets/storage_disable_bucket_lifecycle_management.py
index 9ef6971fb..a5fa56fcf 100644
--- a/samples/snippets/storage_disable_bucket_lifecycle_management.py
+++ b/samples/snippets/storage_disable_bucket_lifecycle_management.py
@@ -31,7 +31,7 @@ def disable_bucket_lifecycle_management(bucket_name):
bucket.patch()
rules = bucket.lifecycle_rules
- print("Lifecycle management is disable for bucket {} and the rules are {}".format(bucket_name, list(rules)))
+ print(f"Lifecycle management is disable for bucket {bucket_name} and the rules are {list(rules)}")
return bucket
diff --git a/samples/snippets/storage_disable_default_event_based_hold.py b/samples/snippets/storage_disable_default_event_based_hold.py
index dff3ed3c1..48becdac1 100644
--- a/samples/snippets/storage_disable_default_event_based_hold.py
+++ b/samples/snippets/storage_disable_default_event_based_hold.py
@@ -30,7 +30,7 @@ def disable_default_event_based_hold(bucket_name):
bucket.default_event_based_hold = False
bucket.patch()
- print("Default event based hold was disabled for {}".format(bucket_name))
+ print(f"Default event based hold was disabled for {bucket_name}")
# [END storage_disable_default_event_based_hold]
diff --git a/samples/snippets/storage_disable_requester_pays.py b/samples/snippets/storage_disable_requester_pays.py
index c49cc28ea..78e195d8a 100644
--- a/samples/snippets/storage_disable_requester_pays.py
+++ b/samples/snippets/storage_disable_requester_pays.py
@@ -30,7 +30,7 @@ def disable_requester_pays(bucket_name):
bucket.requester_pays = False
bucket.patch()
- print("Requester Pays has been disabled for {}".format(bucket_name))
+ print(f"Requester Pays has been disabled for {bucket_name}")
# [END storage_disable_requester_pays]
diff --git a/samples/snippets/storage_disable_uniform_bucket_level_access.py b/samples/snippets/storage_disable_uniform_bucket_level_access.py
index 4f4691611..20a045686 100644
--- a/samples/snippets/storage_disable_uniform_bucket_level_access.py
+++ b/samples/snippets/storage_disable_uniform_bucket_level_access.py
@@ -31,7 +31,7 @@ def disable_uniform_bucket_level_access(bucket_name):
bucket.patch()
print(
- "Uniform bucket-level access was disabled for {}.".format(bucket.name)
+ f"Uniform bucket-level access was disabled for {bucket.name}."
)
diff --git a/samples/snippets/storage_disable_versioning.py b/samples/snippets/storage_disable_versioning.py
index 98832ba68..9dfd0ff90 100644
--- a/samples/snippets/storage_disable_versioning.py
+++ b/samples/snippets/storage_disable_versioning.py
@@ -30,7 +30,7 @@ def disable_versioning(bucket_name):
bucket.versioning_enabled = False
bucket.patch()
- print("Versioning was disabled for bucket {}".format(bucket))
+ print(f"Versioning was disabled for bucket {bucket}")
return bucket
diff --git a/samples/snippets/storage_download_encrypted_file.py b/samples/snippets/storage_download_encrypted_file.py
index ac7071fbe..8a81b0de5 100644
--- a/samples/snippets/storage_download_encrypted_file.py
+++ b/samples/snippets/storage_download_encrypted_file.py
@@ -52,9 +52,7 @@ def download_encrypted_blob(
blob.download_to_filename(destination_file_name)
print(
- "Blob {} downloaded to {}.".format(
- source_blob_name, destination_file_name
- )
+ f"Blob {source_blob_name} downloaded to {destination_file_name}."
)
diff --git a/samples/snippets/storage_download_to_stream.py b/samples/snippets/storage_download_to_stream.py
index 1cb8dcc7b..3834e34c9 100644
--- a/samples/snippets/storage_download_to_stream.py
+++ b/samples/snippets/storage_download_to_stream.py
@@ -42,7 +42,7 @@ def download_blob_to_stream(bucket_name, source_blob_name, file_obj):
blob = bucket.blob(source_blob_name)
blob.download_to_file(file_obj)
- print("Downloaded blob {} to file-like object.".format(source_blob_name))
+ print(f"Downloaded blob {source_blob_name} to file-like object.")
return file_obj
# Before reading from file_obj, remember to rewind with file_obj.seek(0).
diff --git a/samples/snippets/storage_enable_bucket_lifecycle_management.py b/samples/snippets/storage_enable_bucket_lifecycle_management.py
index 61c7d7b20..0bbff079c 100644
--- a/samples/snippets/storage_enable_bucket_lifecycle_management.py
+++ b/samples/snippets/storage_enable_bucket_lifecycle_management.py
@@ -29,12 +29,12 @@ def enable_bucket_lifecycle_management(bucket_name):
bucket = storage_client.get_bucket(bucket_name)
rules = bucket.lifecycle_rules
- print("Lifecycle management rules for bucket {} are {}".format(bucket_name, list(rules)))
+ print(f"Lifecycle management rules for bucket {bucket_name} are {list(rules)}")
bucket.add_lifecycle_delete_rule(age=2)
bucket.patch()
rules = bucket.lifecycle_rules
- print("Lifecycle management is enable for bucket {} and the rules are {}".format(bucket_name, list(rules)))
+ print(f"Lifecycle management is enable for bucket {bucket_name} and the rules are {list(rules)}")
return bucket
diff --git a/samples/snippets/storage_enable_default_event_based_hold.py b/samples/snippets/storage_enable_default_event_based_hold.py
index a535390c9..5dfdf94a9 100644
--- a/samples/snippets/storage_enable_default_event_based_hold.py
+++ b/samples/snippets/storage_enable_default_event_based_hold.py
@@ -30,7 +30,7 @@ def enable_default_event_based_hold(bucket_name):
bucket.default_event_based_hold = True
bucket.patch()
- print("Default event based hold was enabled for {}".format(bucket_name))
+ print(f"Default event based hold was enabled for {bucket_name}")
# [END storage_enable_default_event_based_hold]
diff --git a/samples/snippets/storage_enable_requester_pays.py b/samples/snippets/storage_enable_requester_pays.py
index 9787008dd..fbecb04f4 100644
--- a/samples/snippets/storage_enable_requester_pays.py
+++ b/samples/snippets/storage_enable_requester_pays.py
@@ -30,7 +30,7 @@ def enable_requester_pays(bucket_name):
bucket.requester_pays = True
bucket.patch()
- print("Requester Pays has been enabled for {}".format(bucket_name))
+ print(f"Requester Pays has been enabled for {bucket_name}")
# [END storage_enable_requester_pays]
diff --git a/samples/snippets/storage_enable_uniform_bucket_level_access.py b/samples/snippets/storage_enable_uniform_bucket_level_access.py
index c689bb735..9ab71ae37 100644
--- a/samples/snippets/storage_enable_uniform_bucket_level_access.py
+++ b/samples/snippets/storage_enable_uniform_bucket_level_access.py
@@ -31,7 +31,7 @@ def enable_uniform_bucket_level_access(bucket_name):
bucket.patch()
print(
- "Uniform bucket-level access was enabled for {}.".format(bucket.name)
+ f"Uniform bucket-level access was enabled for {bucket.name}."
)
diff --git a/samples/snippets/storage_enable_versioning.py b/samples/snippets/storage_enable_versioning.py
index 89693e426..9cdc98001 100644
--- a/samples/snippets/storage_enable_versioning.py
+++ b/samples/snippets/storage_enable_versioning.py
@@ -30,7 +30,7 @@ def enable_versioning(bucket_name):
bucket.versioning_enabled = True
bucket.patch()
- print("Versioning was enabled for bucket {}".format(bucket.name))
+ print(f"Versioning was enabled for bucket {bucket.name}")
return bucket
diff --git a/samples/snippets/storage_generate_encryption_key.py b/samples/snippets/storage_generate_encryption_key.py
index a973418a6..dbeb46b91 100644
--- a/samples/snippets/storage_generate_encryption_key.py
+++ b/samples/snippets/storage_generate_encryption_key.py
@@ -30,7 +30,7 @@ def generate_encryption_key():
key = os.urandom(32)
encoded_key = base64.b64encode(key).decode("utf-8")
- print("Base 64 encoded encryption key: {}".format(encoded_key))
+ print(f"Base 64 encoded encryption key: {encoded_key}")
# [END storage_generate_encryption_key]
diff --git a/samples/snippets/storage_generate_signed_post_policy_v4.py b/samples/snippets/storage_generate_signed_post_policy_v4.py
index 8217714e2..0c06ddc2f 100644
--- a/samples/snippets/storage_generate_signed_post_policy_v4.py
+++ b/samples/snippets/storage_generate_signed_post_policy_v4.py
@@ -46,7 +46,7 @@ def generate_signed_post_policy_v4(bucket_name, blob_name):
# Include all fields returned in the HTML form as they're required
for key, value in policy["fields"].items():
-        form += "  <input name='{}' value='{}' type='hidden'/>\n".format(key, value)
+        form += f"  <input name='{key}' value='{value}' type='hidden'/>\n"
     form += "  <input type='file' name='file'/><br />\n"
     form += "  <input type='submit' value='Upload File' name='submit'/><br />\n"
diff --git a/samples/snippets/storage_generate_signed_url_v2.py b/samples/snippets/storage_generate_signed_url_v2.py
index abea3dd54..f1317ea2f 100644
--- a/samples/snippets/storage_generate_signed_url_v2.py
+++ b/samples/snippets/storage_generate_signed_url_v2.py
@@ -44,7 +44,7 @@ def generate_signed_url(bucket_name, blob_name):
method="GET",
)
- print("The signed url for {} is {}".format(blob.name, url))
+ print(f"The signed url for {blob.name} is {url}")
return url
diff --git a/samples/snippets/storage_generate_signed_url_v4.py b/samples/snippets/storage_generate_signed_url_v4.py
index 2a45b23e9..80625a7b3 100644
--- a/samples/snippets/storage_generate_signed_url_v4.py
+++ b/samples/snippets/storage_generate_signed_url_v4.py
@@ -49,7 +49,7 @@ def generate_download_signed_url_v4(bucket_name, blob_name):
print("Generated GET signed URL:")
print(url)
print("You can use this URL with any user agent, for example:")
- print("curl '{}'".format(url))
+ print(f"curl '{url}'")
return url
diff --git a/samples/snippets/storage_get_default_event_based_hold.py b/samples/snippets/storage_get_default_event_based_hold.py
index 4cf13914d..08a05f8ef 100644
--- a/samples/snippets/storage_get_default_event_based_hold.py
+++ b/samples/snippets/storage_get_default_event_based_hold.py
@@ -29,12 +29,10 @@ def get_default_event_based_hold(bucket_name):
bucket = storage_client.get_bucket(bucket_name)
if bucket.default_event_based_hold:
- print("Default event-based hold is enabled for {}".format(bucket_name))
+ print(f"Default event-based hold is enabled for {bucket_name}")
else:
print(
- "Default event-based hold is not enabled for {}".format(
- bucket_name
- )
+ f"Default event-based hold is not enabled for {bucket_name}"
)
diff --git a/samples/snippets/storage_get_hmac_key.py b/samples/snippets/storage_get_hmac_key.py
index 4dc52240d..82b28ff99 100644
--- a/samples/snippets/storage_get_hmac_key.py
+++ b/samples/snippets/storage_get_hmac_key.py
@@ -34,14 +34,14 @@ def get_key(access_id, project_id):
)
print("The HMAC key metadata is:")
- print("Service Account Email: {}".format(hmac_key.service_account_email))
- print("Key ID: {}".format(hmac_key.id))
- print("Access ID: {}".format(hmac_key.access_id))
- print("Project ID: {}".format(hmac_key.project))
- print("State: {}".format(hmac_key.state))
- print("Created At: {}".format(hmac_key.time_created))
- print("Updated At: {}".format(hmac_key.updated))
- print("Etag: {}".format(hmac_key.etag))
+ print(f"Service Account Email: {hmac_key.service_account_email}")
+ print(f"Key ID: {hmac_key.id}")
+ print(f"Access ID: {hmac_key.access_id}")
+ print(f"Project ID: {hmac_key.project}")
+ print(f"State: {hmac_key.state}")
+ print(f"Created At: {hmac_key.time_created}")
+ print(f"Updated At: {hmac_key.updated}")
+ print(f"Etag: {hmac_key.etag}")
return hmac_key
diff --git a/samples/snippets/storage_get_metadata.py b/samples/snippets/storage_get_metadata.py
index 3ce7ecea8..eece8028a 100644
--- a/samples/snippets/storage_get_metadata.py
+++ b/samples/snippets/storage_get_metadata.py
@@ -33,27 +33,27 @@ def blob_metadata(bucket_name, blob_name):
# make an HTTP request.
blob = bucket.get_blob(blob_name)
- print("Blob: {}".format(blob.name))
- print("Bucket: {}".format(blob.bucket.name))
- print("Storage class: {}".format(blob.storage_class))
- print("ID: {}".format(blob.id))
- print("Size: {} bytes".format(blob.size))
- print("Updated: {}".format(blob.updated))
- print("Generation: {}".format(blob.generation))
- print("Metageneration: {}".format(blob.metageneration))
- print("Etag: {}".format(blob.etag))
- print("Owner: {}".format(blob.owner))
- print("Component count: {}".format(blob.component_count))
- print("Crc32c: {}".format(blob.crc32c))
- print("md5_hash: {}".format(blob.md5_hash))
- print("Cache-control: {}".format(blob.cache_control))
- print("Content-type: {}".format(blob.content_type))
- print("Content-disposition: {}".format(blob.content_disposition))
- print("Content-encoding: {}".format(blob.content_encoding))
- print("Content-language: {}".format(blob.content_language))
- print("Metadata: {}".format(blob.metadata))
- print("Medialink: {}".format(blob.media_link))
- print("Custom Time: {}".format(blob.custom_time))
+ print(f"Blob: {blob.name}")
+ print(f"Bucket: {blob.bucket.name}")
+ print(f"Storage class: {blob.storage_class}")
+ print(f"ID: {blob.id}")
+ print(f"Size: {blob.size} bytes")
+ print(f"Updated: {blob.updated}")
+ print(f"Generation: {blob.generation}")
+ print(f"Metageneration: {blob.metageneration}")
+ print(f"Etag: {blob.etag}")
+ print(f"Owner: {blob.owner}")
+ print(f"Component count: {blob.component_count}")
+ print(f"Crc32c: {blob.crc32c}")
+ print(f"md5_hash: {blob.md5_hash}")
+ print(f"Cache-control: {blob.cache_control}")
+ print(f"Content-type: {blob.content_type}")
+ print(f"Content-disposition: {blob.content_disposition}")
+ print(f"Content-encoding: {blob.content_encoding}")
+ print(f"Content-language: {blob.content_language}")
+ print(f"Metadata: {blob.metadata}")
+ print(f"Medialink: {blob.media_link}")
+ print(f"Custom Time: {blob.custom_time}")
print("Temporary hold: ", "enabled" if blob.temporary_hold else "disabled")
print(
"Event based hold: ",
@@ -61,9 +61,7 @@ def blob_metadata(bucket_name, blob_name):
)
if blob.retention_expiration_time:
print(
- "retentionExpirationTime: {}".format(
- blob.retention_expiration_time
- )
+ f"retentionExpirationTime: {blob.retention_expiration_time}"
)
diff --git a/samples/snippets/storage_get_requester_pays_status.py b/samples/snippets/storage_get_requester_pays_status.py
index 2014d654c..a2eeb34d7 100644
--- a/samples/snippets/storage_get_requester_pays_status.py
+++ b/samples/snippets/storage_get_requester_pays_status.py
@@ -29,9 +29,9 @@ def get_requester_pays_status(bucket_name):
requester_pays_status = bucket.requester_pays
if requester_pays_status:
- print("Requester Pays is enabled for {}".format(bucket_name))
+ print(f"Requester Pays is enabled for {bucket_name}")
else:
- print("Requester Pays is disabled for {}".format(bucket_name))
+ print(f"Requester Pays is disabled for {bucket_name}")
# [END storage_get_requester_pays_status]
diff --git a/samples/snippets/storage_get_retention_policy.py b/samples/snippets/storage_get_retention_policy.py
index f2ca26d26..215f80d5a 100644
--- a/samples/snippets/storage_get_retention_policy.py
+++ b/samples/snippets/storage_get_retention_policy.py
@@ -28,14 +28,14 @@ def get_retention_policy(bucket_name):
bucket = storage_client.bucket(bucket_name)
bucket.reload()
- print("Retention Policy for {}".format(bucket_name))
- print("Retention Period: {}".format(bucket.retention_period))
+ print(f"Retention Policy for {bucket_name}")
+ print(f"Retention Period: {bucket.retention_period}")
if bucket.retention_policy_locked:
print("Retention Policy is locked")
if bucket.retention_policy_effective_time:
print(
- "Effective Time: {}".format(bucket.retention_policy_effective_time)
+ f"Effective Time: {bucket.retention_policy_effective_time}"
)
diff --git a/samples/snippets/storage_get_service_account.py b/samples/snippets/storage_get_service_account.py
index 58ababb91..5ac0e5638 100644
--- a/samples/snippets/storage_get_service_account.py
+++ b/samples/snippets/storage_get_service_account.py
@@ -25,9 +25,7 @@ def get_service_account():
email = storage_client.get_service_account_email()
print(
- "The GCS service account for project {} is: {} ".format(
- storage_client.project, email
- )
+ f"The GCS service account for project {storage_client.project} is: {email} "
)
diff --git a/samples/snippets/storage_get_uniform_bucket_level_access.py b/samples/snippets/storage_get_uniform_bucket_level_access.py
index eddb8bc1a..206b9f1ff 100644
--- a/samples/snippets/storage_get_uniform_bucket_level_access.py
+++ b/samples/snippets/storage_get_uniform_bucket_level_access.py
@@ -30,9 +30,7 @@ def get_uniform_bucket_level_access(bucket_name):
if iam_configuration.uniform_bucket_level_access_enabled:
print(
- "Uniform bucket-level access is enabled for {}.".format(
- bucket.name
- )
+ f"Uniform bucket-level access is enabled for {bucket.name}."
)
print(
"Bucket will be locked on {}.".format(
@@ -41,9 +39,7 @@ def get_uniform_bucket_level_access(bucket_name):
)
else:
print(
- "Uniform bucket-level access is disabled for {}.".format(
- bucket.name
- )
+ f"Uniform bucket-level access is disabled for {bucket.name}."
)
diff --git a/samples/snippets/storage_list_file_archived_generations.py b/samples/snippets/storage_list_file_archived_generations.py
index dc2f5eaf5..419cc3da4 100644
--- a/samples/snippets/storage_list_file_archived_generations.py
+++ b/samples/snippets/storage_list_file_archived_generations.py
@@ -29,7 +29,7 @@ def list_file_archived_generations(bucket_name):
blobs = storage_client.list_blobs(bucket_name, versions=True)
for blob in blobs:
- print("{},{}".format(blob.name, blob.generation))
+ print(f"{blob.name},{blob.generation}")
# [END storage_list_file_archived_generations]
diff --git a/samples/snippets/storage_list_hmac_keys.py b/samples/snippets/storage_list_hmac_keys.py
index 8e5c53b58..a09616fa5 100644
--- a/samples/snippets/storage_list_hmac_keys.py
+++ b/samples/snippets/storage_list_hmac_keys.py
@@ -31,9 +31,9 @@ def list_keys(project_id):
print("HMAC Keys:")
for hmac_key in hmac_keys:
print(
- "Service Account Email: {}".format(hmac_key.service_account_email)
+ f"Service Account Email: {hmac_key.service_account_email}"
)
- print("Access ID: {}".format(hmac_key.access_id))
+ print(f"Access ID: {hmac_key.access_id}")
return hmac_keys
diff --git a/samples/snippets/storage_lock_retention_policy.py b/samples/snippets/storage_lock_retention_policy.py
index d59572f5d..adff364d7 100644
--- a/samples/snippets/storage_lock_retention_policy.py
+++ b/samples/snippets/storage_lock_retention_policy.py
@@ -33,11 +33,9 @@ def lock_retention_policy(bucket_name):
# and retention period can only be increased.
bucket.lock_retention_policy()
- print("Retention policy for {} is now locked".format(bucket_name))
+ print(f"Retention policy for {bucket_name} is now locked")
print(
- "Retention policy effective as of {}".format(
- bucket.retention_policy_effective_time
- )
+ f"Retention policy effective as of {bucket.retention_policy_effective_time}"
)
diff --git a/samples/snippets/storage_make_public.py b/samples/snippets/storage_make_public.py
index 79ae40d12..489508cf6 100644
--- a/samples/snippets/storage_make_public.py
+++ b/samples/snippets/storage_make_public.py
@@ -32,9 +32,7 @@ def make_blob_public(bucket_name, blob_name):
blob.make_public()
print(
- "Blob {} is publicly accessible at {}".format(
- blob.name, blob.public_url
- )
+ f"Blob {blob.name} is publicly accessible at {blob.public_url}"
)
diff --git a/samples/snippets/storage_object_get_kms_key.py b/samples/snippets/storage_object_get_kms_key.py
index dddfc9151..7604e6eba 100644
--- a/samples/snippets/storage_object_get_kms_key.py
+++ b/samples/snippets/storage_object_get_kms_key.py
@@ -32,7 +32,7 @@ def object_get_kms_key(bucket_name, blob_name):
kms_key = blob.kms_key_name
- print("The KMS key of a blob is {}".format(blob.kms_key_name))
+ print(f"The KMS key of a blob is {blob.kms_key_name}")
return kms_key
diff --git a/samples/snippets/storage_print_bucket_acl.py b/samples/snippets/storage_print_bucket_acl.py
index 0804f7a9a..55417f1bc 100644
--- a/samples/snippets/storage_print_bucket_acl.py
+++ b/samples/snippets/storage_print_bucket_acl.py
@@ -27,7 +27,7 @@ def print_bucket_acl(bucket_name):
bucket = storage_client.bucket(bucket_name)
for entry in bucket.acl:
- print("{}: {}".format(entry["role"], entry["entity"]))
+ print(f"{entry['role']}: {entry['entity']}")
# [END storage_print_bucket_acl]
diff --git a/samples/snippets/storage_print_file_acl.py b/samples/snippets/storage_print_file_acl.py
index f34a5283b..8dfc4e984 100644
--- a/samples/snippets/storage_print_file_acl.py
+++ b/samples/snippets/storage_print_file_acl.py
@@ -28,7 +28,7 @@ def print_blob_acl(bucket_name, blob_name):
blob = bucket.blob(blob_name)
for entry in blob.acl:
- print("{}: {}".format(entry["role"], entry["entity"]))
+ print(f"{entry['role']}: {entry['entity']}")
# [END storage_print_file_acl]
diff --git a/samples/snippets/storage_release_event_based_hold.py b/samples/snippets/storage_release_event_based_hold.py
index 8c3c11b6f..1db637cd9 100644
--- a/samples/snippets/storage_release_event_based_hold.py
+++ b/samples/snippets/storage_release_event_based_hold.py
@@ -33,7 +33,7 @@ def release_event_based_hold(bucket_name, blob_name):
blob.event_based_hold = False
blob.patch()
- print("Event based hold was released for {}".format(blob_name))
+ print(f"Event based hold was released for {blob_name}")
# [END storage_release_event_based_hold]
diff --git a/samples/snippets/storage_remove_bucket_default_owner.py b/samples/snippets/storage_remove_bucket_default_owner.py
index beaf6be84..e6f3c495e 100644
--- a/samples/snippets/storage_remove_bucket_default_owner.py
+++ b/samples/snippets/storage_remove_bucket_default_owner.py
@@ -40,9 +40,7 @@ def remove_bucket_default_owner(bucket_name, user_email):
bucket.default_object_acl.save()
print(
- "Removed user {} from the default acl of bucket {}.".format(
- user_email, bucket_name
- )
+ f"Removed user {user_email} from the default acl of bucket {bucket_name}."
)
diff --git a/samples/snippets/storage_remove_bucket_iam_member.py b/samples/snippets/storage_remove_bucket_iam_member.py
index ef75a1a15..2efc29e30 100644
--- a/samples/snippets/storage_remove_bucket_iam_member.py
+++ b/samples/snippets/storage_remove_bucket_iam_member.py
@@ -38,7 +38,7 @@ def remove_bucket_iam_member(bucket_name, role, member):
bucket.set_iam_policy(policy)
- print("Removed {} with role {} from {}.".format(member, role, bucket_name))
+ print(f"Removed {member} with role {role} from {bucket_name}.")
# [END storage_remove_bucket_iam_member]
diff --git a/samples/snippets/storage_remove_bucket_label.py b/samples/snippets/storage_remove_bucket_label.py
index 58bbfef2d..fc4a5b4e7 100644
--- a/samples/snippets/storage_remove_bucket_label.py
+++ b/samples/snippets/storage_remove_bucket_label.py
@@ -39,7 +39,7 @@ def remove_bucket_label(bucket_name):
bucket.labels = labels
bucket.patch()
- print("Removed labels on {}.".format(bucket.name))
+ print(f"Removed labels on {bucket.name}.")
pprint.pprint(bucket.labels)
diff --git a/samples/snippets/storage_remove_bucket_owner.py b/samples/snippets/storage_remove_bucket_owner.py
index f54e7a7cc..561ba9175 100644
--- a/samples/snippets/storage_remove_bucket_owner.py
+++ b/samples/snippets/storage_remove_bucket_owner.py
@@ -38,7 +38,7 @@ def remove_bucket_owner(bucket_name, user_email):
bucket.acl.user(user_email).revoke_owner()
bucket.acl.save()
- print("Removed user {} from bucket {}.".format(user_email, bucket_name))
+ print(f"Removed user {user_email} from bucket {bucket_name}.")
# [END storage_remove_bucket_owner]
diff --git a/samples/snippets/storage_remove_cors_configuration.py b/samples/snippets/storage_remove_cors_configuration.py
index 48ee74338..ad97371f4 100644
--- a/samples/snippets/storage_remove_cors_configuration.py
+++ b/samples/snippets/storage_remove_cors_configuration.py
@@ -29,7 +29,7 @@ def remove_cors_configuration(bucket_name):
bucket.cors = []
bucket.patch()
- print("Remove CORS policies for bucket {}.".format(bucket.name))
+ print(f"Remove CORS policies for bucket {bucket.name}.")
return bucket
diff --git a/samples/snippets/storage_remove_file_owner.py b/samples/snippets/storage_remove_file_owner.py
index 9db83cce0..315a747ad 100644
--- a/samples/snippets/storage_remove_file_owner.py
+++ b/samples/snippets/storage_remove_file_owner.py
@@ -39,9 +39,7 @@ def remove_blob_owner(bucket_name, blob_name, user_email):
blob.acl.save()
print(
- "Removed user {} from blob {} in bucket {}.".format(
- user_email, blob_name, bucket_name
- )
+ f"Removed user {user_email} from blob {blob_name} in bucket {bucket_name}."
)
diff --git a/samples/snippets/storage_remove_retention_policy.py b/samples/snippets/storage_remove_retention_policy.py
index cb8ee548c..9ede8053a 100644
--- a/samples/snippets/storage_remove_retention_policy.py
+++ b/samples/snippets/storage_remove_retention_policy.py
@@ -37,7 +37,7 @@ def remove_retention_policy(bucket_name):
bucket.retention_period = None
bucket.patch()
- print("Removed bucket {} retention policy".format(bucket.name))
+ print(f"Removed bucket {bucket.name} retention policy")
# [END storage_remove_retention_policy]
diff --git a/samples/snippets/storage_rename_file.py b/samples/snippets/storage_rename_file.py
index b47e18621..1125007c6 100644
--- a/samples/snippets/storage_rename_file.py
+++ b/samples/snippets/storage_rename_file.py
@@ -35,7 +35,7 @@ def rename_blob(bucket_name, blob_name, new_name):
new_blob = bucket.rename_blob(blob, new_name)
- print("Blob {} has been renamed to {}".format(blob.name, new_blob.name))
+ print(f"Blob {blob.name} has been renamed to {new_blob.name}")
# [END storage_rename_file]
diff --git a/samples/snippets/storage_rotate_encryption_key.py b/samples/snippets/storage_rotate_encryption_key.py
index 663ee4796..828b7d5ef 100644
--- a/samples/snippets/storage_rotate_encryption_key.py
+++ b/samples/snippets/storage_rotate_encryption_key.py
@@ -52,7 +52,7 @@ def rotate_encryption_key(
if token is None:
break
- print("Key rotation complete for Blob {}".format(blob_name))
+ print(f"Key rotation complete for Blob {blob_name}")
# [END storage_rotate_encryption_key]
diff --git a/samples/snippets/storage_set_bucket_public_iam.py b/samples/snippets/storage_set_bucket_public_iam.py
index 4b7df89df..0fb33f59c 100644
--- a/samples/snippets/storage_set_bucket_public_iam.py
+++ b/samples/snippets/storage_set_bucket_public_iam.py
@@ -39,7 +39,7 @@ def set_bucket_public_iam(
bucket.set_iam_policy(policy)
- print("Bucket {} is now publicly readable".format(bucket.name))
+ print(f"Bucket {bucket.name} is now publicly readable")
# [END storage_set_bucket_public_iam]
diff --git a/samples/snippets/storage_set_event_based_hold.py b/samples/snippets/storage_set_event_based_hold.py
index 52a89b88e..e04ed7552 100644
--- a/samples/snippets/storage_set_event_based_hold.py
+++ b/samples/snippets/storage_set_event_based_hold.py
@@ -32,7 +32,7 @@ def set_event_based_hold(bucket_name, blob_name):
blob.event_based_hold = True
blob.patch()
- print("Event based hold was set for {}".format(blob_name))
+ print(f"Event based hold was set for {blob_name}")
# [END storage_set_event_based_hold]
diff --git a/samples/snippets/storage_set_metadata.py b/samples/snippets/storage_set_metadata.py
index 07529ac68..90b6838c0 100644
--- a/samples/snippets/storage_set_metadata.py
+++ b/samples/snippets/storage_set_metadata.py
@@ -32,7 +32,7 @@ def set_blob_metadata(bucket_name, blob_name):
blob.metadata = metadata
blob.patch()
- print("The metadata for the blob {} is {}".format(blob.name, blob.metadata))
+ print(f"The metadata for the blob {blob.name} is {blob.metadata}")
# [END storage_set_metadata]
diff --git a/samples/snippets/storage_upload_encrypted_file.py b/samples/snippets/storage_upload_encrypted_file.py
index e7d02c67b..5f4987238 100644
--- a/samples/snippets/storage_upload_encrypted_file.py
+++ b/samples/snippets/storage_upload_encrypted_file.py
@@ -51,9 +51,7 @@ def upload_encrypted_blob(
blob.upload_from_filename(source_file_name)
print(
- "File {} uploaded to {}.".format(
- source_file_name, destination_blob_name
- )
+ f"File {source_file_name} uploaded to {destination_blob_name}."
)
diff --git a/samples/snippets/storage_upload_file.py b/samples/snippets/storage_upload_file.py
index fb02c3632..8e7d98630 100644
--- a/samples/snippets/storage_upload_file.py
+++ b/samples/snippets/storage_upload_file.py
@@ -36,9 +36,7 @@ def upload_blob(bucket_name, source_file_name, destination_blob_name):
blob.upload_from_filename(source_file_name)
print(
- "File {} uploaded to {}.".format(
- source_file_name, destination_blob_name
- )
+ f"File {source_file_name} uploaded to {destination_blob_name}."
)
diff --git a/samples/snippets/storage_upload_from_memory.py b/samples/snippets/storage_upload_from_memory.py
index ee8a9828c..eff3d222a 100644
--- a/samples/snippets/storage_upload_from_memory.py
+++ b/samples/snippets/storage_upload_from_memory.py
@@ -39,9 +39,7 @@ def upload_blob_from_memory(bucket_name, contents, destination_blob_name):
blob.upload_from_string(contents)
print(
- "{} with contents {} uploaded to {}.".format(
- destination_blob_name, contents, bucket_name
- )
+ f"{destination_blob_name} with contents {contents} uploaded to {bucket_name}."
)
# [END storage_file_upload_from_memory]
diff --git a/samples/snippets/storage_upload_from_stream.py b/samples/snippets/storage_upload_from_stream.py
index d43365e08..e2d31a5e3 100644
--- a/samples/snippets/storage_upload_from_stream.py
+++ b/samples/snippets/storage_upload_from_stream.py
@@ -44,9 +44,7 @@ def upload_blob_from_stream(bucket_name, file_obj, destination_blob_name):
blob.upload_from_file(file_obj)
print(
- "Stream data uploaded to {} in bucket {}.".format(
- destination_blob_name, bucket_name
- )
+ f"Stream data uploaded to {destination_blob_name} in bucket {bucket_name}."
)
# [END storage_stream_file_upload]
diff --git a/samples/snippets/storage_view_bucket_iam_members.py b/samples/snippets/storage_view_bucket_iam_members.py
index 5272f0ddb..184a1361f 100644
--- a/samples/snippets/storage_view_bucket_iam_members.py
+++ b/samples/snippets/storage_view_bucket_iam_members.py
@@ -30,7 +30,7 @@ def view_bucket_iam_members(bucket_name):
policy = bucket.get_iam_policy(requested_policy_version=3)
for binding in policy.bindings:
- print("Role: {}, Members: {}".format(binding["role"], binding["members"]))
+ print(f"Role: {binding['role']}, Members: {binding['members']}")
# [END storage_view_bucket_iam_members]
diff --git a/samples/snippets/uniform_bucket_level_access_test.py b/samples/snippets/uniform_bucket_level_access_test.py
index b43fa016f..8b7964038 100644
--- a/samples/snippets/uniform_bucket_level_access_test.py
+++ b/samples/snippets/uniform_bucket_level_access_test.py
@@ -23,7 +23,7 @@ def test_get_uniform_bucket_level_access(bucket, capsys):
)
out, _ = capsys.readouterr()
assert (
- "Uniform bucket-level access is disabled for {}.".format(bucket.name)
+ f"Uniform bucket-level access is disabled for {bucket.name}."
in out
)
@@ -35,7 +35,7 @@ def test_enable_uniform_bucket_level_access(bucket, capsys):
)
out, _ = capsys.readouterr()
assert (
- "Uniform bucket-level access was enabled for {}.".format(bucket.name)
+ f"Uniform bucket-level access was enabled for {bucket.name}."
in out
)
@@ -47,6 +47,6 @@ def test_disable_uniform_bucket_level_access(bucket, capsys):
)
out, _ = capsys.readouterr()
assert (
- "Uniform bucket-level access was disabled for {}.".format(bucket.name)
+ f"Uniform bucket-level access was disabled for {bucket.name}."
in out
)
diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py
index f84131f2f..d8921fb17 100644
--- a/tests/conformance/test_conformance.py
+++ b/tests/conformance/test_conformance.py
@@ -42,9 +42,9 @@
"""The storage testbench docker image info and commands."""
_DEFAULT_IMAGE_NAME = "gcr.io/cloud-devrel-public-resources/storage-testbench"
_DEFAULT_IMAGE_TAG = "latest"
-_DOCKER_IMAGE = "{}:{}".format(_DEFAULT_IMAGE_NAME, _DEFAULT_IMAGE_TAG)
+_DOCKER_IMAGE = f"{_DEFAULT_IMAGE_NAME}:{_DEFAULT_IMAGE_TAG}"
_PULL_CMD = ["docker", "pull", _DOCKER_IMAGE]
-_RUN_CMD = ["docker", "run", "--rm", "-d", "-p", "{}:9000".format(_PORT), _DOCKER_IMAGE]
+_RUN_CMD = ["docker", "run", "--rm", "-d", "-p", f"{_PORT}:9000", _DOCKER_IMAGE]
_CONF_TEST_PROJECT_ID = "my-project-id"
_CONF_TEST_SERVICE_ACCOUNT_EMAIL = (
@@ -846,9 +846,7 @@ def _get_retry_test(host, id):
instructions, and a boolean status "completed". This can be used to verify
if all instructions were used as expected.
"""
- get_retry_test_uri = "{base}{retry}/{id}".format(
- base=host, retry="/retry_test", id=id
- )
+ get_retry_test_uri = f"{host}/retry_test/{id}"
r = requests.get(get_retry_test_uri)
return r.json()
@@ -892,9 +890,7 @@ def _delete_retry_test(host, id):
"""
Delete the Retry Test resource by id.
"""
- get_retry_test_uri = "{base}{retry}/{id}".format(
- base=host, retry="/retry_test", id=id
- )
+ get_retry_test_uri = f"{host}/retry_test/{id}"
requests.delete(get_retry_test_uri)
@@ -926,7 +922,7 @@ def run_test_case(
id = r["id"]
except Exception as e:
raise Exception(
- "Error creating retry test for {}: {}".format(method_name, e)
+ f"Error creating retry test for {method_name}: {e}"
).with_traceback(e.__traceback__)
# Run retry tests on library methods.
@@ -944,7 +940,7 @@ def run_test_case(
)
except Exception as e:
logging.exception(
- "Caught an exception while running retry instructions\n {}".format(e)
+ f"Caught an exception while running retry instructions\n {e}"
)
success_results = False
else:
@@ -990,13 +986,11 @@ def run_test_case(
method_name = m["name"]
method_group = m["group"] if m.get("group", None) else m["name"]
if method_group not in method_mapping:
- logging.info("No tests for operation {}".format(method_name))
+ logging.info(f"No tests for operation {method_name}")
continue
for lib_func in method_mapping[method_group]:
- test_name = "test-S{}-{}-{}-{}".format(
- id, method_name, lib_func.__name__, i
- )
+ test_name = f"test-S{id}-{method_name}-{lib_func.__name__}-{i}"
globals()[test_name] = functools.partial(
run_test_case, id, m, c, lib_func, _HOST
)
diff --git a/tests/system/test_bucket.py b/tests/system/test_bucket.py
index de1a04aa9..d8796f5b3 100644
--- a/tests/system/test_bucket.py
+++ b/tests/system/test_bucket.py
@@ -153,7 +153,7 @@ def test_bucket_get_set_iam_policy(
policy = bucket.get_iam_policy(requested_policy_version=3)
assert policy == policy_no_version
- member = "serviceAccount:{}".format(storage_client.get_service_account_email())
+ member = f"serviceAccount:{storage_client.get_service_account_email()}"
binding_w_condition = {
"role": STORAGE_OBJECT_VIEWER_ROLE,
diff --git a/tests/system/test_notification.py b/tests/system/test_notification.py
index 59d0dfafd..f52ae3219 100644
--- a/tests/system/test_notification.py
+++ b/tests/system/test_notification.py
@@ -54,7 +54,7 @@ def topic_name():
@pytest.fixture(scope="session")
def topic_path(storage_client, topic_name):
- return "projects/{}/topics/{}".format(storage_client.project, topic_name)
+ return f"projects/{storage_client.project}/topics/{topic_name}"
@pytest.fixture(scope="session")
@@ -64,7 +64,7 @@ def notification_topic(storage_client, publisher_client, topic_path, no_mtls):
binding = policy.bindings.add()
binding.role = "roles/pubsub.publisher"
binding.members.append(
- "serviceAccount:{}".format(storage_client.get_service_account_email())
+ f"serviceAccount:{storage_client.get_service_account_email()}"
)
publisher_client.set_iam_policy(topic_path, policy)
diff --git a/tests/unit/test__http.py b/tests/unit/test__http.py
index 890fd1352..9e7bf216b 100644
--- a/tests/unit/test__http.py
+++ b/tests/unit/test__http.py
@@ -77,7 +77,7 @@ def test_build_api_url_no_extra_query_params(self):
conn = self._make_one(object())
uri = conn.build_api_url("/foo")
scheme, netloc, path, qs, _ = urlsplit(uri)
- self.assertEqual("%s://%s" % (scheme, netloc), conn.API_BASE_URL)
+ self.assertEqual(f"{scheme}://{netloc}", conn.API_BASE_URL)
self.assertEqual(path, "/".join(["", "storage", conn.API_VERSION, "foo"]))
parms = dict(parse_qsl(qs))
pretty_print = parms.pop("prettyPrint", "false")
@@ -92,7 +92,7 @@ def test_build_api_url_w_custom_endpoint(self):
conn = self._make_one(object(), api_endpoint=custom_endpoint)
uri = conn.build_api_url("/foo")
scheme, netloc, path, qs, _ = urlsplit(uri)
- self.assertEqual("%s://%s" % (scheme, netloc), custom_endpoint)
+ self.assertEqual(f"{scheme}://{netloc}", custom_endpoint)
self.assertEqual(path, "/".join(["", "storage", conn.API_VERSION, "foo"]))
parms = dict(parse_qsl(qs))
pretty_print = parms.pop("prettyPrint", "false")
@@ -106,7 +106,7 @@ def test_build_api_url_w_extra_query_params(self):
conn = self._make_one(object())
uri = conn.build_api_url("/foo", {"bar": "baz"})
scheme, netloc, path, qs, _ = urlsplit(uri)
- self.assertEqual("%s://%s" % (scheme, netloc), conn.API_BASE_URL)
+ self.assertEqual(f"{scheme}://{netloc}", conn.API_BASE_URL)
self.assertEqual(path, "/".join(["", "storage", conn.API_VERSION, "foo"]))
parms = dict(parse_qsl(qs))
self.assertEqual(parms["bar"], "baz")
@@ -246,7 +246,7 @@ def test_duplicate_user_agent(self):
client_info = ClientInfo(user_agent="test/123")
conn = self._make_one(object(), client_info=client_info)
- expected_user_agent = "test/123 gcloud-python/{} ".format(__version__)
+ expected_user_agent = f"test/123 gcloud-python/{__version__} "
self.assertEqual(conn._client_info.user_agent, expected_user_agent)
client = mock.Mock(_connection=conn, spec=["_connection"])
diff --git a/tests/unit/test__signing.py b/tests/unit/test__signing.py
index 48c9a00e1..a7fed514d 100644
--- a/tests/unit/test__signing.py
+++ b/tests/unit/test__signing.py
@@ -326,7 +326,7 @@ def test_w_query_parameters(self):
query_parameters = {"foo": "bar", "baz": "qux"}
canonical = self._call_fut(method, resource, query_parameters, None)
self.assertEqual(canonical.method, method)
- self.assertEqual(canonical.resource, "{}?baz=qux&foo=bar".format(resource))
+ self.assertEqual(canonical.resource, f"{resource}?baz=qux&foo=bar")
self.assertEqual(canonical.query_parameters, [("baz", "qux"), ("foo", "bar")])
self.assertEqual(canonical.headers, [])
@@ -399,7 +399,7 @@ def _generate_helper(
for key, value in query_parameters.items()
}
expected_qp = urlencode(sorted(normalized_qp.items()))
- expected_resource = "{}?{}".format(resource, expected_qp)
+ expected_resource = f"{resource}?{expected_qp}"
elements.append(content_md5 or "")
elements.append(content_type or "")
@@ -568,9 +568,7 @@ def _generate_helper(
self.assertEqual(params["X-Goog-Algorithm"], "GOOG4-RSA-SHA256")
now_date = now.date().strftime("%Y%m%d")
- expected_cred = "{}/{}/auto/storage/goog4_request".format(
- signer_email, now_date
- )
+ expected_cred = f"{signer_email}/{now_date}/auto/storage/goog4_request"
self.assertEqual(params["X-Goog-Credential"], expected_cred)
now_stamp = now.strftime("%Y%m%dT%H%M%SZ")
@@ -859,7 +857,7 @@ def test_conformance_bucket(test_data):
resource = "/"
_run_conformance_test(resource, test_data, _API_ACCESS_ENDPOINT)
else:
- resource = "/{}".format(test_data["bucket"])
+ resource = f"/{test_data['bucket']}"
_run_conformance_test(resource, test_data)
@@ -876,14 +874,12 @@ def test_conformance_blob(test_data):
# For the VIRTUAL_HOSTED_STYLE
else:
_API_ACCESS_ENDPOINT = (
- "{scheme}://{bucket_name}.storage.googleapis.com".format(
- scheme=test_data["scheme"], bucket_name=test_data["bucket"]
- )
+ f"{test_data['scheme']}://{test_data['bucket']}.storage.googleapis.com"
)
- resource = "/{}".format(test_data["object"])
+ resource = f"/{test_data['object']}"
_run_conformance_test(resource, test_data, _API_ACCESS_ENDPOINT)
else:
- resource = "/{}/{}".format(test_data["bucket"], test_data["object"])
+ resource = f"/{test_data['bucket']}/{test_data['object']}"
_run_conformance_test(resource, test_data)
diff --git a/tests/unit/test_acl.py b/tests/unit/test_acl.py
index 6083ef1e1..3c5e6515a 100644
--- a/tests/unit/test_acl.py
+++ b/tests/unit/test_acl.py
@@ -56,7 +56,7 @@ def test___str__w_identifier(self):
TYPE = "type"
ID = "id"
entity = self._make_one(TYPE, ID)
- self.assertEqual(str(entity), "%s-%s" % (TYPE, ID))
+ self.assertEqual(str(entity), f"{TYPE}-{ID}")
def test_grant_simple(self):
TYPE = "type"
@@ -229,7 +229,7 @@ def test___iter___non_empty_w_roles(self):
acl.loaded = True
entity = acl.entity(TYPE, ID)
entity.grant(ROLE)
- self.assertEqual(list(acl), [{"entity": "%s-%s" % (TYPE, ID), "role": ROLE}])
+ self.assertEqual(list(acl), [{"entity": f"{TYPE}-{ID}", "role": ROLE}])
def test___iter___non_empty_w_empty_role(self):
TYPE = "type"
@@ -313,7 +313,7 @@ def test_has_entity_hit_str(self):
acl = self._make_one()
acl.loaded = True
acl.entity(TYPE, ID)
- self.assertTrue(acl.has_entity("%s-%s" % (TYPE, ID)))
+ self.assertTrue(acl.has_entity(f"{TYPE}-{ID}"))
def test_has_entity_hit_entity(self):
TYPE = "type"
@@ -371,7 +371,7 @@ def test_get_entity_hit_str(self):
acl = self._make_one()
acl.loaded = True
acl.entity(TYPE, ID)
- self.assertTrue(acl.has_entity("%s-%s" % (TYPE, ID)))
+ self.assertTrue(acl.has_entity(f"{TYPE}-{ID}"))
def test_get_entity_hit_entity(self):
TYPE = "type"
@@ -422,7 +422,7 @@ def test_add_entity_hit(self):
TYPE = "type"
ID = "id"
- ENTITY_VAL = "%s-%s" % (TYPE, ID)
+ ENTITY_VAL = f"{TYPE}-{ID}"
ROLE = "role"
entity = _ACLEntity(TYPE, ID)
entity.grant(ROLE)
@@ -470,7 +470,7 @@ def test_user(self):
entity.grant(ROLE)
self.assertEqual(entity.type, "user")
self.assertEqual(entity.identifier, ID)
- self.assertEqual(list(acl), [{"entity": "user-%s" % ID, "role": ROLE}])
+ self.assertEqual(list(acl), [{"entity": f"user-{ID}", "role": ROLE}])
def test_group(self):
ID = "id"
@@ -481,7 +481,7 @@ def test_group(self):
entity.grant(ROLE)
self.assertEqual(entity.type, "group")
self.assertEqual(entity.identifier, ID)
- self.assertEqual(list(acl), [{"entity": "group-%s" % ID, "role": ROLE}])
+ self.assertEqual(list(acl), [{"entity": f"group-{ID}", "role": ROLE}])
def test_domain(self):
ID = "id"
@@ -492,7 +492,7 @@ def test_domain(self):
entity.grant(ROLE)
self.assertEqual(entity.type, "domain")
self.assertEqual(entity.identifier, ID)
- self.assertEqual(list(acl), [{"entity": "domain-%s" % ID, "role": ROLE}])
+ self.assertEqual(list(acl), [{"entity": f"domain-{ID}", "role": ROLE}])
def test_all(self):
ROLE = "role"
@@ -1003,8 +1003,8 @@ def test_ctor(self):
self.assertEqual(acl.entities, {})
self.assertFalse(acl.loaded)
self.assertIs(acl.bucket, bucket)
- self.assertEqual(acl.reload_path, "/b/%s/acl" % NAME)
- self.assertEqual(acl.save_path, "/b/%s" % NAME)
+ self.assertEqual(acl.reload_path, f"/b/{NAME}/acl")
+ self.assertEqual(acl.save_path, f"/b/{NAME}")
def test_user_project(self):
NAME = "name"
@@ -1033,8 +1033,8 @@ def test_ctor(self):
self.assertEqual(acl.entities, {})
self.assertFalse(acl.loaded)
self.assertIs(acl.bucket, bucket)
- self.assertEqual(acl.reload_path, "/b/%s/defaultObjectAcl" % NAME)
- self.assertEqual(acl.save_path, "/b/%s" % NAME)
+ self.assertEqual(acl.reload_path, f"/b/{NAME}/defaultObjectAcl")
+ self.assertEqual(acl.save_path, f"/b/{NAME}")
class Test_ObjectACL(unittest.TestCase):
@@ -1056,8 +1056,8 @@ def test_ctor(self):
self.assertEqual(acl.entities, {})
self.assertFalse(acl.loaded)
self.assertIs(acl.blob, blob)
- self.assertEqual(acl.reload_path, "/b/%s/o/%s/acl" % (NAME, BLOB_NAME))
- self.assertEqual(acl.save_path, "/b/%s/o/%s" % (NAME, BLOB_NAME))
+ self.assertEqual(acl.reload_path, f"/b/{NAME}/o/{BLOB_NAME}/acl")
+ self.assertEqual(acl.save_path, f"/b/{NAME}/o/{BLOB_NAME}")
def test_user_project(self):
NAME = "name"
@@ -1081,7 +1081,7 @@ def __init__(self, bucket, blob):
@property
def path(self):
- return "%s/o/%s" % (self.bucket.path, self.blob)
+ return f"{self.bucket.path}/o/{self.blob}"
class _Bucket(object):
@@ -1093,4 +1093,4 @@ def __init__(self, name):
@property
def path(self):
- return "/b/%s" % self.name
+ return f"/b/{self.name}"
diff --git a/tests/unit/test_batch.py b/tests/unit/test_batch.py
index 8b347fcf8..72b54769f 100644
--- a/tests/unit/test_batch.py
+++ b/tests/unit/test_batch.py
@@ -280,7 +280,7 @@ def _check_subrequest_no_payload(self, chunk, method, url):
self.assertEqual(lines[1], "Content-Type: application/http")
self.assertEqual(lines[2], "MIME-Version: 1.0")
self.assertEqual(lines[3], "")
- self.assertEqual(lines[4], "%s %s HTTP/1.1" % (method, url))
+ self.assertEqual(lines[4], f"{method} {url} HTTP/1.1")
self.assertEqual(lines[5], "")
self.assertEqual(lines[6], "")
@@ -294,14 +294,14 @@ def _check_subrequest_payload(self, chunk, method, url, payload):
self.assertEqual(lines[1], "Content-Type: application/http")
self.assertEqual(lines[2], "MIME-Version: 1.0")
self.assertEqual(lines[3], "")
- self.assertEqual(lines[4], "%s %s HTTP/1.1" % (method, url))
+ self.assertEqual(lines[4], f"{method} {url} HTTP/1.1")
if method == "GET":
self.assertEqual(len(lines), 7)
self.assertEqual(lines[5], "")
self.assertEqual(lines[6], "")
else:
self.assertEqual(len(lines), 9)
- self.assertEqual(lines[5], "Content-Length: %d" % len(payload_str))
+ self.assertEqual(lines[5], f"Content-Length: {len(payload_str)}")
self.assertEqual(lines[6], "Content-Type: application/json")
self.assertEqual(lines[7], "")
self.assertEqual(json.loads(lines[8]), payload)
@@ -352,7 +352,7 @@ def test_finish_nonempty(self):
self.assertEqual(response3.headers, {"Content-Length": "0"})
self.assertEqual(response3.status_code, NO_CONTENT)
- expected_url = "{}/batch/storage/v1".format(batch.API_BASE_URL)
+ expected_url = f"{batch.API_BASE_URL}/batch/storage/v1"
http.request.assert_called_once_with(
method="POST",
url=expected_url,
@@ -422,7 +422,7 @@ def test_finish_nonempty_with_status_failure(self):
self.assertEqual(target1._properties, {"foo": 1, "bar": 2})
self.assertIs(target2._properties, target2_future_before)
- expected_url = "{}/batch/storage/v1".format(batch.API_BASE_URL)
+ expected_url = f"{batch.API_BASE_URL}/batch/storage/v1"
http.request.assert_called_once_with(
method="POST",
url=expected_url,
diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py
index 8c86c002e..07d0e19c8 100644
--- a/tests/unit/test_blob.py
+++ b/tests/unit/test_blob.py
@@ -140,7 +140,7 @@ def _set_properties_helper(self, kms_key_name=None):
NOW = now.strftime(_RFC3339_MICROS)
BLOB_NAME = "blob-name"
GENERATION = 12345
- BLOB_ID = "name/{}/{}".format(BLOB_NAME, GENERATION)
+ BLOB_ID = f"name/{BLOB_NAME}/{GENERATION}"
SELF_LINK = "http://example.com/self/"
METAGENERATION = 23456
SIZE = 12345
@@ -321,7 +321,7 @@ def test_path_normal(self):
BLOB_NAME = "blob-name"
bucket = _Bucket()
blob = self._make_one(BLOB_NAME, bucket=bucket)
- self.assertEqual(blob.path, "/b/name/o/%s" % BLOB_NAME)
+ self.assertEqual(blob.path, f"/b/name/o/{BLOB_NAME}")
def test_path_w_slash_in_name(self):
BLOB_NAME = "parent/child"
@@ -402,7 +402,7 @@ def test_public_url(self):
bucket = _Bucket()
blob = self._make_one(BLOB_NAME, bucket=bucket)
self.assertEqual(
- blob.public_url, "https://storage.googleapis.com/name/%s" % BLOB_NAME
+ blob.public_url, f"https://storage.googleapis.com/name/{BLOB_NAME}"
)
def test_public_url_w_slash_in_name(self):
@@ -486,9 +486,7 @@ def _generate_signed_url_helper(
else:
effective_version = version
- to_patch = "google.cloud.storage.blob.generate_signed_url_{}".format(
- effective_version
- )
+ to_patch = f"google.cloud.storage.blob.generate_signed_url_{effective_version}"
with mock.patch(to_patch) as signer:
signed_uri = blob.generate_signed_url(
@@ -525,10 +523,10 @@ def _generate_signed_url_helper(
)
else:
expected_api_access_endpoint = api_access_endpoint
- expected_resource = "/{}/{}".format(bucket.name, quoted_name)
+ expected_resource = f"/{bucket.name}/{quoted_name}"
if virtual_hosted_style or bucket_bound_hostname:
- expected_resource = "/{}".format(quoted_name)
+ expected_resource = f"/{quoted_name}"
if encryption_key is not None:
expected_headers = headers or {}
@@ -946,7 +944,7 @@ def test__get_download_url_with_generation_match(self):
)
self.assertEqual(
download_url,
- "{}?ifGenerationMatch={}".format(MEDIA_LINK, GENERATION_NUMBER),
+ f"{MEDIA_LINK}?ifGenerationMatch={GENERATION_NUMBER}",
)
def test__get_download_url_with_media_link_w_user_project(self):
@@ -963,7 +961,7 @@ def test__get_download_url_with_media_link_w_user_project(self):
download_url = blob._get_download_url(client)
self.assertEqual(
- download_url, "{}?userProject={}".format(media_link, user_project)
+ download_url, f"{media_link}?userProject={user_project}"
)
def test__get_download_url_on_the_fly(self):
@@ -1920,7 +1918,7 @@ def _download_as_text_helper(
properties = {}
if charset is not None:
- properties["contentType"] = "text/plain; charset={}".format(charset)
+ properties["contentType"] = f"text/plain; charset={charset}"
elif no_charset:
properties = {"contentType": "text/plain"}
@@ -2815,7 +2813,7 @@ def _make_resumable_transport(
fake_response2 = self._mock_requests_response(
resumable_media.PERMANENT_REDIRECT, headers2
)
- json_body = '{{"size": "{:d}"}}'.format(total_bytes)
+ json_body = f'{{"size": "{total_bytes:d}"}}'
if data_corruption:
fake_response3 = resumable_media.DataCorruption(None)
else:
@@ -2847,7 +2845,7 @@ def _do_resumable_upload_call0(
+ "/o?uploadType=resumable"
)
if predefined_acl is not None:
- upload_url += "&predefinedAcl={}".format(predefined_acl)
+ upload_url += f"&predefinedAcl={predefined_acl}"
expected_headers = _get_default_headers(
client._connection.user_agent, x_upload_content_type=content_type
)
@@ -2875,9 +2873,9 @@ def _do_resumable_upload_call1(
):
# Second mock transport.request() does sends first chunk.
if size is None:
- content_range = "bytes 0-{:d}/*".format(blob.chunk_size - 1)
+ content_range = f"bytes 0-{blob.chunk_size - 1:d}/*"
else:
- content_range = "bytes 0-{:d}/{:d}".format(blob.chunk_size - 1, size)
+ content_range = f"bytes 0-{blob.chunk_size - 1:d}/{size:d}"
expected_headers = {
**_get_default_headers(
@@ -2911,9 +2909,7 @@ def _do_resumable_upload_call2(
timeout=None,
):
# Third mock transport.request() does sends last chunk.
- content_range = "bytes {:d}-{:d}/{:d}".format(
- blob.chunk_size, total_bytes - 1, total_bytes
- )
+ content_range = f"bytes {blob.chunk_size:d}-{total_bytes - 1:d}/{total_bytes:d}"
expected_headers = {
**_get_default_headers(
client._connection.user_agent, x_upload_content_type=content_type
@@ -2965,7 +2961,7 @@ def _do_resumable_helper(
}
headers2 = {
**_get_default_headers(USER_AGENT, content_type),
- "range": "bytes=0-{:d}".format(CHUNK_SIZE - 1),
+ "range": f"bytes=0-{CHUNK_SIZE - 1:d}",
}
headers3 = _get_default_headers(USER_AGENT, content_type)
transport, responses = self._make_resumable_transport(
@@ -3721,7 +3717,7 @@ def test_get_iam_policy_defaults(self):
from google.api_core.iam import Policy
blob_name = "blob-name"
- path = "/b/name/o/%s" % (blob_name,)
+ path = f"/b/name/o/{blob_name}"
etag = "DEADBEEF"
version = 1
owner1 = "user:phred@example.com"
@@ -3756,7 +3752,7 @@ def test_get_iam_policy_defaults(self):
self.assertEqual(policy.version, api_response["version"])
self.assertEqual(dict(policy), expected_policy)
- expected_path = "%s/iam" % (path,)
+ expected_path = f"{path}/iam"
expected_query_params = {}
client._get_resource.assert_called_once_with(
expected_path,
@@ -3772,7 +3768,7 @@ def test_get_iam_policy_w_user_project_w_timeout(self):
blob_name = "blob-name"
user_project = "user-project-123"
timeout = 42
- path = "/b/name/o/%s" % (blob_name,)
+ path = f"/b/name/o/{blob_name}"
etag = "DEADBEEF"
version = 1
api_response = {
@@ -3794,7 +3790,7 @@ def test_get_iam_policy_w_user_project_w_timeout(self):
self.assertEqual(policy.version, api_response["version"])
self.assertEqual(dict(policy), expected_policy)
- expected_path = "%s/iam" % (path,)
+ expected_path = f"{path}/iam"
expected_query_params = {"userProject": user_project}
client._get_resource.assert_called_once_with(
expected_path,
@@ -3808,7 +3804,7 @@ def test_get_iam_policy_w_requested_policy_version(self):
from google.cloud.storage.iam import STORAGE_OWNER_ROLE
blob_name = "blob-name"
- path = "/b/name/o/%s" % (blob_name,)
+ path = f"/b/name/o/{blob_name}"
etag = "DEADBEEF"
version = 3
owner1 = "user:phred@example.com"
@@ -3828,7 +3824,7 @@ def test_get_iam_policy_w_requested_policy_version(self):
self.assertEqual(policy.version, version)
- expected_path = "%s/iam" % (path,)
+ expected_path = f"{path}/iam"
expected_query_params = {"optionsRequestedPolicyVersion": version}
client._get_resource.assert_called_once_with(
expected_path,
@@ -3846,7 +3842,7 @@ def test_set_iam_policy(self):
from google.api_core.iam import Policy
blob_name = "blob-name"
- path = "/b/name/o/%s" % (blob_name,)
+ path = f"/b/name/o/{blob_name}"
etag = "DEADBEEF"
version = 1
owner1 = "user:phred@example.com"
@@ -3876,7 +3872,7 @@ def test_set_iam_policy(self):
self.assertEqual(returned.version, version)
self.assertEqual(dict(returned), dict(policy))
- expected_path = "%s/iam" % (path,)
+ expected_path = f"{path}/iam"
expected_data = {
"resourceId": path,
"bindings": mock.ANY,
@@ -3904,7 +3900,7 @@ def test_set_iam_policy_w_user_project_w_explicit_client_w_timeout_retry(self):
blob_name = "blob-name"
user_project = "user-project-123"
- path = "/b/name/o/%s" % (blob_name,)
+ path = f"/b/name/o/{blob_name}"
etag = "DEADBEEF"
version = 1
bindings = []
@@ -3929,7 +3925,7 @@ def test_set_iam_policy_w_user_project_w_explicit_client_w_timeout_retry(self):
self.assertEqual(returned.version, version)
self.assertEqual(dict(returned), dict(policy))
- expected_path = "%s/iam" % (path,)
+ expected_path = f"{path}/iam"
expected_data = { # bindings omitted
"resourceId": path,
}
@@ -3965,7 +3961,7 @@ def test_test_iam_permissions_defaults(self):
self.assertEqual(found, expected)
- expected_path = "/b/name/o/%s/iam/testPermissions" % (blob_name,)
+ expected_path = f"/b/name/o/{blob_name}/iam/testPermissions"
expected_query_params = {"permissions": permissions}
client._get_resource.assert_called_once_with(
expected_path,
@@ -4000,7 +3996,7 @@ def test_test_iam_permissions_w_user_project_w_timeout_w_retry(self):
self.assertEqual(found, expected)
- expected_path = "/b/name/o/%s/iam/testPermissions" % (blob_name,)
+ expected_path = f"/b/name/o/{blob_name}/iam/testPermissions"
expected_query_params = {
"permissions": permissions,
"userProject": user_project,
@@ -4190,7 +4186,7 @@ def test_compose_wo_content_type_set(self):
self.assertIsNone(destination.content_type)
- expected_path = "/b/name/o/%s/compose" % destination_name
+ expected_path = f"/b/name/o/{destination_name}/compose"
expected_data = {
"sourceObjects": [
{"name": source_1.name, "generation": source_1.generation},
@@ -4227,7 +4223,7 @@ def test_compose_minimal_w_user_project_w_timeout(self):
self.assertEqual(destination.etag, "DEADBEEF")
- expected_path = "/b/name/o/%s/compose" % destination_name
+ expected_path = f"/b/name/o/{destination_name}/compose"
expected_data = {
"sourceObjects": [
{"name": source_1.name, "generation": source_1.generation},
@@ -4265,7 +4261,7 @@ def test_compose_w_additional_property_changes_w_retry(self):
self.assertEqual(destination.etag, "DEADBEEF")
- expected_path = "/b/name/o/%s/compose" % destination_name
+ expected_path = f"/b/name/o/{destination_name}/compose"
expected_data = {
"sourceObjects": [
{"name": source_1.name, "generation": source_1.generation},
@@ -4306,7 +4302,7 @@ def test_compose_w_source_generation_match(self):
if_source_generation_match=source_generation_numbers,
)
- expected_path = "/b/name/o/%s/compose" % destination_name
+ expected_path = f"/b/name/o/{destination_name}/compose"
expected_data = {
"sourceObjects": [
{
@@ -4374,7 +4370,7 @@ def test_compose_w_source_generation_match_nones(self):
if_source_generation_match=source_generation_numbers,
)
- expected_path = "/b/name/o/%s/compose" % destination_name
+ expected_path = f"/b/name/o/{destination_name}/compose"
expected_data = {
"sourceObjects": [
{
@@ -4416,7 +4412,7 @@ def test_compose_w_generation_match(self):
if_generation_match=generation_number,
)
- expected_path = "/b/name/o/%s/compose" % destination_name
+ expected_path = f"/b/name/o/{destination_name}/compose"
expected_data = {
"sourceObjects": [
{"name": source_1.name, "generation": source_1.generation},
@@ -4456,7 +4452,7 @@ def test_compose_w_if_generation_match_list_w_warning(self, mock_warn):
if_generation_match=generation_numbers,
)
- expected_path = "/b/name/o/%s/compose" % destination_name
+ expected_path = f"/b/name/o/{destination_name}/compose"
expected_data = {
"sourceObjects": [
{
@@ -4542,7 +4538,7 @@ def test_compose_w_if_metageneration_match_list_w_warning(self, mock_warn):
if_metageneration_match=metageneration_number,
)
- expected_path = "/b/name/o/%s/compose" % destination_name
+ expected_path = f"/b/name/o/{destination_name}/compose"
expected_data = {
"sourceObjects": [
{"name": source_1_name, "generation": None},
@@ -4584,7 +4580,7 @@ def test_compose_w_metageneration_match(self):
if_metageneration_match=metageneration_number,
)
- expected_path = "/b/name/o/%s/compose" % destination_name
+ expected_path = f"/b/name/o/{destination_name}/compose"
expected_data = {
"sourceObjects": [
{"name": source_1.name, "generation": source_1.generation},
@@ -4830,7 +4826,7 @@ def test_rewrite_same_name_no_old_key_new_key_done_w_user_project(self):
self.assertEqual(rewritten, bytes_rewritten)
self.assertEqual(size, object_size)
- expected_path = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (blob_name, blob_name)
+ expected_path = f"/b/name/o/{blob_name}/rewriteTo/b/name/o/{blob_name}"
expected_query_params = {"userProject": user_project}
expected_data = {}
expected_headers = {
@@ -4878,7 +4874,7 @@ def test_rewrite_same_name_no_key_new_key_w_token(self):
self.assertEqual(rewritten, bytes_rewritten)
self.assertEqual(size, object_size)
- expected_path = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (blob_name, blob_name)
+ expected_path = f"/b/name/o/{blob_name}/rewriteTo/b/name/o/{blob_name}"
expected_data = {}
expected_query_params = {"rewriteToken": previous_token}
expected_headers = {
@@ -4930,7 +4926,7 @@ def test_rewrite_same_name_w_old_key_new_kms_key(self):
self.assertEqual(rewritten, bytes_rewritten)
self.assertEqual(size, object_size)
- expected_path = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (blob_name, blob_name)
+ expected_path = f"/b/name/o/{blob_name}/rewriteTo/b/name/o/{blob_name}"
expected_data = {"kmsKeyName": dest_kms_resource}
expected_query_params = {"destinationKmsKeyName": dest_kms_resource}
expected_headers = {
@@ -5799,7 +5795,7 @@ def _helper(self, message, code=http.client.BAD_REQUEST, reason=None, args=()):
def test_default(self):
message = "Failure"
exc_info = self._helper(message)
- expected = "GET http://example.com/: {}".format(message)
+ expected = f"GET http://example.com/: {message}"
self.assertEqual(exc_info.exception.message, expected)
self.assertEqual(exc_info.exception.errors, [])
@@ -5831,17 +5827,17 @@ def test_w_empty_list(self):
def test_wo_existing_qs(self):
BASE_URL = "https://test.example.com/base"
NV_LIST = [("one", "One"), ("two", "Two")]
- expected = "&".join(["{}={}".format(name, value) for name, value in NV_LIST])
+ expected = "&".join([f"{name}={value}" for name, value in NV_LIST])
self.assertEqual(
- self._call_fut(BASE_URL, NV_LIST), "{}?{}".format(BASE_URL, expected)
+ self._call_fut(BASE_URL, NV_LIST), f"{BASE_URL}?{expected}"
)
def test_w_existing_qs(self):
BASE_URL = "https://test.example.com/base?one=Three"
NV_LIST = [("one", "One"), ("two", "Two")]
- expected = "&".join(["{}={}".format(name, value) for name, value in NV_LIST])
+ expected = "&".join([f"{name}={value}" for name, value in NV_LIST])
self.assertEqual(
- self._call_fut(BASE_URL, NV_LIST), "{}&{}".format(BASE_URL, expected)
+ self._call_fut(BASE_URL, NV_LIST), f"{BASE_URL}&{expected}"
)
diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py
index eb402de9e..f253db3e1 100644
--- a/tests/unit/test_bucket.py
+++ b/tests/unit/test_bucket.py
@@ -902,7 +902,7 @@ def test_path_no_name(self):
def test_path_w_name(self):
NAME = "name"
bucket = self._make_one(name=NAME)
- self.assertEqual(bucket.path, "/b/%s" % NAME)
+ self.assertEqual(bucket.path, f"/b/{NAME}")
def test_get_blob_miss_w_defaults(self):
from google.cloud.exceptions import NotFound
@@ -918,7 +918,7 @@ def test_get_blob_miss_w_defaults(self):
self.assertIsNone(result)
- expected_path = "/b/%s/o/%s" % (name, blob_name)
+ expected_path = f"/b/{name}/o/{blob_name}"
expected_query_params = {"projection": "noAcl"}
expected_headers = {}
client._get_resource.assert_called_once_with(
@@ -952,7 +952,7 @@ def test_get_blob_hit_w_user_project(self):
self.assertIs(blob.bucket, bucket)
self.assertEqual(blob.name, blob_name)
- expected_path = "/b/%s/o/%s" % (name, blob_name)
+ expected_path = f"/b/{name}/o/{blob_name}"
expected_query_params = {
"userProject": user_project,
"projection": "noAcl",
@@ -986,7 +986,7 @@ def test_get_blob_hit_w_generation_w_timeout(self):
self.assertEqual(blob.name, blob_name)
self.assertEqual(blob.generation, generation)
- expected_path = "/b/%s/o/%s" % (name, blob_name)
+ expected_path = f"/b/{name}/o/{blob_name}"
expected_query_params = {
"generation": generation,
"projection": "noAcl",
@@ -1020,7 +1020,7 @@ def test_get_blob_w_etag_match_w_retry(self):
self.assertEqual(blob.name, blob_name)
self.assertEqual(blob.etag, etag)
- expected_path = "/b/%s/o/%s" % (name, blob_name)
+ expected_path = f"/b/{name}/o/{blob_name}"
expected_query_params = {
"projection": "noAcl",
}
@@ -1055,7 +1055,7 @@ def test_get_blob_w_generation_match_w_retry(self):
self.assertEqual(blob.name, blob_name)
self.assertEqual(blob.generation, generation)
- expected_path = "/b/%s/o/%s" % (name, blob_name)
+ expected_path = f"/b/{name}/o/{blob_name}"
expected_query_params = {
"ifGenerationMatch": generation,
"projection": "noAcl",
@@ -1093,7 +1093,7 @@ def test_get_blob_hit_with_kwargs_w_explicit_client(self):
self.assertEqual(blob.chunk_size, chunk_size)
self.assertEqual(blob._encryption_key, key)
- expected_path = "/b/%s/o/%s" % (name, blob_name)
+ expected_path = f"/b/{name}/o/{blob_name}"
expected_query_params = {
"projection": "noAcl",
}
@@ -1218,7 +1218,7 @@ def test_list_notifications_w_defaults(self):
self.assertIs(iterator, client._list_resource.return_value)
self.assertIs(iterator.bucket, bucket)
- expected_path = "/b/{}/notificationConfigs".format(bucket_name)
+ expected_path = f"/b/{bucket_name}/notificationConfigs"
expected_item_to_value = _item_to_notification
client._list_resource.assert_called_once_with(
expected_path,
@@ -1246,7 +1246,7 @@ def test_list_notifications_w_explicit(self):
self.assertIs(iterator, other_client._list_resource.return_value)
self.assertIs(iterator.bucket, bucket)
- expected_path = "/b/{}/notificationConfigs".format(bucket_name)
+ expected_path = f"/b/{bucket_name}/notificationConfigs"
expected_item_to_value = _item_to_notification
other_client._list_resource.assert_called_once_with(
expected_path,
@@ -1270,7 +1270,7 @@ def test_get_notification_miss_w_defaults(self):
with self.assertRaises(NotFound):
bucket.get_notification(notification_id=notification_id)
- expected_path = "/b/{}/notificationConfigs/{}".format(name, notification_id)
+ expected_path = f"/b/{name}/notificationConfigs/{notification_id}"
expected_query_params = {}
client._get_resource.assert_called_once_with(
expected_path,
@@ -1319,7 +1319,7 @@ def test_get_notification_hit_w_explicit_w_user_project(self):
self.assertIsNone(notification.blob_name_prefix)
self.assertEqual(notification.payload_format, JSON_API_V1_PAYLOAD_FORMAT)
- expected_path = "/b/{}/notificationConfigs/{}".format(name, notification_id)
+ expected_path = f"/b/{name}/notificationConfigs/{notification_id}"
expected_query_params = {"userProject": user_project}
client._get_resource.assert_called_once_with(
expected_path,
@@ -1519,7 +1519,7 @@ def test_delete_blob_miss_w_defaults(self):
with self.assertRaises(NotFound):
bucket.delete_blob(blob_name)
- expected_path = "/b/%s/o/%s" % (name, blob_name)
+ expected_path = f"/b/{name}/o/{blob_name}"
expected_query_params = {}
client._delete_resource.assert_called_once_with(
expected_path,
@@ -1542,7 +1542,7 @@ def test_delete_blob_hit_w_user_project_w_timeout(self):
self.assertIsNone(result)
- expected_path = "/b/%s/o/%s" % (name, blob_name)
+ expected_path = f"/b/{name}/o/{blob_name}"
expected_query_params = {"userProject": user_project}
client._delete_resource.assert_called_once_with(
expected_path,
@@ -1565,7 +1565,7 @@ def test_delete_blob_hit_w_generation_w_retry(self):
self.assertIsNone(result)
- expected_path = "/b/%s/o/%s" % (name, blob_name)
+ expected_path = f"/b/{name}/o/{blob_name}"
expected_query_params = {"generation": generation}
client._delete_resource.assert_called_once_with(
expected_path,
@@ -1592,7 +1592,7 @@ def test_delete_blob_hit_w_generation_match(self):
self.assertIsNone(result)
- expected_path = "/b/%s/o/%s" % (name, blob_name)
+ expected_path = f"/b/{name}/o/{blob_name}"
expected_query_params = {
"ifGenerationMatch": generation,
"ifMetagenerationMatch": metageneration,
@@ -1811,7 +1811,7 @@ def test_reload_w_etag_match(self):
bucket.reload(if_etag_match=etag)
- expected_path = "/b/%s" % (name,)
+ expected_path = f"/b/{name}"
expected_query_params = {
"projection": "noAcl",
}
@@ -1837,7 +1837,7 @@ def test_reload_w_metageneration_match(self):
bucket.reload(if_metageneration_match=metageneration_number)
- expected_path = "/b/%s" % (name,)
+ expected_path = f"/b/{name}"
expected_query_params = {
"projection": "noAcl",
"ifMetagenerationMatch": metageneration_number,
@@ -1899,7 +1899,7 @@ def _make_blob(bucket_name, blob_name):
blob = mock.create_autospec(Blob)
blob.name = blob_name
- blob.path = "/b/{}/o/{}".format(bucket_name, blob_name)
+ blob.path = f"/b/{bucket_name}/o/{blob_name}"
return blob
def test_copy_blobs_wo_name(self):
@@ -2048,7 +2048,7 @@ def test_copy_blob_w_preserve_acl_false_w_explicit_client(self):
_target_object=new_blob,
)
- expected_patch_path = "/b/{}/o/{}".format(dest_name, new_name)
+ expected_patch_path = f"/b/{dest_name}/o/{new_name}"
expected_patch_data = {"acl": []}
expected_patch_query_params = {"projection": "full"}
client._patch_resource.assert_called_once_with(
@@ -2960,7 +2960,7 @@ def test_get_iam_policy_defaults(self):
from google.api_core.iam import Policy
bucket_name = "name"
- path = "/b/%s" % (bucket_name,)
+ path = f"/b/{bucket_name}"
etag = "DEADBEEF"
version = 1
owner1 = "user:phred@example.com"
@@ -2994,7 +2994,7 @@ def test_get_iam_policy_defaults(self):
self.assertEqual(policy.version, api_response["version"])
self.assertEqual(dict(policy), expected_policy)
- expected_path = "/b/%s/iam" % (bucket_name,)
+ expected_path = f"/b/{bucket_name}/iam"
expected_query_params = {}
client._get_resource.assert_called_once_with(
expected_path,
@@ -3010,7 +3010,7 @@ def test_get_iam_policy_w_user_project_w_timeout(self):
bucket_name = "name"
timeout = 42
user_project = "user-project-123"
- path = "/b/%s" % (bucket_name,)
+ path = f"/b/{bucket_name}"
etag = "DEADBEEF"
version = 1
api_response = {
@@ -3033,7 +3033,7 @@ def test_get_iam_policy_w_user_project_w_timeout(self):
self.assertEqual(policy.version, api_response["version"])
self.assertEqual(dict(policy), expected_policy)
- expected_path = "/b/%s/iam" % (bucket_name,)
+ expected_path = f"/b/{bucket_name}/iam"
expected_query_params = {"userProject": user_project}
client._get_resource.assert_called_once_with(
expected_path,
@@ -3047,7 +3047,7 @@ def test_get_iam_policy_w_requested_policy_version_w_retry(self):
from google.cloud.storage.iam import STORAGE_OWNER_ROLE
bucket_name = "name"
- path = "/b/%s" % (bucket_name,)
+ path = f"/b/{bucket_name}"
etag = "DEADBEEF"
version = 3
owner1 = "user:phred@example.com"
@@ -3067,7 +3067,7 @@ def test_get_iam_policy_w_requested_policy_version_w_retry(self):
self.assertEqual(policy.version, version)
- expected_path = "/b/%s/iam" % (bucket_name,)
+ expected_path = f"/b/{bucket_name}/iam"
expected_query_params = {"optionsRequestedPolicyVersion": version}
client._get_resource.assert_called_once_with(
expected_path,
@@ -3113,7 +3113,7 @@ def test_set_iam_policy_w_defaults(self):
self.assertEqual(returned.version, version)
self.assertEqual(dict(returned), dict(policy))
- expected_path = "%s/iam" % (bucket.path,)
+ expected_path = f"{bucket.path}/iam"
expected_data = {
"resourceId": bucket.path,
"bindings": mock.ANY,
@@ -3177,7 +3177,7 @@ def test_set_iam_policy_w_user_project_w_expl_client_w_timeout_retry(self):
self.assertEqual(returned.version, version)
self.assertEqual(dict(returned), dict(policy))
- expected_path = "%s/iam" % (bucket.path,)
+ expected_path = f"{bucket.path}/iam"
expected_data = {
"resourceId": bucket.path,
"bindings": mock.ANY,
@@ -3221,7 +3221,7 @@ def test_test_iam_permissions_defaults(self):
self.assertEqual(found, expected)
- expected_path = "/b/%s/iam/testPermissions" % (name,)
+ expected_path = f"/b/{name}/iam/testPermissions"
expected_query_params = {}
expected_query_params = {"permissions": permissions}
client._get_resource.assert_called_once_with(
@@ -3256,7 +3256,7 @@ def test_test_iam_permissions_w_user_project_w_timeout_w_retry(self):
self.assertEqual(found, expected)
- expected_path = "/b/%s/iam/testPermissions" % (name,)
+ expected_path = f"/b/{name}/iam/testPermissions"
expected_query_params = {
"permissions": permissions,
"userProject": user_project,
@@ -3369,7 +3369,7 @@ def _make_public_w_future_helper(self, default_object_acl_loaded=True):
)
if not default_object_acl_loaded:
- expected_path = "/b/%s/defaultObjectAcl" % (name,)
+ expected_path = f"/b/{name}/defaultObjectAcl"
expected_query_params = {}
client._get_resource.assert_called_once_with(
expected_path,
@@ -3581,7 +3581,7 @@ def _make_private_w_future_helper(self, default_object_acl_loaded=True):
)
if not default_object_acl_loaded:
- expected_path = "/b/%s/defaultObjectAcl" % (name,)
+ expected_path = f"/b/{name}/defaultObjectAcl"
expected_query_params = {}
client._get_resource.assert_called_once_with(
expected_path,
@@ -3724,9 +3724,7 @@ def _generate_upload_policy_helper(self, **kwargs):
break
else: # pragma: NO COVER
self.fail(
- "Condition {} not found in {}".format(
- expected_condition, policy_conditions
- )
+ f"Condition {expected_condition} not found in {policy_conditions}"
)
return policy_fields, policy
@@ -3831,7 +3829,7 @@ def test_lock_retention_policy_ok_w_timeout_w_retry(self):
bucket.lock_retention_policy(timeout=timeout, retry=retry)
- expected_path = "/b/{}/lockRetentionPolicy".format(name)
+ expected_path = f"/b/{name}/lockRetentionPolicy"
expected_data = None
expected_query_params = {"ifMetagenerationMatch": metageneration}
client._post_resource.assert_called_once_with(
@@ -3869,7 +3867,7 @@ def test_lock_retention_policy_w_user_project(self):
bucket.lock_retention_policy()
- expected_path = "/b/{}/lockRetentionPolicy".format(name)
+ expected_path = f"/b/{name}/lockRetentionPolicy"
expected_data = None
expected_query_params = {
"ifMetagenerationMatch": metageneration,
@@ -3964,7 +3962,7 @@ def _generate_signed_url_helper(
)
else:
expected_api_access_endpoint = api_access_endpoint
- expected_resource = "/{}".format(parse.quote(bucket_name))
+ expected_resource = f"/{parse.quote(bucket_name)}"
if virtual_hosted_style or bucket_bound_hostname:
expected_resource = "/"
diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py
index 6a97d8d41..07d1b0655 100644
--- a/tests/unit/test_client.py
+++ b/tests/unit/test_client.py
@@ -371,7 +371,7 @@ def test_get_service_account_email_wo_project(self):
)
_, kwargs = http.request.call_args
scheme, netloc, path, qs, _ = urllib.parse.urlsplit(kwargs.get("url"))
- self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL)
+ self.assertEqual(f"{scheme}://{netloc}", client._connection.API_BASE_URL)
self.assertEqual(
path,
"/".join(
@@ -409,7 +409,7 @@ def test_get_service_account_email_w_project(self):
)
_, kwargs = http.request.call_args
scheme, netloc, path, qs, _ = urllib.parse.urlsplit(kwargs.get("url"))
- self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL)
+ self.assertEqual(f"{scheme}://{netloc}", client._connection.API_BASE_URL)
self.assertEqual(
path,
"/".join(
@@ -899,7 +899,7 @@ def test_get_bucket_miss_w_string_w_defaults(self):
with self.assertRaises(NotFound):
client.get_bucket(bucket_name)
- expected_path = "/b/%s" % (bucket_name,)
+ expected_path = f"/b/{bucket_name}"
expected_query_params = {"projection": "noAcl"}
expected_headers = {}
client._get_resource.assert_called_once_with(
@@ -931,7 +931,7 @@ def test_get_bucket_hit_w_string_w_timeout(self):
self.assertIsInstance(bucket, Bucket)
self.assertEqual(bucket.name, bucket_name)
- expected_path = "/b/%s" % (bucket_name,)
+ expected_path = f"/b/{bucket_name}"
expected_query_params = {"projection": "noAcl"}
expected_headers = {}
client._get_resource.assert_called_once_with(
@@ -961,7 +961,7 @@ def test_get_bucket_hit_w_string_w_metageneration_match(self):
self.assertIsInstance(bucket, Bucket)
self.assertEqual(bucket.name, bucket_name)
- expected_path = "/b/%s" % (bucket_name,)
+ expected_path = f"/b/{bucket_name}"
expected_query_params = {
"projection": "noAcl",
"ifMetagenerationMatch": metageneration_number,
@@ -991,7 +991,7 @@ def test_get_bucket_miss_w_object_w_retry(self):
with self.assertRaises(NotFound):
client.get_bucket(bucket_obj, retry=retry)
- expected_path = "/b/%s" % (bucket_name,)
+ expected_path = f"/b/{bucket_name}"
expected_query_params = {"projection": "noAcl"}
expected_headers = {}
client._get_resource.assert_called_once_with(
@@ -1023,7 +1023,7 @@ def test_get_bucket_hit_w_object_defaults(self):
self.assertIsInstance(bucket, Bucket)
self.assertEqual(bucket.name, bucket_name)
- expected_path = "/b/%s" % (bucket_name,)
+ expected_path = f"/b/{bucket_name}"
expected_query_params = {"projection": "noAcl"}
expected_headers = {}
client._get_resource.assert_called_once_with(
@@ -1051,7 +1051,7 @@ def test_get_bucket_hit_w_object_w_retry_none(self):
self.assertIsInstance(bucket, Bucket)
self.assertEqual(bucket.name, bucket_name)
- expected_path = "/b/%s" % (bucket_name,)
+ expected_path = f"/b/{bucket_name}"
expected_query_params = {"projection": "noAcl"}
expected_headers = {}
client._get_resource.assert_called_once_with(
@@ -1077,7 +1077,7 @@ def test_lookup_bucket_miss_w_defaults(self):
self.assertIsNone(bucket)
- expected_path = "/b/%s" % (bucket_name,)
+ expected_path = f"/b/{bucket_name}"
expected_query_params = {"projection": "noAcl"}
expected_headers = {}
client._get_resource.assert_called_once_with(
@@ -1109,7 +1109,7 @@ def test_lookup_bucket_hit_w_timeout(self):
self.assertIsInstance(bucket, Bucket)
self.assertEqual(bucket.name, bucket_name)
- expected_path = "/b/%s" % (bucket_name,)
+ expected_path = f"/b/{bucket_name}"
expected_query_params = {"projection": "noAcl"}
expected_headers = {}
client._get_resource.assert_called_once_with(
@@ -1139,7 +1139,7 @@ def test_lookup_bucket_hit_w_metageneration_match(self):
self.assertIsInstance(bucket, Bucket)
self.assertEqual(bucket.name, bucket_name)
- expected_path = "/b/%s" % (bucket_name,)
+ expected_path = f"/b/{bucket_name}"
expected_query_params = {
"projection": "noAcl",
"ifMetagenerationMatch": metageneration_number,
@@ -1170,7 +1170,7 @@ def test_lookup_bucket_hit_w_retry(self):
self.assertIsInstance(bucket, Bucket)
self.assertEqual(bucket.name, bucket_name)
- expected_path = "/b/%s" % (bucket_name,)
+ expected_path = f"/b/{bucket_name}"
expected_query_params = {"projection": "noAcl"}
expected_headers = {}
client._get_resource.assert_called_once_with(
@@ -1811,7 +1811,7 @@ def test_list_blobs_w_defaults_w_bucket_obj(self):
self.assertIs(iterator.bucket, bucket)
self.assertEqual(iterator.prefixes, set())
- expected_path = "/b/{}/o".format(bucket_name)
+ expected_path = f"/b/{bucket_name}/o"
expected_item_to_value = _item_to_blob
expected_page_token = None
expected_max_results = None
@@ -1855,7 +1855,7 @@ def test_list_blobs_w_explicit_w_user_project(self):
bucket = client._bucket_arg_to_bucket.return_value = mock.Mock(
spec=["path", "user_project"],
)
- bucket.path = "/b/{}".format(bucket_name)
+ bucket.path = f"/b/{bucket_name}"
bucket.user_project = user_project
timeout = 42
retry = mock.Mock(spec=[])
@@ -1881,7 +1881,7 @@ def test_list_blobs_w_explicit_w_user_project(self):
self.assertIs(iterator.bucket, bucket)
self.assertEqual(iterator.prefixes, set())
- expected_path = "/b/{}/o".format(bucket_name)
+ expected_path = f"/b/{bucket_name}/o"
expected_item_to_value = _item_to_blob
expected_page_token = page_token
expected_max_results = max_results
@@ -2119,7 +2119,7 @@ def _create_hmac_key_helper(
email = "storage-user-123@example.com"
secret = "a" * 40
now = datetime.datetime.utcnow().replace(tzinfo=UTC)
- now_stamp = "{}Z".format(now.isoformat())
+ now_stamp = f"{now.isoformat()}Z"
if explicit_project is not None:
expected_project = explicit_project
@@ -2131,7 +2131,7 @@ def _create_hmac_key_helper(
"metadata": {
"accessId": access_id,
"etag": "ETAG",
- "id": "projects/{}/hmacKeys/{}".format(project, access_id),
+ "id": f"projects/{project}/hmacKeys/{access_id}",
"project": expected_project,
"state": "ACTIVE",
"serviceAccountEmail": email,
@@ -2170,7 +2170,7 @@ def _create_hmac_key_helper(
self.assertEqual(metadata._properties, api_response["metadata"])
self.assertEqual(secret, api_response["secret"])
- expected_path = "/projects/{}/hmacKeys".format(expected_project)
+ expected_path = f"/projects/{expected_project}/hmacKeys"
expected_data = None
expected_query_params = {"serviceAccountEmail": email}
@@ -2212,7 +2212,7 @@ def test_list_hmac_keys_w_defaults(self):
self.assertIs(iterator, client._list_resource.return_value)
- expected_path = "/projects/{}/hmacKeys".format(project)
+ expected_path = f"/projects/{project}/hmacKeys"
expected_item_to_value = _item_to_hmac_key_metadata
expected_max_results = None
expected_extra_params = {}
@@ -2252,7 +2252,7 @@ def test_list_hmac_keys_w_explicit(self):
self.assertIs(iterator, client._list_resource.return_value)
- expected_path = "/projects/{}/hmacKeys".format(other_project)
+ expected_path = f"/projects/{other_project}/hmacKeys"
expected_item_to_value = _item_to_hmac_key_metadata
expected_max_results = max_results
expected_extra_params = {
@@ -2300,7 +2300,7 @@ def test_get_hmac_key_metadata_wo_project(self):
)
_, kwargs = http.request.call_args
scheme, netloc, path, qs, _ = urllib.parse.urlsplit(kwargs.get("url"))
- self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL)
+ self.assertEqual(f"{scheme}://{netloc}", client._connection.API_BASE_URL)
self.assertEqual(
path,
"/".join(
@@ -2355,7 +2355,7 @@ def test_get_hmac_key_metadata_w_project(self):
)
_, kwargs = http.request.call_args
scheme, netloc, path, qs, _ = urllib.parse.urlsplit(kwargs.get("url"))
- self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL)
+ self.assertEqual(f"{scheme}://{netloc}", client._connection.API_BASE_URL)
self.assertEqual(
path,
"/".join(
@@ -2515,7 +2515,7 @@ def test_get_signed_policy_v4_virtual_hosted_style(self):
credentials=_create_signing_credentials(),
)
self.assertEqual(
- policy["url"], "https://{}.storage.googleapis.com/".format(BUCKET_NAME)
+ policy["url"], f"https://{BUCKET_NAME}.storage.googleapis.com/"
)
def test_get_signed_policy_v4_bucket_bound_hostname(self):
diff --git a/tests/unit/test_hmac_key.py b/tests/unit/test_hmac_key.py
index 59a2b221f..917006b96 100644
--- a/tests/unit/test_hmac_key.py
+++ b/tests/unit/test_hmac_key.py
@@ -177,7 +177,7 @@ def test_time_created_getter(self):
metadata = self._make_one()
now = datetime.datetime.utcnow()
- now_stamp = "{}Z".format(now.isoformat())
+ now_stamp = f"{now.isoformat()}Z"
metadata._properties["timeCreated"] = now_stamp
self.assertEqual(metadata.time_created, now.replace(tzinfo=UTC))
@@ -187,7 +187,7 @@ def test_updated_getter(self):
metadata = self._make_one()
now = datetime.datetime.utcnow()
- now_stamp = "{}Z".format(now.isoformat())
+ now_stamp = f"{now.isoformat()}Z"
metadata._properties["updated"] = now_stamp
self.assertEqual(metadata.updated, now.replace(tzinfo=UTC))
@@ -203,9 +203,7 @@ def test_path_w_access_id_wo_project(self):
metadata = self._make_one()
metadata._properties["accessId"] = access_id
- expected_path = "/projects/{}/hmacKeys/{}".format(
- client.DEFAULT_PROJECT, access_id
- )
+ expected_path = f"/projects/{client.DEFAULT_PROJECT}/hmacKeys/{access_id}"
self.assertEqual(metadata.path, expected_path)
def test_path_w_access_id_w_explicit_project(self):
@@ -215,7 +213,7 @@ def test_path_w_access_id_w_explicit_project(self):
metadata._properties["accessId"] = access_id
metadata._properties["projectId"] = project
- expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id)
+ expected_path = f"/projects/{project}/hmacKeys/{access_id}"
self.assertEqual(metadata.path, expected_path)
def test_exists_miss_w_defaults(self):
@@ -231,7 +229,7 @@ def test_exists_miss_w_defaults(self):
self.assertFalse(metadata.exists())
- expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id)
+ expected_path = f"/projects/{project}/hmacKeys/{access_id}"
expected_query_params = {}
client._get_resource.assert_called_once_with(
expected_path,
@@ -260,7 +258,7 @@ def test_exists_hit_w_explicit_w_user_project(self):
self.assertTrue(metadata.exists(timeout=timeout, retry=retry))
- expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id)
+ expected_path = f"/projects/{project}/hmacKeys/{access_id}"
expected_query_params = {"userProject": user_project}
client._get_resource.assert_called_once_with(
expected_path,
@@ -283,7 +281,7 @@ def test_reload_miss_w_defaults(self):
with self.assertRaises(NotFound):
metadata.reload()
- expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id)
+ expected_path = f"/projects/{project}/hmacKeys/{access_id}"
expected_query_params = {}
client._get_resource.assert_called_once_with(
expected_path,
@@ -314,7 +312,7 @@ def test_reload_hit_w_project_set(self):
self.assertEqual(metadata._properties, resource)
- expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id)
+ expected_path = f"/projects/{project}/hmacKeys/{access_id}"
expected_query_params = {"userProject": user_project}
client._get_resource.assert_called_once_with(
expected_path,
@@ -338,7 +336,7 @@ def test_update_miss_no_project_set_w_defaults(self):
with self.assertRaises(NotFound):
metadata.update()
- expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id)
+ expected_path = f"/projects/{project}/hmacKeys/{access_id}"
expected_data = {"state": "INACTIVE"}
expected_query_params = {}
client._put_resource.assert_called_once_with(
@@ -373,7 +371,7 @@ def test_update_hit_w_project_set_w_timeout_w_retry(self):
self.assertEqual(metadata._properties, resource)
- expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id)
+ expected_path = f"/projects/{project}/hmacKeys/{access_id}"
expected_data = {"state": "ACTIVE"}
expected_query_params = {"userProject": user_project}
client._put_resource.assert_called_once_with(
@@ -411,7 +409,7 @@ def test_delete_miss_no_project_set_w_defaults(self):
with self.assertRaises(NotFound):
metadata.delete()
- expected_path = "/projects/{}/hmacKeys/{}".format(client.project, access_id)
+ expected_path = f"/projects/{client.project}/hmacKeys/{access_id}"
expected_query_params = {}
client._delete_resource.assert_called_once_with(
expected_path,
@@ -436,7 +434,7 @@ def test_delete_hit_w_project_set_w_explicit_timeout_retry(self):
metadata.delete(timeout=timeout, retry=retry)
- expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id)
+ expected_path = f"/projects/{project}/hmacKeys/{access_id}"
expected_query_params = {"userProject": user_project}
client._delete_resource.assert_called_once_with(
expected_path,
diff --git a/tests/unit/test_notification.py b/tests/unit/test_notification.py
index cf4e15c13..e5f07d5c7 100644
--- a/tests/unit/test_notification.py
+++ b/tests/unit/test_notification.py
@@ -33,10 +33,8 @@ class TestBucketNotification(unittest.TestCase):
NOTIFICATION_ID = "123"
SELF_LINK = "https://example.com/notification/123"
ETAG = "DEADBEEF"
- CREATE_PATH = "/b/{}/notificationConfigs".format(BUCKET_NAME)
- NOTIFICATION_PATH = "/b/{}/notificationConfigs/{}".format(
- BUCKET_NAME, NOTIFICATION_ID
- )
+ CREATE_PATH = f"/b/{BUCKET_NAME}/notificationConfigs"
+ NOTIFICATION_PATH = f"/b/{BUCKET_NAME}/notificationConfigs/{NOTIFICATION_ID}"
@staticmethod
def event_types():