From 932c53e230b546fd6e0d93766f461ed8e1e2ba5a Mon Sep 17 00:00:00 2001
From: code-review-doctor
Date: Sun, 24 Apr 2022 18:41:57 +0100
Subject: [PATCH] Fix issue probably-meant-fstring found at https://codereview.doctor

---
 keras/applications/mobilenet.py                        | 2 +-
 .../text_classification_transformer_benchmark_test.py  | 4 ++--
 keras/dtensor/layout_map.py                            | 2 +-
 keras/dtensor/lazy_variable.py                         | 2 +-
 keras/engine/base_layer.py                             | 2 +-
 keras/layers/rnn/cell_wrappers.py                      | 2 +-
 keras/layers/rnn/legacy_cell_wrappers.py               | 2 +-
 keras/layers/rnn/legacy_cells.py                       | 2 +-
 keras/optimizers/optimizer_v2/optimizer_v2.py          | 2 +-
 keras/saving/utils_v1/signature_def_utils.py           | 2 +-
 keras/utils/audio_dataset.py                           | 4 ++--
 11 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/keras/applications/mobilenet.py b/keras/applications/mobilenet.py
index 5754fa7a860..beaf22b1853 100644
--- a/keras/applications/mobilenet.py
+++ b/keras/applications/mobilenet.py
@@ -213,7 +213,7 @@ def MobileNet(input_shape=None,
     if depth_multiplier != 1:
       raise ValueError('If imagenet weights are being loaded, '
                        'depth multiplier must be 1. '
-                       'Received depth_multiplier={depth_multiplier}')
+                       f'Received depth_multiplier={depth_multiplier}')
 
     if alpha not in [0.25, 0.50, 0.75, 1.0]:
       raise ValueError('If imagenet weights are being loaded, '
diff --git a/keras/benchmarks/keras_examples_benchmarks/text_classification_transformer_benchmark_test.py b/keras/benchmarks/keras_examples_benchmarks/text_classification_transformer_benchmark_test.py
index 511778a6383..3270d100cfc 100644
--- a/keras/benchmarks/keras_examples_benchmarks/text_classification_transformer_benchmark_test.py
+++ b/keras/benchmarks/keras_examples_benchmarks/text_classification_transformer_benchmark_test.py
@@ -147,8 +147,8 @@ def __init__(self, embed_dim, num_heads=8):
     self.embed_dim = embed_dim
     self.num_heads = num_heads
     if embed_dim % num_heads != 0:
-      raise ValueError('embedding dimension = {embed_dim} should be divisible'
-                       'by number of heads = {num_heads}')
+      raise ValueError(f'embedding dimension = {embed_dim} should be divisible'
+                       f'by number of heads = {num_heads}')
     self.projection_dim = embed_dim // num_heads
     self.query_dense = tf.keras.layers.Dense(embed_dim)
     self.key_dense = tf.keras.layers.Dense(embed_dim)
diff --git a/keras/dtensor/layout_map.py b/keras/dtensor/layout_map.py
index bcbb12e3583..a16ad11f22e 100644
--- a/keras/dtensor/layout_map.py
+++ b/keras/dtensor/layout_map.py
@@ -113,7 +113,7 @@ def __setitem__(self, key, layout):
                        'not use duplicated keys.')
     if not isinstance(layout, dtensor.Layout):
       raise ValueError(f'{layout} should be a dtensor.Layout type, '
-                       'got {type(layout)}')
+                       f'got {type(layout)}')
 
     self._layout_map[key] = layout
 
diff --git a/keras/dtensor/lazy_variable.py b/keras/dtensor/lazy_variable.py
index 2895ad56eb2..2ff41b9b628 100644
--- a/keras/dtensor/lazy_variable.py
+++ b/keras/dtensor/lazy_variable.py
@@ -112,7 +112,7 @@ def __init__(
         initial_value, "graph") and initial_value.graph.building_function:
       raise ValueError(f"Argument `initial_value` ({initial_value}) could not "
                        "be lifted out of a `tf.function`. "
-                       "(Tried to create variable with name='{name}'). "
+                       f"(Tried to create variable with name='{name}'). "
                        "To avoid this error, when constructing `tf.Variable`s "
                        "inside of `tf.function` you can create the "
                        "`initial_value` tensor in a "
diff --git a/keras/engine/base_layer.py b/keras/engine/base_layer.py
index a02b31068e2..51527597fbb 100644
--- a/keras/engine/base_layer.py
+++ b/keras/engine/base_layer.py
@@ -2677,7 +2677,7 @@ def _get_node_attribute_at_index(self, node_index, attr, attr_name):
     """
     if not self._inbound_nodes:
       raise RuntimeError(f'The layer {self.name} has never been called '
-                         'and thus has no defined {attr_name}.')
+                         f'and thus has no defined {attr_name}.')
     if not len(self._inbound_nodes) > node_index:
       raise ValueError(f'Asked to get {attr_name} at node '
                        f'{node_index}, but the layer has only '
diff --git a/keras/layers/rnn/cell_wrappers.py b/keras/layers/rnn/cell_wrappers.py
index dfae3a20cac..5f83ea8b12f 100644
--- a/keras/layers/rnn/cell_wrappers.py
+++ b/keras/layers/rnn/cell_wrappers.py
@@ -233,7 +233,7 @@ def tensor_and_const_value(v):
           if const_prob < 0 or const_prob > 1:
             raise ValueError(
                 f"Parameter {attr} must be between 0 and 1. "
-                "Received {const_prob}")
+                f"Received {const_prob}")
           setattr(self, "_%s" % attr, float(const_prob))
         else:
           setattr(self, "_%s" % attr, tensor_prob)
diff --git a/keras/layers/rnn/legacy_cell_wrappers.py b/keras/layers/rnn/legacy_cell_wrappers.py
index 8bde804b882..556b60326f8 100644
--- a/keras/layers/rnn/legacy_cell_wrappers.py
+++ b/keras/layers/rnn/legacy_cell_wrappers.py
@@ -268,7 +268,7 @@ def tensor_and_const_value(v):
           if const_prob < 0 or const_prob > 1:
             raise ValueError(
                 f"Parameter {attr} must be between 0 and 1. "
-                "Received {const_prob}")
+                f"Received {const_prob}")
           setattr(self, "_%s" % attr, float(const_prob))
         else:
           setattr(self, "_%s" % attr, tensor_prob)
diff --git a/keras/layers/rnn/legacy_cells.py b/keras/layers/rnn/legacy_cells.py
index 6b7e2c742fe..0d9d8a4f679 100644
--- a/keras/layers/rnn/legacy_cells.py
+++ b/keras/layers/rnn/legacy_cells.py
@@ -940,7 +940,7 @@ def build(self, inputs_shape):
     if inputs_shape[-1] is None:
       raise ValueError(
           "Expected inputs.shape[-1] to be known, "
-          "received shape: {inputs_shape}")
+          f"received shape: {inputs_shape}")
     _check_supported_dtypes(self.dtype)
     input_depth = inputs_shape[-1]
     h_depth = self._num_units if self._num_proj is None else self._num_proj
diff --git a/keras/optimizers/optimizer_v2/optimizer_v2.py b/keras/optimizers/optimizer_v2/optimizer_v2.py
index 98138441b37..822e3142638 100644
--- a/keras/optimizers/optimizer_v2/optimizer_v2.py
+++ b/keras/optimizers/optimizer_v2/optimizer_v2.py
@@ -1187,7 +1187,7 @@ def set_weights(self, weights):
     params = self.weights
     if len(params) != len(weights):
       raise ValueError(
-          "You called `set_weights(weights)` on optimizer {self._name} "
+          f"You called `set_weights(weights)` on optimizer {self._name} "
          f"with a weight list of length {str(len(weights))}, "
          f"but the optimizer was expecting {str(len(params))} "
          f"weights. Provided weights: {str(weights)[:50]}...")
diff --git a/keras/saving/utils_v1/signature_def_utils.py b/keras/saving/utils_v1/signature_def_utils.py
index 4b869902c0e..b91d2097b76 100644
--- a/keras/saving/utils_v1/signature_def_utils.py
+++ b/keras/saving/utils_v1/signature_def_utils.py
@@ -58,7 +58,7 @@ def _supervised_signature_def(
     ValueError: If inputs or outputs is `None`.
   """
   if inputs is None or not inputs:
-    raise ValueError('f{method_name} `inputs` cannot be None or empty.')
+    raise ValueError(f'{method_name} `inputs` cannot be None or empty.')
 
   signature_inputs = {key: tf.compat.v1.saved_model.build_tensor_info(tensor)
                       for key, tensor in inputs.items()}
diff --git a/keras/utils/audio_dataset.py b/keras/utils/audio_dataset.py
index cebc5a10416..348e82e7041 100644
--- a/keras/utils/audio_dataset.py
+++ b/keras/utils/audio_dataset.py
@@ -152,13 +152,13 @@ def audio_dataset_from_directory(
   if not isinstance(sampling_rate, int):
     raise ValueError(
         '`sampling_rate` should have an integer value. '
-        'Received: sampling_rate={sampling_rate}'
+        f'Received: sampling_rate={sampling_rate}'
     )
 
   if sampling_rate <= 0:
     raise ValueError(
         f'`sampling_rate` should be higher than 0. '
-        'Received: sampling_rate={sampling_rate}'
+        f'Received: sampling_rate={sampling_rate}'
     )
 
   if tfio is None: