From db1a761bb76c9dcc52dee8ee88e5badd616bc325 Mon Sep 17 00:00:00 2001
From: igeni
Date: Fri, 22 Mar 2024 15:23:16 +0300
Subject: [PATCH] replaced concatenation with f-strings to improve readability
 and unify … (#29785)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

replaced concatenation with f-strings to improve readability and unify with
the rest of the code
---
 src/transformers/tokenization_utils_base.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/transformers/tokenization_utils_base.py b/src/transformers/tokenization_utils_base.py
index e4f1833a5f333d..ff97ea10adcbd7 100644
--- a/src/transformers/tokenization_utils_base.py
+++ b/src/transformers/tokenization_utils_base.py
@@ -3668,7 +3668,7 @@ def truncate_sequences(
                 ids = ids[ids_to_move:]
                 pair_ids = pair_ids[pair_ids_to_move:] if pair_ids is not None else None
             else:
-                raise ValueError("invalid truncation strategy:" + str(self.truncation_side))
+                raise ValueError(f"invalid truncation strategy:{self.truncation_side}")

         elif truncation_strategy == TruncationStrategy.ONLY_SECOND and pair_ids is not None:
             if len(pair_ids) > num_tokens_to_remove:
@@ -3680,7 +3680,7 @@ def truncate_sequences(
                     overflowing_tokens = pair_ids[:window_len]
                     pair_ids = pair_ids[num_tokens_to_remove:]
                 else:
-                    raise ValueError("invalid truncation strategy:" + str(self.truncation_side))
+                    raise ValueError(f"invalid truncation strategy:{self.truncation_side}")
             else:
                 logger.error(
                     f"We need to remove {num_tokens_to_remove} to truncate the input "
@@ -3764,7 +3764,7 @@ def _pad(
                     encoded_inputs["special_tokens_mask"] = [1] * difference + encoded_inputs["special_tokens_mask"]
                 encoded_inputs[self.model_input_names[0]] = [self.pad_token_id] * difference + required_input
             else:
-                raise ValueError("Invalid padding strategy:" + str(self.padding_side))
+                raise ValueError(f"Invalid padding strategy:{self.padding_side}")

         return encoded_inputs
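
As a quick sanity check on the change above, here is a minimal standalone Python sketch (not part of the patch or of the transformers test suite) showing that the f-string form produces exactly the same error messages as the concatenation it replaces; the local variables truncation_side and padding_side are illustrative stand-ins for the tokenizer attributes.

# Standalone sketch: the f-string form builds the same message as the old
# concatenation, even for a non-string value such as None, because the
# f-string renders the value the same way str() does here.
truncation_side = None  # illustrative value; the tokenizer normally uses "left" or "right"

old_message = "invalid truncation strategy:" + str(truncation_side)
new_message = f"invalid truncation strategy:{truncation_side}"
assert old_message == new_message == "invalid truncation strategy:None"

padding_side = "center"  # illustrative invalid value of the kind that would reach the _pad error
assert ("Invalid padding strategy:" + str(padding_side)
        == f"Invalid padding strategy:{padding_side}"
        == "Invalid padding strategy:center")

Note that the messages still have no space after the colon; the patch deliberately keeps the wording byte-for-byte identical and only changes how the string is built.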