From 64eb9453d16b89b8105336f1df6245c83627f3bb Mon Sep 17 00:00:00 2001
From: Haifeng Jin <5476582+haifeng-jin@users.noreply.github.com>
Date: Tue, 31 May 2022 09:04:41 +0000
Subject: [PATCH] fix the rest

---
 keras/benchmarks/layer_benchmarks/run_xprof.py       | 1 +
 keras/callbacks.py                                    | 2 +-
 keras/callbacks_test.py                               | 2 +-
 keras/dtensor/lazy_variable.py                        | 4 +---
 keras/engine/base_layer.py                            | 4 ++--
 keras/engine/base_layer_v1.py                         | 6 ++----
 keras/engine/functional.py                            | 3 +--
 keras/engine/functional_utils_test.py                 | 2 +-
 keras/engine/saving.py                                | 2 +-
 keras/engine/sequential.py                            | 4 ++--
 keras/engine/training_test.py                         | 8 +++-----
 keras/integration_test/multi_worker_tutorial_test.py  | 2 +-
 keras/layers/normalization/batch_normalization.py     | 4 +---
 keras/legacy_tf_layers/migration_utils_test.py        | 3 +--
 keras/saving/saved_model/load.py                      | 3 +--
 keras/saving/saved_model/saved_model_test.py          | 2 +-
 keras/saving/saving_utils.py                          | 2 +-
 keras/saving/utils_v1/__init__.py                     | 4 +---
 keras/testing_infra/keras_doctest_lib_test.py         | 2 +-
 keras/tests/keras_doctest.py                          | 2 +-
 keras/utils/data_utils.py                             | 2 +-
 setup.cfg                                             | 4 +---
 22 files changed, 27 insertions(+), 41 deletions(-)

diff --git a/keras/benchmarks/layer_benchmarks/run_xprof.py b/keras/benchmarks/layer_benchmarks/run_xprof.py
index b0e9cf753f95..5f9fd2788d51 100644
--- a/keras/benchmarks/layer_benchmarks/run_xprof.py
+++ b/keras/benchmarks/layer_benchmarks/run_xprof.py
@@ -16,6 +16,7 @@
 from __future__ import division as _division
 from __future__ import print_function as _print_function
 
+import os
 import time
 import uuid
 
diff --git a/keras/callbacks.py b/keras/callbacks.py
index cba535fbb1d2..651e37abdeba 100644
--- a/keras/callbacks.py
+++ b/keras/callbacks.py
@@ -1564,7 +1564,7 @@ def _save_model(self, epoch, batch, logs):
                     )
 
                 self._maybe_remove_file()
-            except IsADirectoryError as e:  # h5py 3.x
+            except IsADirectoryError:  # h5py 3.x
                 raise IOError(
                     "Please specify a non-directory filepath for "
                     "ModelCheckpoint. Filepath used is an existing "
diff --git a/keras/callbacks_test.py b/keras/callbacks_test.py
index 12210283ba2a..9f0560633bfa 100644
--- a/keras/callbacks_test.py
+++ b/keras/callbacks_test.py
@@ -385,7 +385,7 @@ def on_epoch_end(self, epoch, log=None):
                 if epoch == 5 or epoch == 12:
                     raise RuntimeError("Interruption")
 
-        log_dir = self.get_temp_dir()
+        self.get_temp_dir()
 
         # The following asserts that the train counter is fault tolerant.
         self.assertEqual(model._train_counter.numpy(), 0)
diff --git a/keras/dtensor/lazy_variable.py b/keras/dtensor/lazy_variable.py
index a230d41aad0d..c42e6c4168f1 100644
--- a/keras/dtensor/lazy_variable.py
+++ b/keras/dtensor/lazy_variable.py
@@ -181,9 +181,7 @@ def __init__(
     # TODO(scottzhu): This method and create_and_initialize might be removed if
     # we decide to just use the tf.Variable to replace this class.
     def initialize(self):
-        with ops.name_scope(
-            self._name, "Variable", skip_on_eager=False
-        ) as name:
+        with ops.name_scope(self._name, "Variable", skip_on_eager=False):
             with ops.colocate_with(self._handle), ops.name_scope("Initializer"):
                 if callable(self._initial_value):
                     initial_value = self._initial_value()
diff --git a/keras/engine/base_layer.py b/keras/engine/base_layer.py
index 7cefe7d6cc01..07f202d10cbe 100644
--- a/keras/engine/base_layer.py
+++ b/keras/engine/base_layer.py
@@ -685,6 +685,7 @@ def add_weight(
             and dtype.is_floating
         ):
             old_getter = getter
+            # Wrap variable constructor to return an AutoCastVariable.
 
             def getter(*args, **kwargs):  # pylint: disable=function-redefined
                 variable = old_getter(*args, **kwargs)
@@ -3082,9 +3083,8 @@ def __setattr__(self, name, value):
         if (
             name == "_self_setattr_tracking"
             or not getattr(self, "_self_setattr_tracking", True)
-            or
             # Exclude @property.setters from tracking
-            hasattr(self.__class__, name)
+            or hasattr(self.__class__, name)
         ):
             try:
                 super(tf.__internal__.tracking.AutoTrackable, self).__setattr__(
diff --git a/keras/engine/base_layer_v1.py b/keras/engine/base_layer_v1.py
index ef5e564d154a..6f9864ef43f8 100644
--- a/keras/engine/base_layer_v1.py
+++ b/keras/engine/base_layer_v1.py
@@ -1279,10 +1279,9 @@ def add_update(self, updates):
         if (
             tf.distribute.has_strategy()
             and tf.distribute.in_cross_replica_context()
-            and
             # When saving the model, the distribution strategy context should be
             # ignored, following the default path for adding updates.
-            not call_context.saving
+            and not call_context.saving
         ):
             # Updates don't need to be run in a cross-replica context.
             return
@@ -2335,9 +2334,8 @@ def __setattr__(self, name, value):
         if (
             name == "_self_setattr_tracking"
             or not getattr(self, "_self_setattr_tracking", True)
-            or
             # Exclude @property.setters from tracking
-            hasattr(self.__class__, name)
+            or hasattr(self.__class__, name)
         ):
             try:
                 super(tf.__internal__.tracking.AutoTrackable, self).__setattr__(
diff --git a/keras/engine/functional.py b/keras/engine/functional.py
index ca4d6c677532..041d30708fc9 100644
--- a/keras/engine/functional.py
+++ b/keras/engine/functional.py
@@ -1237,9 +1237,8 @@ def _should_skip_first_node(layer):
     if layer._self_tracked_trackables:
         return (
             isinstance(layer, Functional)
-            and
             # Filter out Sequential models without an input shape.
-            isinstance(
+            and isinstance(
                 layer._self_tracked_trackables[0], input_layer_module.InputLayer
             )
         )
diff --git a/keras/engine/functional_utils_test.py b/keras/engine/functional_utils_test.py
index 2d1d1f78dd0b..cb9ddf30d648 100644
--- a/keras/engine/functional_utils_test.py
+++ b/keras/engine/functional_utils_test.py
@@ -223,7 +223,7 @@ def test_build_model_from_intermediate_tensor_with_complicated_model(self):
         # 2 input layers and 2 Add layer.
         self.assertLen(model2.layers, 4)
         class_count = collections.Counter(
-            [l.__class__ for var_l in model2.layers]
+            [var_l.__class__ for var_l in model2.layers]
         )
         self.assertEqual(class_count[input_layer_lib.InputLayer], 2)
         self.assertEqual(class_count[layers.Add], 2)
diff --git a/keras/engine/saving.py b/keras/engine/saving.py
index 34948c5a8192..b99a60d2eae9 100644
--- a/keras/engine/saving.py
+++ b/keras/engine/saving.py
@@ -18,4 +18,4 @@
 Everything has been moved to keras/saving/. This file will be deleted soon.
 """
 
-from keras.saving import *  # noqa: F401
+from keras.saving import *  # noqa: F401,F403
diff --git a/keras/engine/sequential.py b/keras/engine/sequential.py
index b005d1cc5e84..76048c6aa6f7 100644
--- a/keras/engine/sequential.py
+++ b/keras/engine/sequential.py
@@ -333,7 +333,7 @@ def _build_graph_network_for_inferred_shape(
                             # Create Functional API connection by calling the
                             # current layer
                             layer_output = layer(layer_input)
-                        except:  # pylint:disable=bare-except
+                        except:  # noqa: E722
                             # Functional API calls may fail for a number of
                             # reasons: 1) The layer may be buggy. In this case
                             # it will be easier for the user to debug if we fail
@@ -367,7 +367,7 @@
                         # not be supporting such layers.
                         self._init_graph_network(inputs, outputs)
                         self._graph_initialized = True
-                    except:  # pylint:disable=bare-except
+                    except:  # noqa: E722
                         self._use_legacy_deferred_behavior = True
                     self._inferred_input_shape = new_shape
 
diff --git a/keras/engine/training_test.py b/keras/engine/training_test.py
index 21ad655924f7..205963726b7f 100644
--- a/keras/engine/training_test.py
+++ b/keras/engine/training_test.py
@@ -1731,7 +1731,7 @@ def test_mixed_precision(self):
             "mse",
             run_eagerly=test_utils.should_run_eagerly(),
         )
-        history = model.fit(x, y, epochs=2)
+        model.fit(x, y, epochs=2)
         policy.set_global_policy("float32")
 
     @test_combinations.run_all_keras_modes
@@ -2378,10 +2378,8 @@ def test_class_weights(self):
             y_train[:batch_size],
             class_weight=class_weight,
         )
-        ref_score = model.evaluate(
-            x_test, y_test, verbose=0
-        )  # pylint: disable=unused-variable
-        score = model.evaluate(  # pylint: disable=unused-variable
+        ref_score = model.evaluate(x_test, y_test, verbose=0)  # noqa: F841
+        score = model.evaluate(  # noqa: F841
             x_test[test_ids, :], y_test[test_ids, :], verbose=0
         )
         # TODO(b/152990697): Fix the class weights test here.
diff --git a/keras/integration_test/multi_worker_tutorial_test.py b/keras/integration_test/multi_worker_tutorial_test.py
index 89df14576467..09eed5564ce7 100644
--- a/keras/integration_test/multi_worker_tutorial_test.py
+++ b/keras/integration_test/multi_worker_tutorial_test.py
@@ -70,7 +70,7 @@ class MultiWorkerTutorialTest(parameterized.TestCase, tf.test.TestCase):
     def skip_fetch_failure_exception(self):
         try:
             yield
-        except zipfile.BadZipfile as e:
+        except zipfile.BadZipfile:
             # There can be a race when multiple processes are downloading the
             # data. Skip the test if that results in loading errors.
             self.skipTest(
diff --git a/keras/layers/normalization/batch_normalization.py b/keras/layers/normalization/batch_normalization.py
index 2ce8f56183ae..dc6684cbd878 100644
--- a/keras/layers/normalization/batch_normalization.py
+++ b/keras/layers/normalization/batch_normalization.py
@@ -898,9 +898,7 @@ def _compose_transforms(scale, offset, then_scale, then_offset):
         # Determine a boolean value for `training`: could be True, False, or
         # None.
         training_value = control_flow_util.constant_value(training)
-        if (
-            training_value == False
-        ):  # pylint: disable=singleton-comparison,g-explicit-bool-comparison
+        if training_value == False:  # noqa: E712
             mean, variance = self.moving_mean, self.moving_variance
         else:
             if self.adjustment:
diff --git a/keras/legacy_tf_layers/migration_utils_test.py b/keras/legacy_tf_layers/migration_utils_test.py
index 0e7eb47fbe81..1588d7c87e27 100644
--- a/keras/legacy_tf_layers/migration_utils_test.py
+++ b/keras/legacy_tf_layers/migration_utils_test.py
@@ -209,9 +209,8 @@ def test_num_rand_ops_disallow_repeated_ops_seed(self):
             a_prime = tf.random.uniform(shape=(3, 1))
             a_prime = a_prime * 3
             error_string = "An exception should have been raised before this"
-            error_raised = "An exception should have been raised before this"
             try:
-                c = tf.random.uniform(shape=(3, 1))
+                tf.random.uniform(shape=(3, 1))
                 raise RuntimeError(error_string)
 
             except ValueError as err:
diff --git a/keras/saving/saved_model/load.py b/keras/saving/saved_model/load.py
index 826b792e3cde..ff86f6c92cea 100644
--- a/keras/saving/saved_model/load.py
+++ b/keras/saving/saved_model/load.py
@@ -715,9 +715,8 @@ def finalize_objects(self):
         for node_id, (node, _) in self.loaded_nodes.items():
             if (
                 not isinstance(node, base_layer.Layer)
-                or
                 # Don't finalize models until all layers have finished loading.
-                node_id in self.model_layer_dependencies
+                or node_id in self.model_layer_dependencies
             ):
                 continue
 
diff --git a/keras/saving/saved_model/saved_model_test.py b/keras/saving/saved_model/saved_model_test.py
index f0c70e9b68b5..ee5a718c2554 100644
--- a/keras/saving/saved_model/saved_model_test.py
+++ b/keras/saving/saved_model/saved_model_test.py
@@ -1125,7 +1125,7 @@ def __call__(self, inputs):
         class Model(keras.models.Model):
             def __init__(self):
                 super().__init__()
-                self.layer = CustomLayer()
+                self.layer = CustomLayer()  # noqa: F821
 
             @tf.function(input_signature=[tf.TensorSpec([None, 1])])
             def call(self, inputs):
diff --git a/keras/saving/saving_utils.py b/keras/saving/saving_utils.py
index 08e52389fec5..1cc5f7009ac2 100644
--- a/keras/saving/saving_utils.py
+++ b/keras/saving/saving_utils.py
@@ -365,7 +365,7 @@ def try_build_compiled_arguments(model):
                 model.compiled_loss.build(model.outputs)
             if not model.compiled_metrics.built:
                 model.compiled_metrics.build(model.outputs, model.outputs)
-        except:  # pylint: disable=bare-except
+        except:  # noqa: E722
             logging.warning(
                 "Compiled the loaded model, but the compiled metrics have "
                 "yet to be built. `model.compile_metrics` will be empty "
diff --git a/keras/saving/utils_v1/__init__.py b/keras/saving/utils_v1/__init__.py
index 8ef60d06537e..5ecb45991aca 100644
--- a/keras/saving/utils_v1/__init__.py
+++ b/keras/saving/utils_v1/__init__.py
@@ -18,8 +18,7 @@
 from __future__ import division
 from __future__ import print_function
 
-# pylint: disable=wildcard-import
-from keras.saving.utils_v1.export_output import *
+from keras.saving.utils_v1.export_output import *  # noqa: F403
 from keras.saving.utils_v1.export_utils import EXPORT_TAG_MAP
 from keras.saving.utils_v1.export_utils import SIGNATURE_KEY_MAP
 from keras.saving.utils_v1.export_utils import build_all_signature_defs
@@ -28,5 +27,4 @@
 from keras.saving.utils_v1.export_utils import get_temp_export_dir
 from keras.saving.utils_v1.export_utils import get_timestamped_export_dir
 
-# pylint: enable=wildcard-import
 # LINT.ThenChange(//tensorflow/python/saved_model/model_utils/__init__.py)
diff --git a/keras/testing_infra/keras_doctest_lib_test.py b/keras/testing_infra/keras_doctest_lib_test.py
index 74c6cd3528c0..47e15259a7a7 100644
--- a/keras/testing_infra/keras_doctest_lib_test.py
+++ b/keras/testing_infra/keras_doctest_lib_test.py
@@ -55,7 +55,7 @@ class KerasDoctestOutputCheckerTest(parameterized.TestCase):
         ["text1.0 text", []],
         ["text 1.0text", []],
         ["text1.0text", []],
-        ["0x12e4", []], # not 12000
+        ["0x12e4", []],  # not 12000
         ["TensorBoard: http://128.0.0.1:8888", []],
         # With a newline
         ["1.0 text\n 2.0 3.0 text", [1.0, 2.0, 3.0]],
diff --git a/keras/tests/keras_doctest.py b/keras/tests/keras_doctest.py
index 0ab6907bfa63..90f2c66b6d4e 100644
--- a/keras/tests/keras_doctest.py
+++ b/keras/tests/keras_doctest.py
@@ -32,7 +32,7 @@
 
 # We put doctest after absltest so that it picks up the unittest monkeypatch.
 # Otherwise doctest tests aren't runnable at all.
-import doctest  # pylint: disable=g-import-not-at-top,g-bad-import-order
+import doctest  # noqa: E402
 
 FLAGS = flags.FLAGS
 
diff --git a/keras/utils/data_utils.py b/keras/utils/data_utils.py
index 0d7aec47eca1..ed698b849db8 100644
--- a/keras/utils/data_utils.py
+++ b/keras/utils/data_utils.py
@@ -298,7 +298,7 @@ def __call__(self, block_num, block_size, total_size):
                 raise Exception(error_msg.format(origin, e.code, e.msg))
             except urllib.error.URLError as e:
                 raise Exception(error_msg.format(origin, e.errno, e.reason))
-        except (Exception, KeyboardInterrupt) as e:
+        except (Exception, KeyboardInterrupt):
             if os.path.exists(fpath):
                 os.remove(fpath)
             raise
diff --git a/setup.cfg b/setup.cfg
index 81215f22b2da..328384b0f990 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -6,7 +6,5 @@ profile=black
 [flake8]
 # imported but unused in __init__.py, that's ok.
 per-file-ignores=**/__init__.py:F401
-ignore=E203,W503
+ignore=E203,W503,F632,E266
 max-line-length=80
-# Only check line-too-long and ignore other errors.
-select=E501