
Commit

fix F811
haifeng-jin authored May 31, 2022
1 parent 53825c7 commit 564b8d9
Showing 2 changed files with 18 additions and 1 deletion.
keras/mixed_precision/loss_scale_optimizer_test.py (18 additions, 0 deletions)
@@ -385,8 +385,17 @@ def testClipping(self, opt_cls, strategy_fn, use_tf_function):
         self.assertEqual(self.evaluate(opt.loss_scale), 8)

         # Test Inf gradients are still skipped instead of being clipped
+<<<<<<< HEAD
         loss = lambda: var * float("Inf")
         run_fn = lambda: opt.minimize(loss, var_list=[var])
+=======
+        def run_fn():
+            def loss():
+                return var * float("Inf")
+
+            return opt.minimize(loss, var_list=[var])
+
+>>>>>>> 0bb24689 (fix F811)
         run_op = strategy.experimental_run(run_fn)
         self._run_if_in_graph_mode(run_op)
         self.assertAllClose(
@@ -417,8 +426,17 @@ def testDynamicUpdate(self, opt_cls, strategy_fn, use_tf_function):
         self.assertEqual(4.0, self.evaluate(opt.loss_scale))

         # Test optimizer with NaN gradients
+<<<<<<< HEAD
         loss = lambda: var * float("NaN")
         run_fn = lambda: opt.minimize(loss, var_list=[var])
+=======
+        def run_fn():
+            def loss():
+                return var * float("NaN")
+
+            return opt.minimize(loss, var_list=[var])
+
+>>>>>>> 0bb24689 (fix F811)
         run_op = strategy.experimental_run(run_fn)
         self._run_if_in_graph_mode(run_op)
         # Variable should not change from before, due to NaN gradients.
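Note that, as committed, both hunks above retain the Git conflict markers (<<<<<<< HEAD / ======= / >>>>>>> 0bb24689), so the test file is left syntactically invalid; presumably the intended content is only the incoming side. A minimal sketch of the first hunk resolved that way, assuming `opt`, `var`, and `strategy` are provided by the enclosing testClipping method:

# Sketch with the conflict resolved in favor of the ">>>>>>>" side
# (assumption: that side is the intended F811 fix). Defining `loss`
# inside `run_fn` means neither name rebinds an earlier binding in the
# test body; pyflakes analyzes lambda bodies last, so the earlier
# lambda-only use of `loss` still looked unused when it was rebound,
# which is likely what triggered F811 ("redefinition of unused name").
def run_fn():
    def loss():
        return var * float("Inf")  # Inf gradients are skipped, not clipped

    return opt.minimize(loss, var_list=[var])

run_op = strategy.experimental_run(run_fn)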
keras/tests/keras_doctest.py (0 additions, 1 deletion)
@@ -22,7 +22,6 @@
 import sys

 import numpy as np
-import tensorflow as tf
 import tensorflow.compat.v2 as tf
 from absl import flags
 from absl.testing import absltest
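For reference, F811 is the pyflakes/flake8 check "redefinition of unused name", and the keras_doctest.py hunk above is the textbook case: two imports bind the same name `tf`, and the first binding is shadowed before it is ever used. A minimal, hypothetical standalone reproduction (not part of the commit):

# f811_demo.py -- flake8 reports on the second import:
#     F811 redefinition of unused name 'tf'
import tensorflow as tf            # first binding of `tf`, never used ...
import tensorflow.compat.v2 as tf  # ... before this line rebinds it

print(tf.__name__)

Deleting the first import, as the commit does, leaves a single binding and clears the warning.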
