diff --git a/apex/contrib/optimizers/fused_lamb.py b/apex/contrib/optimizers/fused_lamb.py
index d37cd6be9..9cb2ad3dd 100644
--- a/apex/contrib/optimizers/fused_lamb.py
+++ b/apex/contrib/optimizers/fused_lamb.py
@@ -115,7 +115,6 @@ def step(self, closure=None):
                     g_all_16.append(p.grad.data)
                 else:
                     raise RuntimeError('FusedLAMB only support fp16 and fp32.')
-:q!
 
         g_norm_32, g_norm_16 = 0.0, 0.0
         # compute grad norm for two lists