From 59ed13fa1579c56d3170476915bd069e87af9b8a Mon Sep 17 00:00:00 2001
From: Rohit Kumar Srivastava
Date: Tue, 14 Aug 2018 16:58:04 +0000
Subject: [PATCH] Removed fixed seed and increased learning rate and tolerance
 for test_nadam

---
 tests/python/unittest/test_optimizer.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tests/python/unittest/test_optimizer.py b/tests/python/unittest/test_optimizer.py
index bdd71eef81a4..449cdb423466 100644
--- a/tests/python/unittest/test_optimizer.py
+++ b/tests/python/unittest/test_optimizer.py
@@ -943,7 +943,7 @@ def test_ftrl():
         compare_optimizer(opt1(lazy_update=True, **kwarg), opt2(**kwarg), shape,
                           np.float32, w_stype='row_sparse', g_stype='row_sparse')
 
-@with_seed(1234)
+@with_seed()
 def test_nadam():
 
     def get_net(num_hidden, flatten=True):
@@ -965,10 +965,10 @@ def get_net(num_hidden, flatten=True):
     loss = Loss(output, l)
     loss = mx.sym.make_loss(loss)
     mod = mx.mod.Module(loss, data_names=('data',), label_names=('label',))
-    mod.fit(data_iter, num_epoch=60, optimizer_params={'learning_rate': 0.0005, 'wd': 0.0005},
+    mod.fit(data_iter, num_epoch=60, optimizer_params={'learning_rate': 0.001, 'wd': 0.0005},
             initializer=mx.init.Xavier(magnitude=2), eval_metric=mx.metric.Loss(),
             optimizer='nadam')
-    assert mod.score(data_iter, eval_metric=mx.metric.Loss())[0][1] < 0.1
+    assert mod.score(data_iter, eval_metric=mx.metric.Loss())[0][1] < 0.11
 
 # AdaGrad
 class PyAdaGrad(mx.optimizer.Optimizer):
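
For context, not part of the patch above: switching from @with_seed(1234) to @with_seed() means test_nadam now runs under a fresh random seed on every invocation, which is why the learning rate is raised to 0.001 and the loss threshold is relaxed from 0.1 to 0.11. The sketch below is a rough approximation of how such a seed decorator typically behaves; the name with_random_seed and the print call are assumptions for illustration, not MXNet's actual with_seed implementation.

import functools
import random

import numpy as np
import mxnet as mx

def with_random_seed(seed=None):
    # Illustrative stand-in (assumption) for the test suite's @with_seed() decorator:
    # use the given seed if provided, otherwise draw a fresh one, seed the
    # NumPy and MXNet RNGs with it, and print it so failures are reproducible.
    def decorator(test_fn):
        @functools.wraps(test_fn)
        def wrapper(*args, **kwargs):
            this_seed = seed if seed is not None else random.getrandbits(31)
            np.random.seed(this_seed)
            mx.random.seed(this_seed)
            print('Using test seed %d' % this_seed)
            return test_fn(*args, **kwargs)
        return wrapper
    return decorator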