Rearrange tests written only for update_on_kvstore = True #13514

Merged: 4 commits, Dec 22, 2018
11 changes: 7 additions & 4 deletions tests/python/unittest/test_gluon_trainer.py
@@ -74,12 +74,13 @@ def dict_equ(a, b):
     if trainer._update_on_kvstore:
         dict_equ(trainer._kvstore._updater.states, states)
         assert trainer._optimizer == trainer._kvstore._updater.optimizer
+        # invalid usage of update and allreduce_grads if update_on_kvstore
+        assert_raises(AssertionError, trainer.update, 1)
+        assert_raises(AssertionError, trainer.allreduce_grads)
     else:
         for updater in trainer._updaters:
             dict_equ(updater.states, states)
         assert trainer._optimizer == trainer._updaters[0].optimizer
-    assert_raises(AssertionError, trainer.update, 1)
-    assert_raises(AssertionError, trainer.allreduce_grads)
 
     x = gluon.Parameter('x', shape=(10,))
     x.initialize(ctx=[mx.cpu(0), mx.cpu(1)], init='zeros')
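The behaviour this hunk pins down can be sketched as follows (a minimal sketch against the MXNet 1.x Gluon API; the optimizer settings are illustrative, not taken from the test): with update_on_kvstore=True the optimizer update runs inside the kvstore, so the split allreduce_grads()/update() calls raise AssertionError and only step() is valid, while update_on_kvstore=False allows the two-phase calls.

import mxnet as mx
from mxnet import gluon, autograd

x = gluon.Parameter('x', shape=(10,))
x.initialize(ctx=[mx.cpu(0), mx.cpu(1)], init='zeros')

# update_on_kvstore=False: gradients are aggregated through the kvstore but the
# optimizer update runs locally, so allreduce_grads()/update() are permitted.
trainer = gluon.Trainer([x], 'sgd', {'learning_rate': 1.0}, update_on_kvstore=False)
with autograd.record():
    for w in x.list_data():
        (w + 1).backward()
trainer.allreduce_grads()
trainer.update(1)

# update_on_kvstore=True: the update happens on the kvstore; only step() is
# valid and the split calls raise AssertionError, as the test now asserts.
trainer = gluon.Trainer([x], 'sgd', {'learning_rate': 1.0}, update_on_kvstore=True)
with autograd.record():
    for w in x.list_data():
        (w + 1).backward()
try:
    trainer.update(1)
except AssertionError:
    pass  # expected when update_on_kvstore=True
trainer.step(1)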
@@ -193,8 +194,10 @@ def check_trainer_reset_kv(kv):
         # load would reset kvstore
         mx.nd.waitall()
         params.load('test_trainer_reset_kv.params')
-        assert trainer._kvstore is None
-        assert trainer._kv_initialized is False
+        if trainer._update_on_kvstore:
+            # drop kvstore state if new parameters are loaded
+            assert trainer._kvstore is None
+            assert trainer._kv_initialized is False
         with mx.autograd.record():
             for w in x.list_data():
                 y = w + 1
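The second hunk can likewise be sketched in isolation (a minimal sketch against the MXNet 1.x Gluon API; the file name and learning rate are illustrative): after loading parameter values from disk, the Trainer drops its kvstore state only when the update is performed on the kvstore, and rebuilds it lazily on the next step().

import mxnet as mx
from mxnet import gluon, autograd

params = gluon.ParameterDict()
x = params.get('x', shape=(10,))
params.initialize(ctx=[mx.cpu(0), mx.cpu(1)], init='zeros')
trainer = gluon.Trainer(params, 'sgd', {'learning_rate': 0.1})

with autograd.record():
    for w in x.list_data():
        y = w + 1
        y.backward()
trainer.step(1)                      # first step creates and initializes the kvstore
mx.nd.waitall()

params.save('reset_kv_sketch.params')    # illustrative file name
params.load('reset_kv_sketch.params')
if trainer._update_on_kvstore:
    # loading new parameter values invalidates the kvstore state;
    # it is recreated the next time step() is called
    assert trainer._kvstore is None
    assert trainer._kv_initialized is False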