
[MXNET-978] Higher Order Gradient Support clip, dropout. (#15746)
* support clip, dropout for higher order grad

* add relevant tests

* retrigger CI
kshitij12345 authored and apeforest committed Sep 6, 2019
1 parent c928392 commit 24f0a10
Showing 3 changed files with 35 additions and 2 deletions.
4 changes: 3 additions & 1 deletion src/operator/nn/dropout.cc
@@ -171,14 +171,16 @@ Example::
 .add_arguments(DropoutParam::__FIELDS__());
 
 NNVM_REGISTER_OP(_backward_Dropout)
+.set_num_inputs(2)
 .set_num_outputs(1)
 .set_attr<bool>("TIsLayerOpBackward", true)
 .set_attr<nnvm::TIsBackward>("TIsBackward", true)
 .set_attr_parser(ParamParser<DropoutParam>)
 .set_attr<nnvm::FInplaceOption>("FInplaceOption", [](const NodeAttrs& attrs){
     return std::vector<std::pair<int, int> >{{0, 0}};
   })
-.set_attr<FStatefulCompute>("FStatefulCompute<cpu>", DropoutGradCompute<cpu>);
+.set_attr<FStatefulCompute>("FStatefulCompute<cpu>", DropoutGradCompute<cpu>)
+.set_attr<nnvm::FGradient>("FGradient", MakeZeroGradNodes);
 
 }  // namespace op
 }  // namespace mxnet
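With this registration, _backward_Dropout itself has a gradient: MakeZeroGradNodes emits a zero tensor for each of the backward node's inputs. That is the correct second-order gradient here, because for a fixed dropout mask the first-order gradient does not depend on the input. A minimal sketch of how the new behavior can be exercised from Python (variable names are illustrative; it assumes MXNet's autograd.grad with create_graph):

from mxnet import nd, autograd

x = nd.random.normal(shape=(3, 4))
x.attach_grad()
with autograd.record():
    y = nd.Dropout(x, p=0.5)
    # Keep the first-order gradient in the recorded graph (create_graph=True)
    # so that it can itself be differentiated.
    dx = autograd.grad(y, x, create_graph=True, retain_graph=True)[0]
dx.backward()  # second backward pass; exercises the new FGradient registration
print(x.grad)  # all zeros: for a fixed mask, dropout's gradient is constant in x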
3 changes: 2 additions & 1 deletion src/operator/tensor/matrix_op.cc
@@ -779,7 +779,8 @@ NNVM_REGISTER_OP(_backward_clip)
 .set_num_outputs(1)
 .set_attr_parser(ParamParser<ClipParam>)
 .set_attr<nnvm::TIsBackward>("TIsBackward", true)
-.set_attr<FCompute>("FCompute<cpu>", ClipGrad_<cpu>);
+.set_attr<FCompute>("FCompute<cpu>", ClipGrad_<cpu>)
+.set_attr<nnvm::FGradient>("FGradient", MakeZeroGradNodes);
 
 NNVM_REGISTER_OP(repeat)
 .add_alias("_np_repeat")
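_backward_clip gets the same treatment: the derivative of clip(x, a_min, a_max) with respect to x is the indicator of a_min < x < a_max, which is piecewise constant, so its derivative is zero almost everywhere, and MakeZeroGradNodes encodes exactly that. A small illustrative check (values chosen for clarity, not taken from the commit):

from mxnet import nd, autograd

x = nd.array([-2.0, 0.5, 2.0])
x.attach_grad()
with autograd.record():
    y = nd.clip(x, -1.0, 1.0)
    dx = autograd.grad(y, x, create_graph=True, retain_graph=True)[0]
dx.backward()
print(dx)      # [0. 1. 0.], the indicator of a_min < x < a_max
print(x.grad)  # [0. 0. 0.], the second-order gradient is identically zero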
30 changes: 30 additions & 0 deletions tests/python/unittest/test_higher_order_grad.py
@@ -215,6 +215,36 @@ def grad_grad_op(x):
     check_second_order_unary(array, abs, grad_grad_op)
 
 
+@with_seed()
+def test_clip():
+    def clip(x):
+        a_min, a_max = sorted([random.random(), random.random()])
+
+        return nd.clip(x, a_min, a_max)
+
+    def grad_grad_op(x):
+        return nd.zeros_like(x)
+
+    for dim in range(1, 5):
+        shape = rand_shape_nd(dim)
+        array = random_arrays(shape)
+        check_second_order_unary(array, clip, grad_grad_op)
+
+
+@with_seed()
+def test_dropout():
+    def dropout(x):
+        return nd.Dropout(x)
+
+    def grad_grad_op(x):
+        return nd.zeros_like(x)
+
+    for dim in range(1, 5):
+        shape = rand_shape_nd(dim)
+        array = random_arrays(shape)
+        check_second_order_unary(array, dropout, grad_grad_op)
+
+
 def test_sigmoid():
     def sigmoid(x):
         return nd.sigmoid(x)
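For context, check_second_order_unary is defined earlier in this test file, outside the hunk shown here. A minimal sketch of the pattern it implements (the helper name and the exact comparison below are illustrative, not the file's verbatim code):

from mxnet import nd, autograd
from mxnet.test_utils import assert_almost_equal

def check_second_order_unary_sketch(array, op, grad_grad_op):
    x = nd.array(array)
    expected = grad_grad_op(x).asnumpy()  # analytic second-order gradient
    x.attach_grad()
    with autograd.record():
        y = op(x)
        # Record the first-order gradient so it can be differentiated again.
        x_grad = autograd.grad(y, x, create_graph=True, retain_graph=True)[0]
    x_grad.backward()  # fills x.grad with the second-order gradient
    assert_almost_equal(expected, x.grad.asnumpy())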
