Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit

Permalink
add gradient on
Browse files Browse the repository at this point in the history
  • Loading branch information
kshitij12345 committed May 18, 2019
1 parent 9d1ddb3 commit 16e1815
Showing 1 changed file with 23 additions and 11 deletions.
34 changes: 23 additions & 11 deletions src/operator/tensor/elemwise_unary_op_basic.cc
Original file line number Diff line number Diff line change
Expand Up @@ -989,16 +989,7 @@ The storage type of ``log`` output is always dense
)code" ADD_FILELINE)
.set_attr<FCompute>("FCompute<cpu>", UnaryOp::LogCompute<cpu, mshadow_op::log>)
.set_attr<nnvm::FGradient>("FGradient",
[](const nnvm::NodePtr& n, const std::vector<nnvm::NodeEntry>& ograds) {
auto x_grad = MakeNode("reciprocal", n->attrs.name + "_mid_x_grad",
{n->inputs[0]}, nullptr, &n);
auto in_grad = MakeNode("elemwise_mul", n->attrs.name + "_backward",
{ograds[0], nnvm::NodeEntry{x_grad, 0, 0}}, nullptr, &n);
std::vector<nnvm::NodeEntry> ret;
ret.emplace_back(nnvm::NodeEntry{in_grad, 0, 0});
return ret;
});
.set_attr<nnvm::FGradient>("FGradient", ElemwiseGradUseIn{"_backward_log"});

// log10
MXNET_OPERATOR_REGISTER_UNARY_WITH_SPARSE_DR(log10, cpu, mshadow_op::log10)
Expand All @@ -1025,7 +1016,28 @@ The storage type of ``log2`` output is always dense
.set_attr<nnvm::FGradient>("FGradient", ElemwiseGradUseIn{"_backward_log2"});

MXNET_OPERATOR_REGISTER_BINARY_WITH_SPARSE_CPU_DR(_backward_log,
                                                  unary_bwd<mshadow_op::log_grad>)
.set_attr<nnvm::FGradient>("FGradient",
  [](const nnvm::NodePtr& n, const std::vector<nnvm::NodeEntry>& ograds) {
    // Second-order gradient for g(x) = log(x):
    //   g'(x)  = 1/x
    //   g''(x) = -1/x^2 = -(g'(x) * g'(x))
    //
    // `n` is the _backward_log node. Its inputs are {head_grad, x}
    // (ElemwiseGradUseIn) and its single output is gx = head_grad * 1/x.
    // NOTE(review): gx below is therefore head_grad/x, not the pure
    // g'(x) = 1/x the comment formula assumes — confirm the chain-rule
    // factors against the higher-order gradient tests.
    auto gx = nnvm::NodeEntry{n, 0, 0};
    auto ggx_mid = MakeNode("elemwise_mul", n->attrs.name + "_backward_mid_grad_grad",
                            {gx, gx}, nullptr, &n);
    auto ggx = MakeNode("negative", n->attrs.name + "_backward_grad_grad",
                        {nnvm::NodeEntry{ggx_mid, 0, 0}}, nullptr, &n);
    // Gradient flowing back to the input x.
    auto gxx_chain = MakeNode("elemwise_mul", n->attrs.name + "_backward_grad_grad_inp",
                              {ograds[0], nnvm::NodeEntry{ggx, 0, 0}}, nullptr, &n);

    // Gradient flowing back to the incoming head gradient.
    // FIX: this node previously reused the suffix "_backward_grad_grad",
    // already taken by `ggx` above; node names must be unique in the graph.
    auto ggrad_chain = MakeNode("elemwise_mul", n->attrs.name + "_backward_grad_grad_ograd",
                                {ograds[0], gx}, nullptr, &n);

    // Return order follows _backward_log's input order: {head_grad, x}.
    std::vector<nnvm::NodeEntry> ret;
    ret.emplace_back(nnvm::NodeEntry{ggrad_chain, 0, 0});
    ret.emplace_back(nnvm::NodeEntry{gxx_chain, 0, 0});
    return ret;
  });

// Backward pass for log10; presumably mshadow_op::log10_grad computes
// head_grad * 1/(x * ln(10)) — verify against mshadow_op's definition.
MXNET_OPERATOR_REGISTER_BINARY_WITH_SPARSE_CPU_DR(_backward_log10,
                                                  unary_bwd<mshadow_op::log10_grad>);
Expand Down

0 comments on commit 16e1815

Please sign in to comment.