minor in act
antinucleon committed Jun 21, 2015
1 parent be5bf08 commit 9c214c8
Showing 2 changed files with 7 additions and 5 deletions.
7 changes: 4 additions & 3 deletions include/mxnet/operator.h
@@ -69,7 +69,7 @@ class Operator {
* in_shape allows unknown elements, which are checked by shape.ndim() == 0.
* For unknown shapes, InferShape will try to fill in the correct Shape in in_shape
* For known shapes, InferShape will check shape consistency
*
* common practice: set the shape of the data input; the weight's shape can usually be inferred
*
* \param out_shape the shape of outputs of the operator
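The common practice the comment describes is easiest to see concretely. Below is a minimal sketch of an InferShape for a fully-connected operator, assuming inputs {data, weight} and a hypothetical num_hidden_ member (neither is taken from this header):

  virtual void InferShape(std::vector<TShape> *in_shape,
                          std::vector<TShape> *out_shape) {
    // data shape must be set by the caller; unknown shapes have ndim() == 0
    const TShape &dshape = (*in_shape)[0];
    CHECK_NE(dshape.ndim(), 0) << "data shape must be known";
    if ((*in_shape)[1].ndim() == 0) {
      // weight shape was left unknown: fill it in from the data shape
      (*in_shape)[1] = mshadow::Shape2(num_hidden_, dshape[1]);
    }
    out_shape->clear();
    TShape oshape;
    oshape = mshadow::Shape2(dshape[0], num_hidden_);  // batch x hidden
    out_shape->push_back(oshape);
  }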
@@ -81,7 +81,7 @@
* \brief perform a forward operation of operator, save the output to TBlob
* \param opt option for Forward, such as whether this is the training phase
* \param ctx runtime context
* \param in_data array of input data
* \param in_data array of input data; it is const
* \param out_data array of output data,
* the space of TBlob in out_data must be pre-allocated with InferShape
*/
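For contrast with the Backward body changed in activation_op-inl.h below, a Forward for that operator plausibly reads as follows. This is a sketch only, assuming the Option type from the doc comment above and the ForwardOp functor named in the second file:

  virtual void Forward(Option opt, RunContext ctx,
                       const std::vector<TBlob> &in_data,
                       const std::vector<TBlob> &out_data) {
    mshadow::Stream<xpu> *stream =
        static_cast<mshadow::Stream<xpu> *>(ctx.stream);
    mshadow::Tensor<xpu, 2> in = in_data[0].FlatTo2D<xpu, real_t>(stream);
    // out_data must be pre-allocated to the shape reported by InferShape
    mshadow::Tensor<xpu, 2> out = out_data[0].FlatTo2D<xpu, real_t>(stream);
    out = mshadow::expr::F<ForwardOp>(in);  // elementwise activation
  }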
@@ -97,14 +97,15 @@
* \param out_grad array of output gradients; each element in the array
* could be one of three possible TBlobs
* \param req_types request types of the gradient-saving operation;
* only an in-place request will change the input data
* \sa GradReqType
*/
virtual void Backward(RunContext ctx,
const std::vector<TBlob> &grad_next,
const std::vector<TBlob> &in_data,
const std::vector<TBlob> &out_grad,
const std::vector<GradReqType> &req);
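The req argument decides how each computed gradient is stored. Below is a sketch of the dispatch an Assign-style helper is expected to perform; the kNullOp/kWriteTo/kWriteInplace/kAddTo names are the conventional GradReqType values and should be treated as assumptions here:

  // writing gradient expression `exp` into destination `out` under request
  // `req`; `out` and `exp` are placeholders, not names from this header
  switch (req) {
    case kNullOp: break;                    // gradient not needed; skip
    case kWriteTo:
    case kWriteInplace: out = exp; break;   // overwrite (inplace aliases input)
    case kAddTo: out += exp; break;         // accumulate into existing gradient
  }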

/*!
* \brief factory function to create a new operator
* \param type the type of operator
5 changes: 3 additions & 2 deletions src/operator/activation_op-inl.h
@@ -50,8 +50,9 @@ class ActivationOp : public Operator {
static_cast<mshadow::Stream<xpu> *>(ctx.stream);
mshadow::Tensor<xpu, 2> grad = grad_next[0].FlatTo2D<xpu, real_t>(stream);
mshadow::Tensor<xpu, 2> data = in_data[0].FlatTo2D<xpu, real_t>(stream);
mshadow::Tensor<xpu, 2> out = in_data[0].FlatTo2D<xpu, real_t>(stream);
Assign(out, req[0], mshadow::expr::F<BackOp>(data) * grad);
mshadow::Tensor<xpu, 2> out = out_grad[0].FlatTo2D<xpu, real_t>(stream);
Assign(out, req[0], mshadow::expr::F<BackOp>(
mshadow::expr::F<ForwardOp>(data)) * grad);
}
}; // class ActivationOp
} // namespace op
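The substance of this fix: the destination tensor now comes from out_grad instead of in_data, and BackOp is applied to ForwardOp(data) rather than to the raw input. That composition is what elementwise activations expect when their derivative is written in terms of the output y = f(x). A sketch of a matching functor pair for sigmoid, in the usual mshadow unary-operator style (illustrative names, not from this file):

  struct sigmoid {  // ForwardOp: y = 1 / (1 + exp(-x))
    MSHADOW_XINLINE static real_t Map(real_t x) {
      return 1.0f / (1.0f + expf(-x));
    }
  };
  struct sigmoid_grad {  // BackOp: derivative expressed in y, i.e. y * (1 - y)
    MSHADOW_XINLINE static real_t Map(real_t y) {
      return y * (1.0f - y);
    }
  };

With such a pair, F<BackOp>(F<ForwardOp>(data)) * grad evaluates f'(x) * grad lazily in one pass, which is exactly the shape of the corrected Assign expression.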
