diff --git a/docs/python_docs/python/api/npx/index.rst b/docs/python_docs/python/api/npx/index.rst
index e5f1d8a03eaa..e89ad3d138a8 100644
--- a/docs/python_docs/python/api/npx/index.rst
+++ b/docs/python_docs/python/api/npx/index.rst
@@ -82,6 +82,7 @@ More operators
    :toctree: generated/
 
    sigmoid
+   relu
    smooth_l1
    softmax
    log_softmax
diff --git a/src/operator/numpy/np_elemwise_unary_op_basic.cc b/src/operator/numpy/np_elemwise_unary_op_basic.cc
index 5031efe8207b..5993999848ee 100644
--- a/src/operator/numpy/np_elemwise_unary_op_basic.cc
+++ b/src/operator/numpy/np_elemwise_unary_op_basic.cc
@@ -30,8 +30,11 @@ namespace op {
 
 MXNET_OPERATOR_REGISTER_UNARY(_npx_relu)
 .describe(R"code(Computes rectified linear activation.
+.. math::
+   max(features, 0)
+
 )code" ADD_FILELINE)
 .set_attr<FCompute>("FCompute<cpu>", UnaryOp::Compute<cpu, mshadow_op::relu>)
 .set_attr<nnvm::FGradient>("FGradient", ElemwiseGradUseOut{"_backward_relu"});
 