From cf7ee9e84683b9625782baaba1702bd57bda179e Mon Sep 17 00:00:00 2001
From: barry-jin
Date: Mon, 19 Apr 2021 18:43:54 -0700
Subject: [PATCH] doc add relu

---
 docs/python_docs/python/api/npx/index.rst        | 1 +
 src/operator/numpy/np_elemwise_unary_op_basic.cc | 3 +++
 2 files changed, 4 insertions(+)

diff --git a/docs/python_docs/python/api/npx/index.rst b/docs/python_docs/python/api/npx/index.rst
index e5f1d8a03eaa..e89ad3d138a8 100644
--- a/docs/python_docs/python/api/npx/index.rst
+++ b/docs/python_docs/python/api/npx/index.rst
@@ -82,6 +82,7 @@ More operators
    :toctree: generated/
 
    sigmoid
+   relu
    smooth_l1
    softmax
    log_softmax
diff --git a/src/operator/numpy/np_elemwise_unary_op_basic.cc b/src/operator/numpy/np_elemwise_unary_op_basic.cc
index 5031efe8207b..5993999848ee 100644
--- a/src/operator/numpy/np_elemwise_unary_op_basic.cc
+++ b/src/operator/numpy/np_elemwise_unary_op_basic.cc
@@ -30,8 +30,11 @@ namespace op {
 
 MXNET_OPERATOR_REGISTER_UNARY(_npx_relu)
 .describe(R"code(Computes rectified linear activation.
 
+.. math::
+   max(features, 0)
+
 )code" ADD_FILELINE)
 .set_attr<FCompute>("FCompute<cpu>", UnaryOp::Compute<cpu, mshadow_op::relu>)
 .set_attr<nnvm::FGradient>("FGradient", ElemwiseGradUseOut{"_backward_relu"});
 
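---
A minimal usage sketch of the operator this patch documents (not part of the
patch itself; assumes an MXNet 2.x build where `npx.relu` is exposed through
the numpy extension namespace, and that NumPy-compatible mode is enabled):

    from mxnet import np, npx
    npx.set_np()                 # enable NumPy-compatible mode

    x = np.array([-2.0, -0.5, 0.0, 1.5])
    y = npx.relu(x)              # elementwise max(features, 0), per the docstring above
    print(y)                     # expected: [0.  0.  0.  1.5]

Because relu's output is zero exactly where its gradient is zero, the backward
pass can be computed from the output alone, which is why the registration uses
ElemwiseGradUseOut rather than a pattern that retains the input.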