This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit

[mkldnn-v1.0] Minor fix for leakyrelu compile flag (#16519)
* change to MXNET_USE_MKLDNN == 100

* trigger
xinyu-intel authored and pengzhao-intel committed Oct 19, 2019
1 parent ac7792d commit 6eadab3
Showing 1 changed file with 8 additions and 8 deletions.
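
The entire change is a guard-value bump: every MKL-DNN-specific block in leaky_relu.cc now compiles only when MXNET_USE_MKLDNN equals 100, the value used for the MKL-DNN v1.0 integration, instead of the legacy value 1. Below is a minimal, standalone sketch of that guard pattern; the RunActivationPath function and the fallback macro default are hypothetical illustrations, and in MXNet the macro value is supplied by the build configuration.

// Standalone sketch of the versioned compile guard.
// RunActivationPath is a hypothetical name, not an MXNet symbol.
#include <iostream>

#ifndef MXNET_USE_MKLDNN
#define MXNET_USE_MKLDNN 0  // in MXNet this comes from the build system
#endif

#if MXNET_USE_MKLDNN == 100
// Compiled only for the MKL-DNN v1.0 integration.
void RunActivationPath() { std::cout << "MKL-DNN v1.0 LeakyReLU path\n"; }
#else
// Fallback when the MKL-DNN v1.0 integration is not enabled.
void RunActivationPath() { std::cout << "default CPU LeakyReLU path\n"; }
#endif

int main() {
  RunActivationPath();
  return 0;
}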
src/operator/leaky_relu.cc: 16 changes (8 additions, 8 deletions)
@@ -25,10 +25,10 @@
  */
 
 #include "./leaky_relu-inl.h"
-#if MXNET_USE_MKLDNN == 1
+#if MXNET_USE_MKLDNN == 100
 #include "./nn/mkldnn/mkldnn_base-inl.h"
 #include "./nn/mkldnn/mkldnn_ops-inl.h"
-#endif // MXNET_USE_MKLDNN == 1
+#endif // MXNET_USE_MKLDNN == 100
 
 #include <nnvm/op_attr_types.h>
 namespace mxnet {
@@ -84,7 +84,7 @@ static bool LeakyReLUShape(const nnvm::NodeAttrs& attrs,
   return true;
 }
 
-#if MXNET_USE_MKLDNN == 1
+#if MXNET_USE_MKLDNN == 100
 static void LeakyReLUComputeExCPU(const nnvm::NodeAttrs& attrs,
                                   const OpContext& ctx,
                                   const std::vector<NDArray>& inputs,
@@ -139,7 +139,7 @@ inline static bool BackwardLeakyReLUStorageType(const nnvm::NodeAttrs& attrs,
   return MKLDNNStorageType(attrs, dev_mask, SupportMKLDNNLeakyRelu(param),
                            dispatch_mode, in_attrs, out_attrs);
 }
-#endif // MXNET_USE_MKLDNN == 1
+#endif // MXNET_USE_MKLDNN == 100
 
 NNVM_REGISTER_OP(LeakyReLU)
 .describe(R"code(Applies Leaky rectified linear unit activation element-wise to the input.
@@ -169,7 +169,7 @@ The following modified ReLU Activation functions are supported:
   return param.act_type == leakyrelu::kRReLU ? 2 : 1;
 })
 .set_attr_parser(ParamParser<LeakyReLUParam>)
-#if MXNET_USE_MKLDNN == 1
+#if MXNET_USE_MKLDNN == 100
 .set_attr<FInferStorageType>("FInferStorageType", LeakyReLUStorageType)
 #endif
 .set_attr<nnvm::FListInputNames>("FListInputNames",
@@ -187,7 +187,7 @@ The following modified ReLU Activation functions are supported:
 .set_attr<mxnet::FInferShape>("FInferShape", LeakyReLUShape)
 .set_attr<nnvm::FInferType>("FInferType", LeakyReLUType)
 .set_attr<FCompute>("FCompute<cpu>", LeakyReLUCompute<cpu>)
-#if MXNET_USE_MKLDNN == 1
+#if MXNET_USE_MKLDNN == 100
 .set_attr<bool>("TIsMKLDNN", true)
 .set_attr<FComputeEx>("FComputeEx<cpu>", LeakyReLUComputeExCPU)
 #endif
@@ -211,7 +211,7 @@ NNVM_REGISTER_OP(_backward_LeakyReLU)
   return param.act_type == leakyrelu::kPReLU ? 2 : 1;
 })
 .set_attr<nnvm::TIsBackward>("TIsBackward", true)
-#if MXNET_USE_MKLDNN == 1
+#if MXNET_USE_MKLDNN == 100
 .set_attr<FInferStorageType>("FInferStorageType", BackwardLeakyReLUStorageType)
 #endif
 .set_attr<nnvm::FInplaceOption>("FInplaceOption", [](const NodeAttrs& attrs){
@@ -221,7 +221,7 @@
   return std::vector<ResourceRequest>{ResourceRequest::kTempSpace};
 })
 .set_attr_parser(ParamParser<LeakyReLUParam>)
-#if MXNET_USE_MKLDNN == 1
+#if MXNET_USE_MKLDNN == 100
 .set_attr<bool>("TIsMKLDNN", true)
 .set_attr<FComputeEx>("FComputeEx<cpu>", LeakyReLUGradComputeExCPU)
 #endif
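
For context, the registration blocks above chain .set_attr(...) calls on LeakyReLU and _backward_LeakyReLU, and the MKL-DNN-specific attributes (FInferStorageType, TIsMKLDNN, FComputeEx<cpu>) are added only when the guard value matches. The snippet below imitates that conditional-chaining pattern with a hypothetical OpBuilder class; it is not MXNet's NNVM API, just a sketch of how an #if can sit inside a chained registration.

// Hypothetical OpBuilder, used only to illustrate conditional attribute
// registration; it is not part of MXNet or NNVM.
#include <iostream>
#include <map>
#include <string>

#ifndef MXNET_USE_MKLDNN
#define MXNET_USE_MKLDNN 0  // normally supplied by the build system
#endif

class OpBuilder {
 public:
  // Chained setter mirroring the .set_attr(...) style used in the diff above.
  OpBuilder& set_attr(const std::string& key, const std::string& value) {
    attrs_[key] = value;
    return *this;
  }
  void print() const {
    for (const auto& kv : attrs_) {
      std::cout << kv.first << " = " << kv.second << "\n";
    }
  }

 private:
  std::map<std::string, std::string> attrs_;
};

int main() {
  OpBuilder op;
  op.set_attr("FCompute<cpu>", "LeakyReLUCompute<cpu>")
#if MXNET_USE_MKLDNN == 100
    // Present only in an MKL-DNN v1.0 build, like the guarded lines above.
    .set_attr("TIsMKLDNN", "true")
    .set_attr("FComputeEx<cpu>", "LeakyReLUComputeExCPU")
#endif
    ;
  op.print();
  return 0;
}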
