This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit

Fallback Amend
This is the final fix for the fallback problem (function call).
luobao-intel committed Jul 16, 2018
1 parent 2e68c96 commit b40c3b1
Showing 7 changed files with 206 additions and 113 deletions.
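Note: most of the inference functions in this changeset now delegate to a shared MKLDNNStorageType helper. Its definition is not among the hunks rendered on this page, but the pooling.cc hunks below inline the same logic, so a plausible sketch can be reconstructed from that pattern. The signature and parameter names here are assumptions inferred from the call sites, not confirmed by this diff:

    // Plausible sketch of the shared helper, inferred from the logic inlined
    // in pooling.cc below. Signature and names are assumed, not confirmed.
    static inline bool MKLDNNStorageType(const nnvm::NodeAttrs& attrs,
                                         const int dev_mask,
                                         bool support_mkldnn,
                                         DispatchMode* dispatch_mode,
                                         std::vector<int>* in_attrs,
                                         std::vector<int>* out_attrs) {
      (void)attrs;  // unused in this sketch; kept to mirror the call sites
      DispatchMode wanted_mode;
    #if MXNET_USE_MKLDNN == 1
      // Prefer the MKL-DNN (kFComputeEx) path only on CPU and only when the
      // operator supports it for these parameters.
      if (dev_mask == mshadow::cpu::kDevMask && support_mkldnn)
        wanted_mode = DispatchMode::kFComputeEx;
      else
    #endif
        wanted_mode = DispatchMode::kFCompute;

      // Take the wanted path only when every input uses default (dense)
      // storage; otherwise fall back, converting sparse inputs to dense.
      bool dispatched = false;
      if (common::ContainsOnlyStorage(*in_attrs, kDefaultStorage)) {
        dispatched = op::storage_type_assign(out_attrs, mxnet::kDefaultStorage,
                                             dispatch_mode, wanted_mode);
      }
      if (!dispatched) {
        dispatched = op::dispatch_fallback(out_attrs, dispatch_mode);
      }
      return dispatched;
    }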
src/operator/nn/activation.cc (24 additions, 24 deletions)
@@ -31,6 +31,8 @@
 #include "./mkldnn/mkldnn_base-inl.h"
 #include "./mkldnn/mkldnn_ops-inl.h"
 #endif  // MXNET_USE_MKLDNN
+#include "../operator_common.h"
+#include "../../common/utils.h"
 
 namespace mxnet {
 namespace op {
@@ -100,16 +102,16 @@ inline static bool ActivationStorageType(const nnvm::NodeAttrs& attrs,
                                          std::vector<int> *out_attrs) {
   CHECK_EQ(in_attrs->size(), 1);
   CHECK_EQ(out_attrs->size(), 1);
-  bool ret = ElemwiseStorageType<1, 1, false, false, false>(attrs, dev_mask,
-                                                            dispatch_mode,
-                                                            in_attrs, out_attrs);
 #if MXNET_USE_MKLDNN == 1
   const ActivationParam& param = nnvm::get<ActivationParam>(attrs.parsed);
-  if (dev_mask == mshadow::cpu::kDevMask && SupportMKLDNNAct(param)) {
-    *dispatch_mode = DispatchMode::kFComputeEx;
-  }
+  if (dev_mask == mshadow::cpu::kDevMask && SupportMKLDNNAct(param))
+    return ElemwiseStorageType<1, 1, false, false, false>(
+        attrs, dev_mask, dispatch_mode, in_attrs, out_attrs);
+  else
+    return op::dispatch_fallback(out_attrs, dispatch_mode);
 #endif
-  return ret;
+  return ElemwiseStorageType<1, 1, false, false, false>(
+      attrs, dev_mask, dispatch_mode, in_attrs, out_attrs);
 }
 
 inline static bool BackwardActStorageType(const nnvm::NodeAttrs& attrs,
@@ -120,30 +122,28 @@ inline static bool BackwardActStorageType(const nnvm::NodeAttrs& attrs,
   bool ret = false;
 #if (MXNET_USE_CUDNN == 1 || MXNET_USE_MKLDNN == 1)
   const ActivationParam& param = nnvm::get<ActivationParam>(attrs.parsed);
-  if (param.act_type != activation::kReLU) {
-    CHECK_EQ(in_attrs->size(), 3U);
-    ret = ElemwiseStorageType<3, 1, false, false, false>(attrs, dev_mask,
-                                                         dispatch_mode,
-                                                         in_attrs, out_attrs);
-  } else {
-    // for ReLU activation, the backward pass only needs ograd and output
-    CHECK_EQ(in_attrs->size(), 2U);
-    ret = ElemwiseStorageType<2, 1, false, false, false>(attrs, dev_mask,
-                                                         dispatch_mode,
-                                                         in_attrs, out_attrs);
-  }
+  if (dev_mask == mshadow::cpu::kDevMask && SupportMKLDNNAct(param))
+    if (param.act_type != activation::kReLU) {
+      CHECK_EQ(in_attrs->size(), 3U);
+      ret = ElemwiseStorageType<3, 1, false, false, false>(attrs, dev_mask,
+                                                           dispatch_mode,
+                                                           in_attrs, out_attrs);
+    } else {
+      // for ReLU activation, the backward pass only needs ograd and output
+      CHECK_EQ(in_attrs->size(), 2U);
+      ret = ElemwiseStorageType<2, 1, false, false, false>(attrs, dev_mask,
+                                                           dispatch_mode,
+                                                           in_attrs, out_attrs);
+    }
+  else
+    ret = op::dispatch_fallback(out_attrs, dispatch_mode);
 #else
   CHECK_EQ(in_attrs->size(), 2U);
   ret = ElemwiseStorageType<2, 1, false, false, false>(attrs, dev_mask,
                                                        dispatch_mode,
                                                        in_attrs, out_attrs);
 #endif
   CHECK_EQ(out_attrs->size(), 1U);
-#if MXNET_USE_MKLDNN == 1
-  if (dev_mask == mshadow::cpu::kDevMask && SupportMKLDNNAct(param)) {
-    *dispatch_mode = DispatchMode::kFComputeEx;
-  }
-#endif
   return ret;
 }

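Both branches above rely on op::dispatch_fallback from ../operator_common.h, one of the headers this commit adds. Its body is not shown in this diff; a simplified sketch of the behavior implied by the call sites follows (the real helper presumably also guards against overwriting an already-assigned dispatch mode):

    // Simplified sketch of op::dispatch_fallback as implied by its call sites:
    // mark every output as dense and route execution through the fallback
    // path, which densifies sparse inputs before the plain FCompute kernel.
    inline bool dispatch_fallback(std::vector<int>* out_attrs,
                                  DispatchMode* dispatch_mode) {
      for (int& stype : *out_attrs)
        stype = kDefaultStorage;
      *dispatch_mode = DispatchMode::kFComputeFallback;
      return true;
    }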
src/operator/nn/convolution.cc (4 additions, 18 deletions)
@@ -298,15 +298,8 @@ inline static bool ConvStorageType(const nnvm::NodeAttrs& attrs,
   CHECK_EQ(in_attrs->size(), in_expected);
   CHECK_EQ(out_attrs->size(), 1);
 
-  DispatchMode wanted_mode;
-#if MXNET_USE_MKLDNN == 1
-  if (dev_mask == mshadow::cpu::kDevMask)
-    wanted_mode = DispatchMode::kFComputeEx;
-  else
-#endif
-    wanted_mode = DispatchMode::kFCompute;
-  return storage_type_assign(out_attrs, mxnet::kDefaultStorage,
-                             dispatch_mode, wanted_mode);
+  return MKLDNNStorageType(attrs, dev_mask, true, dispatch_mode,
+                           in_attrs, out_attrs);
 }
 
 inline static bool BackwardConvStorageType(const nnvm::NodeAttrs& attrs,
@@ -320,15 +313,8 @@ inline static bool BackwardConvStorageType(const nnvm::NodeAttrs& attrs,
   CHECK_EQ(in_attrs->size(), in_expected);
   CHECK_EQ(out_attrs->size(), out_expected);
 
-  DispatchMode wanted_mode;
-#if MXNET_USE_MKLDNN == 1
-  if (dev_mask == mshadow::cpu::kDevMask)
-    wanted_mode = DispatchMode::kFComputeEx;
-  else
-#endif
-    wanted_mode = DispatchMode::kFCompute;
-  return storage_type_assign(out_attrs, mxnet::kDefaultStorage,
-                             dispatch_mode, wanted_mode);
+  return MKLDNNStorageType(attrs, dev_mask, true, dispatch_mode,
+                           in_attrs, out_attrs);
 }
 
 void ConvolutionParamParser(nnvm::NodeAttrs* attrs) {
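For orientation: these inference functions reach the executor through the FInferStorageType operator attribute. The registration already exists in convolution.cc and is not modified by this commit; the following is only a usage sketch of how the functions above are attached:

    // Usage sketch (pre-existing registration, not part of this diff):
    NNVM_REGISTER_OP(Convolution)
    .set_attr<FInferStorageType>("FInferStorageType", ConvStorageType);

    NNVM_REGISTER_OP(_backward_Convolution)
    .set_attr<FInferStorageType>("FInferStorageType", BackwardConvStorageType);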
src/operator/nn/deconvolution.cc (6 additions, 18 deletions)
@@ -27,6 +27,8 @@
 #include "./deconvolution-inl.h"
 #include "./mkldnn/mkldnn_ops-inl.h"
 #include "./mkldnn/mkldnn_base-inl.h"
+#include "../operator_common.h"
+#include "../../common/utils.h"
 
 namespace mxnet {
 namespace op {
@@ -266,15 +268,8 @@ inline static bool DeconvStorageType(const nnvm::NodeAttrs& attrs,
   CHECK_EQ(in_attrs->size(), in_expected);
   CHECK_EQ(out_attrs->size(), 1);
 
-  DispatchMode wanted_mode;
-#if MXNET_USE_MKLDNN == 1
-  if (dev_mask == mshadow::cpu::kDevMask)
-    wanted_mode = DispatchMode::kFComputeEx;
-  else
-#endif
-    wanted_mode = DispatchMode::kFCompute;
-  return storage_type_assign(out_attrs, mxnet::kDefaultStorage,
-                             dispatch_mode, wanted_mode);
+  return MKLDNNStorageType(attrs, dev_mask, true, dispatch_mode,
+                           in_attrs, out_attrs);
 }
 
 inline static bool BackwardDeconvStorageType(const nnvm::NodeAttrs& attrs,
@@ -287,15 +282,8 @@ inline static bool BackwardDeconvStorageType(const nnvm::NodeAttrs& attrs,
   CHECK_EQ(in_attrs->size(), param.no_bias ? 3U : 4U);
   CHECK_EQ(out_attrs->size(), out_expected);
 
-  DispatchMode wanted_mode;
-#if MXNET_USE_MKLDNN == 1
-  if (dev_mask == mshadow::cpu::kDevMask)
-    wanted_mode = DispatchMode::kFComputeEx;
-  else
-#endif
-    wanted_mode = DispatchMode::kFCompute;
-  return storage_type_assign(out_attrs, mxnet::kDefaultStorage,
-                             dispatch_mode, wanted_mode);
+  return MKLDNNStorageType(attrs, dev_mask, true, dispatch_mode,
+                           in_attrs, out_attrs);
 }
 
 #if MXNET_USE_MKLDNN == 1
src/operator/nn/lrn.cc (7 additions, 20 deletions)
@@ -26,6 +26,7 @@
 
 #include "./lrn-inl.h"
 #include "../operator_common.h"
+#include "./mkldnn/mkldnn_base-inl.h"
 #if MXNET_USE_MKLDNN == 1
 #include "./mkldnn/mkldnn_lrn-inl.h"
 #endif
@@ -87,16 +88,9 @@ bool LRNForwardInferStorageType(const nnvm::NodeAttrs& attrs,
                                 std::vector<int> *in_attrs,
                                 std::vector<int> *out_attrs) {
   CHECK(!in_attrs->empty());
-#if MXNET_USE_MKLDNN == 1
-  if (dev_mask == mshadow::cpu::kDevMask) {
-    storage_type_assign(out_attrs, mxnet::kDefaultStorage,
-                        dispatch_mode, DispatchMode::kFComputeEx);
-    return true;
-  }
-#endif
-  storage_type_assign(out_attrs, mxnet::kDefaultStorage,
-                      dispatch_mode, DispatchMode::kFCompute);
-  return true;
+
+  return MKLDNNStorageType(attrs, dev_mask, true, dispatch_mode,
+                           in_attrs, out_attrs);
 }
 
 bool LRNBackwardInferStorageType(const nnvm::NodeAttrs& attrs,
@@ -105,16 +99,9 @@ bool LRNBackwardInferStorageType(const nnvm::NodeAttrs& attrs,
                                  std::vector<int> *in_attrs,
                                  std::vector<int> *out_attrs) {
   CHECK(!in_attrs->empty());
-#if MXNET_USE_MKLDNN == 1
-  if (dev_mask == mshadow::cpu::kDevMask) {
-    storage_type_assign(out_attrs, mxnet::kDefaultStorage,
-                        dispatch_mode, DispatchMode::kFComputeEx);
-    return true;
-  }
-#endif
-  storage_type_assign(out_attrs, mxnet::kDefaultStorage,
-                      dispatch_mode, DispatchMode::kFCompute);
-  return true;
+
+  return MKLDNNStorageType(attrs, dev_mask, true, dispatch_mode,
+                           in_attrs, out_attrs);
 }
 
 #if MXNET_USE_MKLDNN == 1
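The decision these rewritten functions implement is small enough to model on its own. The toy program below uses simplified stand-in names (none are MXNet's actual types) to show the three possible outcomes: the MKL-DNN path, the plain dense path, and the fallback taken whenever any input is non-dense:

    // Self-contained toy of the dispatch decision; all names are simplified
    // stand-ins for MXNet's storage types and dispatch modes.
    #include <algorithm>
    #include <iostream>
    #include <vector>

    enum StorageType { kDefault, kCSR, kRowSparse };
    enum class Mode { FCompute, FComputeEx, FComputeFallback };

    Mode decide(const std::vector<StorageType>& inputs, bool mkldnn_supported) {
      const bool all_dense = std::all_of(inputs.begin(), inputs.end(),
          [](StorageType s) { return s == kDefault; });
      if (!all_dense) return Mode::FComputeFallback;  // densify, then FCompute
      return mkldnn_supported ? Mode::FComputeEx : Mode::FCompute;
    }

    int main() {
      std::cout << int(decide({kDefault, kDefault}, true)) << "\n";  // 1 (FComputeEx)
      std::cout << int(decide({kDefault}, false)) << "\n";           // 0 (FCompute)
      std::cout << int(decide({kCSR, kDefault}, true)) << "\n";      // 2 (fallback)
    }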
src/operator/nn/pooling.cc (32 additions, 13 deletions)
@@ -25,6 +25,8 @@
 */
 #include "../elemwise_op_common.h"
 #include "./pooling-inl.h"
+#include "../operator_common.h"
+#include "../../common/utils.h"
 #if MXNET_USE_NNPACK == 1
 #include "../nnpack/nnpack_pooling-inl.h"
 #endif  // MXNET_USE_NNPACK
@@ -276,18 +278,26 @@ inline static bool PoolingStorageType(const nnvm::NodeAttrs &attrs,
                                       std::vector<int> *in_attrs,
                                       std::vector<int> *out_attrs) {
   CHECK_EQ(in_attrs->size(), 1);
 
+  DispatchMode wanted_mode;
 #if MXNET_USE_MKLDNN == 1
   const PoolingParam &param = nnvm::get<PoolingParam>(attrs.parsed);
-  if (dev_mask == mshadow::cpu::kDevMask && SupportMKLDNNPooling(param)) {
-    return storage_type_assign(out_attrs, mxnet::kDefaultStorage,
-                               dispatch_mode, DispatchMode::kFComputeEx);
-  }
+  if (dev_mask == mshadow::cpu::kDevMask && SupportMKLDNNPooling(param))
+    wanted_mode = DispatchMode::kFComputeEx;
+  else
 #else
   CHECK_EQ(out_attrs->size(), 1);
 #endif
-  return storage_type_assign(out_attrs, mxnet::kDefaultStorage,
-                             dispatch_mode, DispatchMode::kFCompute);
+    wanted_mode = DispatchMode::kFCompute;
+
+  bool dispatched = false;
+  if (common::ContainsOnlyStorage(*in_attrs, kDefaultStorage)) {
+    dispatched = op::storage_type_assign(out_attrs, mxnet::kDefaultStorage,
+                                         dispatch_mode, wanted_mode);
+  }
+  if (!dispatched) {
+    dispatched = op::dispatch_fallback(out_attrs, dispatch_mode);
+  }
+  return dispatched;
 }
 
 inline static bool BackwardPoolingStorageType(const nnvm::NodeAttrs &attrs,
@@ -299,16 +309,25 @@ inline static bool BackwardPoolingStorageType(const nnvm::NodeAttrs &attrs,
   CHECK_EQ(in_attrs->size(), GetNumBackInputs(param));
   CHECK_EQ(out_attrs->size(), 1);
 
+  DispatchMode wanted_mode;
 #if MXNET_USE_MKLDNN == 1
-  if (dev_mask == mshadow::cpu::kDevMask && SupportMKLDNNPooling(param)) {
-    return storage_type_assign(out_attrs, mxnet::kDefaultStorage,
-                               dispatch_mode, DispatchMode::kFComputeEx);
-  }
+  if (dev_mask == mshadow::cpu::kDevMask && SupportMKLDNNPooling(param))
+    wanted_mode = DispatchMode::kFComputeEx;
+  else
 #else
   CHECK_EQ(in_attrs->size(), 3);
 #endif
-  return storage_type_assign(out_attrs, mxnet::kDefaultStorage,
-                             dispatch_mode, DispatchMode::kFCompute);
+    wanted_mode = DispatchMode::kFCompute;
+
+  bool dispatched = false;
+  if (common::ContainsOnlyStorage(*in_attrs, kDefaultStorage)) {
+    dispatched = op::storage_type_assign(out_attrs, mxnet::kDefaultStorage,
+                                         dispatch_mode, wanted_mode);
+  }
+  if (!dispatched) {
+    dispatched = op::dispatch_fallback(out_attrs, dispatch_mode);
+  }
+  return dispatched;
 }
 
 DMLC_REGISTER_PARAMETER(PoolingParam);
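Unlike convolution, deconvolution, and LRN, pooling keeps the wanted-mode selection inline rather than passing a flag to MKLDNNStorageType, presumably because SupportMKLDNNPooling(param) is only declared in MKL-DNN builds and so cannot appear unconditionally in the argument list. Under that assumption, a hypothetical condensed form (the name below is invented for illustration) would be:

    // Hypothetical condensed form; it would only compile in MKL-DNN builds,
    // which is presumably why the actual diff inlines the selection instead.
    #if MXNET_USE_MKLDNN == 1
    inline static bool PoolingStorageTypeCondensed(const nnvm::NodeAttrs &attrs,
                                                   const int dev_mask,
                                                   DispatchMode *dispatch_mode,
                                                   std::vector<int> *in_attrs,
                                                   std::vector<int> *out_attrs) {
      const PoolingParam &param = nnvm::get<PoolingParam>(attrs.parsed);
      return MKLDNNStorageType(attrs, dev_mask, SupportMKLDNNPooling(param),
                               dispatch_mode, in_attrs, out_attrs);
    }
    #endif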
src/operator/nn/softmax.cc (3 additions, 20 deletions)
@@ -27,7 +27,6 @@
 #include "../tensor/elemwise_binary_op.h"
 #include "mkldnn/mkldnn_base-inl.h"
 #include "mkldnn/mkldnn_ops-inl.h"
-#include "../../operator_common.h"
 
 namespace mxnet {
 namespace op {
@@ -50,6 +49,7 @@ static void SoftmaxComputeExCPU(const nnvm::NodeAttrs& attrs,
   FallBackCompute(SoftmaxCompute<cpu, mxnet_op::softmax_fwd>, attrs, ctx,
                   inputs, req, outputs);
 }
+#endif
 
 inline static bool SoftmaxStorageType(const nnvm::NodeAttrs& attrs,
                                       const int dev_mask,
@@ -59,26 +59,9 @@ inline static bool SoftmaxStorageType(const nnvm::NodeAttrs& attrs,
   CHECK_EQ(in_attrs->size(), 1);
   CHECK_EQ(out_attrs->size(), 1);
 
-  DispatchMode wanted_mode;
-#if MXNET_USE_MKLDNN == 1
-  // We only run MKLDNN op if it runs on CPU.
-  if (dev_mask == mshadow::cpu::kDevMask)
-    wanted_mode = DispatchMode::kFComputeEx;
-  else
-#endif
-    wanted_mode = DispatchMode::kFCompute;
-
-  bool dispatched = false;
-  if (!dispatched && common::ContainsOnlyStorage(*in_attrs, kDefaultStorage)){
-    dispatched = op::storage_type_assign(out_attrs, mxnet::kDefaultStorage, dispatch_mode, wanted_mode);
-  }
-  if (!dispatched){
-    dispatched = op::dispatch_fallback(out_attrs, dispatch_mode);
-  }
-
-  return dispatched;
+  return MKLDNNStorageType(attrs, dev_mask, true, dispatch_mode,
+                           in_attrs, out_attrs);
 }
-#endif
 
 MXNET_OPERATOR_REGISTER_UNARY(softmax)
 .describe(R"code(Applies the softmax function.
(The diff for the seventh changed file did not load on this page.)
