Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

MKLDNN can be turned off with env var #12058

Merged
merged 9 commits into from
Aug 17, 2018
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions docs/faq/env_var.md
Original file line number Diff line number Diff line change
Expand Up @@ -155,6 +155,11 @@ When USE_PROFILER is enabled in Makefile or CMake, the following environments ca
* MXNET_HOME
- Data directory in the filesystem for storage, for example when downloading gluon models.
- Default is .mxnet in *nix and APPDATA/mxnet in Windows.

* MXNET_MKLDNN_ENABLED
- Values: 0, 1 ```(default=1)```
- Flag to enable or disable MKLDNN accelerator. On by default.
- Only applies to MXNet builds compiled with MKLDNN support (```pip install mxnet-mkl``` or built from source with ```USE_MKLDNN=1```)

Settings for Minimum Memory Usage
---------------------------------
Expand Down
6 changes: 6 additions & 0 deletions src/executor/attach_op_execs_pass.cc
Original file line number Diff line number Diff line change
Expand Up @@ -262,6 +262,12 @@ void CreateOpExecs(const Graph& g, OpExecVector* p_ret, size_t i) {
const auto& vctx = g.GetAttr<ContextVector>("context");
const auto& dispatch_modes = g.GetAttr<DispatchModeVector>("dispatch_mode");

#if MXNET_USE_MKLDNN == 1
if (!MKLDNNEnvSet()) common::LogOnce("MXNET_MKLDNN_ENABLED flag is off. "
"You can re-enable by setting MXNET_MKLDNN_ENABLED=1");
#endif


Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This warning only gets printed for symbolic. What about imperative/Gluon?

// get the graph
const auto& idx = g.indexed_graph();
OpExecVector& ret = *p_ret;
Expand Down
8 changes: 8 additions & 0 deletions src/operator/nn/activation.cc
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,10 @@ inline static bool ActivationStorageType(const nnvm::NodeAttrs& attrs,
if (dev_mask == mshadow::cpu::kDevMask && SupportMKLDNNAct(param)) {
*dispatch_mode = DispatchMode::kFComputeEx;
}
if (dev_mask == mshadow::cpu::kDevMask && !MKLDNNEnvSet()) {
*dispatch_mode = DispatchMode::kFComputeFallback;
return ret;
}
#endif
return ret;
}
Expand Down Expand Up @@ -158,6 +162,10 @@ inline static bool BackwardActStorageType(const nnvm::NodeAttrs& attrs,
if (dev_mask == mshadow::cpu::kDevMask && SupportMKLDNNAct(param)) {
*dispatch_mode = DispatchMode::kFComputeEx;
}
if (dev_mask == mshadow::cpu::kDevMask && !MKLDNNEnvSet()) {
*dispatch_mode = DispatchMode::kFComputeFallback;
return ret;
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is this return statement redundant due to the statement in line 170?

}
#endif
return ret;
}
Expand Down
3 changes: 3 additions & 0 deletions src/operator/nn/batch_norm.cc
Original file line number Diff line number Diff line change
Expand Up @@ -460,6 +460,9 @@ static inline bool BatchNormStorageType(const nnvm::NodeAttrs &attrs,
dispatched = MKLDNNStorageType(attrs, dev_mask, true, dispatch_mode,
in_attrs, out_attrs);
}
if (!MKLDNNEnvSet()) {
*dispatch_mode = DispatchMode::kFComputeFallback;
}
#else
for (int& v : *in_attrs)
if (v == - 1) v = kDefaultStorage;
Expand Down
8 changes: 8 additions & 0 deletions src/operator/nn/concat.cc
Original file line number Diff line number Diff line change
Expand Up @@ -194,6 +194,10 @@ inline static bool ConcatForwardInferStorageType(const nnvm::NodeAttrs& attrs,
if (!dispatched) {
dispatched = dispatch_fallback(out_attrs, dispatch_mode);
}
#if MXNET_USE_MKLDNN == 1
if (!MKLDNNEnvSet())
*dispatch_mode = DispatchMode::kFComputeFallback;
#endif
return dispatched;
}

Expand All @@ -213,6 +217,10 @@ inline static bool BackwardConcatStorageType(const nnvm::NodeAttrs& attrs,
else
#endif
wanted_mode = DispatchMode::kFCompute;
#if MXNET_USE_MKLDNN == 1
if (!MKLDNNEnvSet())
wanted_mode = DispatchMode::kFComputeFallback;
#endif
return storage_type_assign(out_attrs, mxnet::kDefaultStorage,
dispatch_mode, wanted_mode);
}
Expand Down
8 changes: 6 additions & 2 deletions src/operator/nn/convolution.cc
Original file line number Diff line number Diff line change
Expand Up @@ -300,7 +300,9 @@ inline static bool ConvStorageType(const nnvm::NodeAttrs& attrs,

DispatchMode wanted_mode;
#if MXNET_USE_MKLDNN == 1
if (dev_mask == mshadow::cpu::kDevMask)
if (dev_mask == mshadow::cpu::kDevMask && !MKLDNNEnvSet())
wanted_mode = DispatchMode::kFComputeFallback;
else if (dev_mask == mshadow::cpu::kDevMask)
wanted_mode = DispatchMode::kFComputeEx;
else
#endif
Expand All @@ -322,7 +324,9 @@ inline static bool BackwardConvStorageType(const nnvm::NodeAttrs& attrs,

DispatchMode wanted_mode;
#if MXNET_USE_MKLDNN == 1
if (dev_mask == mshadow::cpu::kDevMask)
if (dev_mask == mshadow::cpu::kDevMask && !MKLDNNEnvSet())
wanted_mode = DispatchMode::kFComputeFallback;
else if (dev_mask == mshadow::cpu::kDevMask)
wanted_mode = DispatchMode::kFComputeEx;
else
#endif
Expand Down
8 changes: 6 additions & 2 deletions src/operator/nn/deconvolution.cc
Original file line number Diff line number Diff line change
Expand Up @@ -268,7 +268,9 @@ inline static bool DeconvStorageType(const nnvm::NodeAttrs& attrs,

DispatchMode wanted_mode;
#if MXNET_USE_MKLDNN == 1
if (dev_mask == mshadow::cpu::kDevMask)
if (dev_mask == mshadow::cpu::kDevMask && !MKLDNNEnvSet())
wanted_mode = DispatchMode::kFComputeFallback;
else if (dev_mask == mshadow::cpu::kDevMask)
wanted_mode = DispatchMode::kFComputeEx;
else
#endif
Expand All @@ -289,7 +291,9 @@ inline static bool BackwardDeconvStorageType(const nnvm::NodeAttrs& attrs,

DispatchMode wanted_mode;
#if MXNET_USE_MKLDNN == 1
if (dev_mask == mshadow::cpu::kDevMask)
if (dev_mask == mshadow::cpu::kDevMask && !MKLDNNEnvSet())
wanted_mode = DispatchMode::kFComputeFallback;
else if (dev_mask == mshadow::cpu::kDevMask)
wanted_mode = DispatchMode::kFComputeEx;
else
#endif
Expand Down
9 changes: 9 additions & 0 deletions src/operator/nn/fully_connected.cc
Original file line number Diff line number Diff line change
Expand Up @@ -193,6 +193,11 @@ inline static bool FCStorageType(const nnvm::NodeAttrs& attrs,
dispatched = storage_type_assign(out_attrs, mxnet::kDefaultStorage,
dispatch_mode, DispatchMode::kFComputeEx);
}
#if MXNET_USE_MKLDNN == 1
if (!MKLDNNEnvSet())
*dispatch_mode = DispatchMode::kFComputeFallback;
#endif

if (!dispatched) {
dispatched = dispatch_fallback(out_attrs, dispatch_mode);
}
Expand Down Expand Up @@ -223,6 +228,10 @@ inline static bool BackwardFCStorageType(const nnvm::NodeAttrs& attrs,
dispatched = storage_type_assign(out_attrs, mxnet::kDefaultStorage,
dispatch_mode, DispatchMode::kFCompute);
}
#if MXNET_USE_MKLDNN == 1
if (!MKLDNNEnvSet())
*dispatch_mode = DispatchMode::kFComputeFallback;
#endif
return dispatched;
}

Expand Down
19 changes: 11 additions & 8 deletions src/operator/nn/lrn.cc
Original file line number Diff line number Diff line change
Expand Up @@ -88,10 +88,12 @@ bool LRNForwardInferStorageType(const nnvm::NodeAttrs& attrs,
std::vector<int> *out_attrs) {
CHECK(!in_attrs->empty());
#if MXNET_USE_MKLDNN == 1
if (dev_mask == mshadow::cpu::kDevMask) {
storage_type_assign(out_attrs, mxnet::kDefaultStorage,
if (dev_mask == mshadow::cpu::kDevMask && !MKLDNNEnvSet()) {
return storage_type_assign(out_attrs, mxnet::kDefaultStorage,
dispatch_mode, DispatchMode::kFComputeFallback);
} else if (dev_mask == mshadow::cpu::kDevMask) {
return storage_type_assign(out_attrs, mxnet::kDefaultStorage,
dispatch_mode, DispatchMode::kFComputeEx);
return true;
}
#endif
storage_type_assign(out_attrs, mxnet::kDefaultStorage,
Expand All @@ -106,15 +108,16 @@ bool LRNBackwardInferStorageType(const nnvm::NodeAttrs& attrs,
std::vector<int> *out_attrs) {
CHECK(!in_attrs->empty());
#if MXNET_USE_MKLDNN == 1
if (dev_mask == mshadow::cpu::kDevMask) {
storage_type_assign(out_attrs, mxnet::kDefaultStorage,
if (dev_mask == mshadow::cpu::kDevMask && !MKLDNNEnvSet()) {
return storage_type_assign(out_attrs, mxnet::kDefaultStorage,
dispatch_mode, DispatchMode::kFComputeFallback);
} else if (dev_mask == mshadow::cpu::kDevMask) {
return storage_type_assign(out_attrs, mxnet::kDefaultStorage,
dispatch_mode, DispatchMode::kFComputeEx);
return true;
}
#endif
storage_type_assign(out_attrs, mxnet::kDefaultStorage,
return storage_type_assign(out_attrs, mxnet::kDefaultStorage,
dispatch_mode, DispatchMode::kFCompute);
return true;
}

#if MXNET_USE_MKLDNN == 1
Expand Down
4 changes: 4 additions & 0 deletions src/operator/nn/mkldnn/mkldnn_base-inl.h
Original file line number Diff line number Diff line change
Expand Up @@ -137,6 +137,10 @@ static inline bool SupportMKLDNN(const NDArray &input) {
&& SupportStorageMKLDNN(input.storage_type());
}

static inline bool MKLDNNEnvSet() {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can we declare a static var so the linear time env lookup only happen once ?

return dmlc::GetEnv("MXNET_MKLDNN_ENABLED", true);
}

/*
* This is to align address to a certain alignment.
*/
Expand Down
10 changes: 8 additions & 2 deletions src/operator/nn/pooling.cc
Original file line number Diff line number Diff line change
Expand Up @@ -295,7 +295,10 @@ inline static bool PoolingStorageType(const nnvm::NodeAttrs &attrs,

#if MXNET_USE_MKLDNN == 1
const PoolingParam &param = nnvm::get<PoolingParam>(attrs.parsed);
if (dev_mask == mshadow::cpu::kDevMask && SupportMKLDNNPooling(param)) {
if (dev_mask == mshadow::cpu::kDevMask && !MKLDNNEnvSet()) {
return storage_type_assign(out_attrs, mxnet::kDefaultStorage,
dispatch_mode, DispatchMode::kFComputeFallback);
} else if (dev_mask == mshadow::cpu::kDevMask && SupportMKLDNNPooling(param)) {
return storage_type_assign(out_attrs, mxnet::kDefaultStorage,
dispatch_mode, DispatchMode::kFComputeEx);
}
Expand All @@ -316,7 +319,10 @@ inline static bool BackwardPoolingStorageType(const nnvm::NodeAttrs &attrs,
CHECK_EQ(out_attrs->size(), 1);

#if MXNET_USE_MKLDNN == 1
if (dev_mask == mshadow::cpu::kDevMask && SupportMKLDNNPooling(param)) {
if (dev_mask == mshadow::cpu::kDevMask && !MKLDNNEnvSet()) {
return storage_type_assign(out_attrs, mxnet::kDefaultStorage,
dispatch_mode, DispatchMode::kFComputeFallback);
} else if (dev_mask == mshadow::cpu::kDevMask && SupportMKLDNNPooling(param)) {
return storage_type_assign(out_attrs, mxnet::kDefaultStorage,
dispatch_mode, DispatchMode::kFComputeEx);
}
Expand Down
4 changes: 3 additions & 1 deletion src/operator/nn/softmax.cc
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,9 @@ inline static bool SoftmaxStorageType(const nnvm::NodeAttrs& attrs,
DispatchMode wanted_mode;
#if MXNET_USE_MKLDNN == 1
// We only run MKLDNN op if it runs on CPU.
if (dev_mask == mshadow::cpu::kDevMask)
if (dev_mask == mshadow::cpu::kDevMask && !MKLDNNEnvSet())
wanted_mode = DispatchMode::kFComputeFallback;
else if (dev_mask == mshadow::cpu::kDevMask)
wanted_mode = DispatchMode::kFComputeEx;
else
#endif
Expand Down
8 changes: 6 additions & 2 deletions src/operator/tensor/elemwise_binary_op_basic.cc
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,9 @@ static inline bool ElemwiseAddStorageType(const nnvm::NodeAttrs& attrs,
bool ret = ElemwiseBinaryOp::PreferDenseStorageType<true, true, true>(
attrs, dev_mask, dispatch_mode, in_attrs, out_attrs);
#if MXNET_USE_MKLDNN == 1
if (dev_mask == mshadow::cpu::kDevMask
if (dev_mask == mshadow::cpu::kDevMask && !MKLDNNEnvSet()) {
*dispatch_mode = DispatchMode::kFComputeFallback;
} else if (dev_mask == mshadow::cpu::kDevMask
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please also align the 2 lines below with this one.

&& common::ContainsOnlyStorage(*in_attrs, kDefaultStorage)
&& out_attrs->at(0) == kDefaultStorage) {
*dispatch_mode = DispatchMode::kFComputeEx;
Expand Down Expand Up @@ -132,7 +134,9 @@ static inline bool ElemwiseAddBackwardStorageType(const nnvm::NodeAttrs& attrs,
bool ret = ElemwiseStorageType<1, 2, true, true, true>(attrs, dev_mask, dispatch_mode,
in_attrs, out_attrs);
#if MXNET_USE_MKLDNN == 1
if (dev_mask == mshadow::cpu::kDevMask) {
if (dev_mask == mshadow::cpu::kDevMask && !MKLDNNEnvSet()) {
*dispatch_mode = DispatchMode::kFComputeFallback;
} else if (dev_mask == mshadow::cpu::kDevMask) {
*dispatch_mode = DispatchMode::kFComputeEx;
}
#endif
Expand Down