
Commit 6bda387: remove individual checks

azai91 committed Nov 28, 2018 (1 parent: 30e0636)
Showing 6 changed files with 6 additions and 8 deletions.
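All six hunks make the same change: each MKLDNN backward compute function drops its individual ctx.need_grad condition, leaving operator support as the only remaining gate on the MKLDNN path. A minimal sketch of the shared pattern after this commit follows; the "Op" names are generic placeholders for the six operators, and the fallback branch (elided in the hunks below) is assumed from context rather than shown in this diff.

// Sketch of the dispatch pattern shared by the six backward functions below.
// "Op" stands in for Activation, BatchNorm, Deconvolution, FullyConnected,
// LRN, and Pooling. The fallback branch is an assumption, not part of this diff.
void OpGradComputeExCPU(const nnvm::NodeAttrs &attrs, const OpContext &ctx,
                        const std::vector<NDArray> &inputs,
                        const std::vector<OpReqType> &req,
                        const std::vector<NDArray> &outputs) {
  // Before this commit, the condition here also required ctx.need_grad.
  if (SupportMKLDNN(inputs[0])) {
    MKLDNN_OPCHECK_INIT(true, outputs.size(), inputs, outputs);
    MKLDNNOpBackward(attrs, ctx, inputs, req, outputs);  // MKLDNN kernel
    MKLDNN_OPCHECK_RUN(OpGradCompute<cpu>, attrs, ctx, inputs, req, outputs);
    return;
  }
  // Otherwise fall back to the reference CPU implementation.
  FallbackCompute(OpGradCompute<cpu>, attrs, ctx, inputs, req, outputs);
}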

src/operator/nn/activation.cc (1 addition, 1 deletion)

@@ -91,7 +91,7 @@ void ActivationGradComputeExCPU(const nnvm::NodeAttrs& attrs,
   const ActivationParam& param = nnvm::get<ActivationParam>(attrs.parsed);
   bool relu = param.act_type == activation::kReLU;
   CHECK_EQ(inputs.size(), relu ? 2U : 3U);
-  if (SupportMKLDNN(inputs[0]) && ctx.need_grad) {
+  if (SupportMKLDNN(inputs[0])) {
     MKLDNN_OPCHECK_INIT(true, outputs.size(), inputs, outputs);
     // XXX: for y = relu(x), y is passed as "in_data" to Backward()
     MKLDNNActivationBackward(attrs, ctx, inputs[0], relu ? inputs[1] : inputs[2], req[0],

src/operator/nn/batch_norm.cc (1 addition, 2 deletions)

@@ -421,8 +421,7 @@ void BatchNormGradComputeExCPU(const nnvm::NodeAttrs &attrs,
   TShape shape = inputs[0].shape();
   // MKLDNN batchnorm only works well on the special MKLDNN layout.
   if (SupportMKLDNNBN(inputs[0], param)
-      && (inputs[3].IsMKLDNNData() || inputs[0].IsMKLDNNData())
-      && ctx.need_grad) {
+      && (inputs[3].IsMKLDNNData() || inputs[0].IsMKLDNNData())) {
     std::vector<NDArray> out_grad(1);
     std::vector<NDArray> out_data(3);
     std::vector<NDArray> in_data(3);

src/operator/nn/deconvolution.cc (1 addition, 1 deletion)

@@ -312,7 +312,7 @@ static void DeconvolutionGradComputeExCPU(const nnvm::NodeAttrs& attrs,
                                           const std::vector<OpReqType>& req,
                                           const std::vector<NDArray>& outputs) {
   const DeconvolutionParam& param = nnvm::get<DeconvolutionParam>(attrs.parsed);
-  if (SupportMKLDNNDeconv(param, inputs[0]) && ctx.need_grad) {
+  if (SupportMKLDNNDeconv(param, inputs[0])) {
     MKLDNN_OPCHECK_INIT(true, outputs.size(), inputs, outputs);
     MKLDNNDeconvolutionBackward(attrs, ctx, inputs, req, outputs);
     MKLDNN_OPCHECK_RUN(DeconvolutionGradCompute<cpu>, attrs, ctx, inputs, req,

src/operator/nn/fully_connected.cc (1 addition, 1 deletion)

@@ -141,7 +141,7 @@ void FullyConnectedGradComputeExCPU(const nnvm::NodeAttrs& attrs,
                                     const std::vector<NDArray> &inputs,
                                     const std::vector<OpReqType> &req,
                                     const std::vector<NDArray> &outputs) {
-  if (SupportMKLDNN(inputs[0]) && ctx.need_grad) {
+  if (SupportMKLDNN(inputs[0])) {
     MKLDNN_OPCHECK_INIT(true, outputs.size(), inputs, outputs);
     MKLDNNFCBackward(attrs, ctx, inputs, req, outputs);
     MKLDNN_OPCHECK_RUN(FullyConnectedGradCompute<cpu>, attrs, ctx, inputs, req,

src/operator/nn/lrn.cc (1 addition, 1 deletion)

@@ -133,7 +133,7 @@ void LRNGradComputeExCPU(const nnvm::NodeAttrs &attrs,
   const NDArray &in_data = inputs[1];
   const NDArray &in_grad = outputs[0];
 
-  if (SupportMKLDNN(inputs[0]) && ctx.need_grad) {
+  if (SupportMKLDNN(inputs[0])) {
     MKLDNN_OPCHECK_INIT(true, outputs.size(), inputs, outputs);
     MKLDNNLRNBackward(ctx, param, out_grad, in_data, req[0], in_grad);
     MKLDNN_OPCHECK_RUN(LRNGradCompute<cpu>, attrs, ctx, inputs, req, outputs);

src/operator/nn/pooling.cc (1 addition, 2 deletions)

@@ -270,8 +270,7 @@ void PoolingGradComputeExCPU(const nnvm::NodeAttrs &attrs, const OpContext &ctx,
 
 
   if (SupportMKLDNN(inputs[0])
-      && SupportMKLDNNPooling(param, inputs[0].shape())
-      && ctx.need_grad) {
+      && SupportMKLDNNPooling(param, inputs[0].shape())) {
     const NDArray &out_grad = inputs[0];
     const NDArray *workspace = nullptr;
     const NDArray *in_data = nullptr;
