Skip to content

Commit

Permalink
[MXNET-860] Reduce redundant copies, check for regressions with clang-tidy (apache#12355)
Browse files Browse the repository at this point in the history

* [MXNET-860] - Fix some unneeded copies

* [MXNET-860] - Add unnecessary copy check to clang-tidy
  • Loading branch information
KellenSunderland authored and anirudh2290 committed Sep 19, 2018
1 parent e50919b commit 74a75d5
Show file tree
Hide file tree
Showing 3 changed files with 6 additions and 6 deletions.
2 changes: 1 addition & 1 deletion .clang-tidy
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ Checks: >

# In order to trigger an error, you must have a rule defined both in checks and in this section.
WarningsAsErrors: >
cppcoreguidelines-no-malloc
cppcoreguidelines-no-malloc, performance-unnecessary-copy-initialization
# Todo: define a better regex match that includes most project headers, but excludes third party
# code.
Expand Down
4 changes: 2 additions & 2 deletions src/ndarray/ndarray.cc
Original file line number Diff line number Diff line change
Expand Up @@ -1031,7 +1031,7 @@ inline void CopyFromToRspImpl(const NDArray& from, const NDArray& to, RunContext
op::FillZerosRspImpl(s, to);
return;
}
auto aux_shape = from.aux_shape(rowsparse::kIdx);
const auto& aux_shape = from.aux_shape(rowsparse::kIdx);
to.CheckAndAlloc({aux_shape});
TBlob val = to.data();
TBlob idx = to.aux_data(rowsparse::kIdx);
Expand Down Expand Up @@ -1122,7 +1122,7 @@ void CopyFromToImpl(const NDArray& from, const NDArray& to,
if (from_stype == to_stype) {
casted_nd = from; // same stype, no need to cast from
} else { // different stypes on different ctx needs an temporary casted_nd
TShape shape = from.shape();
const TShape& shape = from.shape();
if (to_stype == kDefaultStorage) {
casted_nd = NDArray(shape, from_ctx);
} else {
Expand Down
6 changes: 3 additions & 3 deletions src/operator/contrib/roi_align.cc
Original file line number Diff line number Diff line change
Expand Up @@ -422,7 +422,7 @@ void ROIAlignForwardCompute(const nnvm::NodeAttrs& attrs,
CHECK_EQ(out_data.size(), expected_out);
CHECK_EQ(out_data[roialign::kOut].shape_[0], in_data[roialign::kBox].shape_[0]);

const ROIAlignParam param = nnvm::get<ROIAlignParam>(attrs.parsed);
const ROIAlignParam& param = nnvm::get<ROIAlignParam>(attrs.parsed);

const int count = out_data[roialign::kOut].Size();
// const int num_rois = in_data[roialign::kBox].size(0);
Expand Down Expand Up @@ -466,7 +466,7 @@ void ROIAlignBackwardCompute(const nnvm::NodeAttrs& attrs,
CHECK_NE(req[1], kWriteInplace) <<
"ROIAlign: Backward doesn't support kWriteInplace.";

const ROIAlignParam param = nnvm::get<ROIAlignParam>(attrs.parsed);
const ROIAlignParam& param = nnvm::get<ROIAlignParam>(attrs.parsed);

const int count = out_grad[0].Size();
const int num_rois = in_data[0].size(0);
Expand Down Expand Up @@ -534,7 +534,7 @@ He, Kaiming, et al. "Mask R-CNN." ICCV, 2017
.set_attr<nnvm::FInferShape>("FInferShape", [](const nnvm::NodeAttrs& attrs,
std::vector<TShape> *in_shape, std::vector<TShape> *out_shape){
using namespace mshadow;
const ROIAlignParam param = nnvm::get<ROIAlignParam>(attrs.parsed);
const ROIAlignParam& param = nnvm::get<ROIAlignParam>(attrs.parsed);
CHECK_EQ(in_shape->size(), 2) << "Input:[data, rois]";
// data: [batch_size, c, h, w]
TShape dshape = in_shape->at(roialign::kData);
Expand Down

0 comments on commit 74a75d5

Please sign in to comment.