
Commit aa0c846

[MISC] Fix compilation warnings of unnecessary std::move() calls
Fix compilation warnings of the form "moving a temporary object prevents copy elision".
1 parent c3d3596 commit aa0c846
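
For context, the warning these changes address (-Wpessimizing-move in Clang/GCC, "moving a temporary object prevents copy elision") fires when std::move() is applied to an expression that is already a temporary, such as the by-value results of .value(), .value_or(), or a plain function call in the diffs below. The extra std::move() forces a move construction where the compiler could otherwise construct the object in place. A minimal standalone sketch of the pattern, not taken from the TVM sources (MakeName is a hypothetical helper used only for illustration):

#include <string>
#include <utility>

// Hypothetical helper; its return value is a prvalue (temporary).
std::string MakeName() { return std::string("relax.nn.conv2d"); }

int main() {
  // Wrapping the temporary in std::move() forces a move construction and
  // triggers "moving a temporary object prevents copy elision".
  std::string pessimized = std::move(MakeName());

  // Initializing directly from the prvalue lets the compiler build the
  // object in place; since C++17 this elision is guaranteed.
  std::string elided = MakeName();

  return pessimized == elided ? 0 : 1;
}

Dropping the std::move(), as done throughout this commit, restores the in-place initialization and silences the warning without changing behavior.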

18 files changed, +28 -29 lines changed

src/relax/analysis/graph_partitioner.cc

Lines changed: 1 addition & 1 deletion
@@ -286,7 +286,7 @@ size_t GraphPartitioner::CountFusedArgs(const IndexedForwardGraph& graph,
 }

 void GraphPartitioner::InitGroups(const IndexedForwardGraph& graph) {
-  auto args_counter = [this](const tvm::Object* obj) {
+  auto args_counter = [](const tvm::Object* obj) {
     size_t args_num = 0;
     if (auto call_node = GetRef<ObjectRef>(obj).as<CallNode>()) {
       for (auto& it : call_node->args) {

src/relax/ir/expr.cc

Lines changed: 1 addition & 1 deletion
@@ -621,7 +621,7 @@ Function::Function(Array<Var> params, Expr body, Optional<StructInfo> ret_struct
   ObjectPtr<FunctionNode> n = make_object<FunctionNode>();
   n->params = std::move(params);
   n->body = std::move(body);
-  n->ret_struct_info = std::move(ret_struct_info.value());
+  n->ret_struct_info = ret_struct_info.value();
   n->is_pure = is_pure;
   n->struct_info_ = std::move(func_sinfo);
   n->attrs = std::move(attrs);

src/relax/op/nn/attention.cc

Lines changed: 1 addition & 1 deletion
@@ -37,7 +37,7 @@ Expr attention(Expr query, Expr key, Expr value, Optional<Expr> bias, Optional<F

   if (bias) {
     return Call(Op::Get("relax.nn.attention_bias"),
-                {std::move(query), std::move(key), std::move(value), std::move(bias.value())},
+                {std::move(query), std::move(key), std::move(value), bias.value()},
                 Attrs(attrs), {});
   }
   return Call(Op::Get("relax.nn.attention"), {std::move(query), std::move(key), std::move(value)},

src/relax/op/nn/convolution.cc

Lines changed: 2 additions & 2 deletions
@@ -595,7 +595,7 @@ Expr conv1d_transpose(Expr data, Expr weight, Array<IntImm> strides, Array<IntIm
   attrs->groups = groups;
   attrs->data_layout = data_layout;
   attrs->kernel_layout = std::move(kernel_layout);
-  attrs->out_layout = std::move(out_layout.value_or(data_layout));
+  attrs->out_layout = out_layout.value_or(data_layout);
   attrs->out_dtype = std::move(out_dtype.value_or(DataType::Void()));
   const Op& op = Op::Get("relax.nn.conv1d_transpose");
   return Call(op, {data, weight}, Attrs(attrs), {});
@@ -732,7 +732,7 @@ Expr conv2d_transpose(Expr data, Expr weight, Array<IntImm> strides, Array<IntIm
   attrs->groups = groups;
   attrs->data_layout = data_layout;
   attrs->kernel_layout = std::move(kernel_layout);
-  attrs->out_layout = std::move(out_layout.value_or(data_layout));
+  attrs->out_layout = out_layout.value_or(data_layout);
   attrs->out_dtype = std::move(out_dtype.value_or(DataType::Void()));
   const Op& op = Op::Get("relax.nn.conv2d_transpose");
   return Call(op, {data, weight}, Attrs(attrs), {});

src/relax/op/nn/nn.cc

Lines changed: 1 addition & 1 deletion
@@ -905,7 +905,7 @@ Expr nll_loss(Expr predictions, Expr targets, Optional<Expr> weights, String red

   static const Op& op = Op::Get("relax.nn.nll_loss");
   if (weights.defined()) {
-    return Call(op, {std::move(predictions), std::move(targets), std::move(weights.value())},
+    return Call(op, {std::move(predictions), std::move(targets), weights.value()},
                 Attrs{attrs}, {});
   } else {
     return Call(op, {std::move(predictions), std::move(targets)}, Attrs{attrs}, {});

src/relax/op/tensor/grad.cc

Lines changed: 3 additions & 4 deletions
@@ -101,10 +101,9 @@ Expr nll_loss_backward(Expr output_grad, Expr predictions, Expr targets, Optiona

   static const Op& op = Op::Get("relax.grad.nll_loss_backward");
   if (weights.defined()) {
-    return Call(op,
-                {std::move(output_grad), std::move(predictions), std::move(targets),
-                 std::move(weights.value())},
-                Attrs{attrs}, {});
+    return Call(
+        op, {std::move(output_grad), std::move(predictions), std::move(targets), weights.value()},
+        Attrs{attrs}, {});
   } else {
     return Call(op, {std::move(output_grad), std::move(predictions), std::move(targets)},
                 Attrs{attrs}, {});

src/relax/transform/convert_layout.cc

Lines changed: 1 addition & 1 deletion
@@ -127,7 +127,7 @@ class LayoutConvertMutator : public ExprMutator {
     ObjectPtr<LayoutTransformAttrs> attrs = make_object<LayoutTransformAttrs>();
     Array<IntImm> axis_separator;
     Array<IntImm> input_axis_separator;
-    attrs->index_map = std::move(Downcast<IndexMap>(LoadJSON(SaveJSON(index_map))));
+    attrs->index_map = Downcast<IndexMap>(LoadJSON(SaveJSON(index_map)));
     attrs->axis_separators = std::move(axis_separator);
     attrs->input_axis_separators = std::move(input_axis_separator);
     const Op& layout_transform_op_ = Op::Get("relax.layout_transform");

src/relax/transform/fuse_tir.cc

Lines changed: 1 addition & 1 deletion
@@ -846,7 +846,7 @@ class FusedTIRConstructor : public ExprVisitor {
     if (is_inplace) {
       const auto* attrs = call->attrs.as<CallTIRInplaceAttrs>();
       CHECK(attrs) << "Must have CallTIRInplaceAttrs for an in-place call";
-      output_idxs = std::move(GetInplaceOutputIndices(attrs->inplace_indices, num_inputs));
+      output_idxs = GetInplaceOutputIndices(attrs->inplace_indices, num_inputs);
     } else {
       for (size_t i = 0; i < output_size; i++) {
         output_idxs.push_back(num_inputs + i);

src/relax/transform/legalize_ops.cc

Lines changed: 1 addition & 1 deletion
@@ -63,7 +63,7 @@ class LegalizeMutator : public ExprMutator {
                   bool enable_warning)
       : ExprMutator(mod), mod_(std::move(mod)), enable_warning_(enable_warning) {
     if (cmap) {
-      cmap_ = std::move(cmap.value());
+      cmap_ = cmap.value();
     }
   }

src/relax/transform/remove_purity_checking.cc

Lines changed: 1 addition & 1 deletion
@@ -36,7 +36,7 @@ class PurityRemover : public ExprMutator {
     bool purity = func->is_pure;
     auto ret = func;
     if (purity) {
-      ret = std::move(WithAttr<Function>(func, relax::attr::kForcePure, true));
+      ret = WithAttr<Function>(func, relax::attr::kForcePure, true);
     }
     auto new_body = VisitExpr(ret->body);
     if (!new_body.same_as(ret->body)) {
