Enable clone_on_ref_ptr clippy lint on functions* #11468

Merged · 3 commits · Jul 15, 2024
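For context on the lint being enabled here: `clippy::clone_on_ref_ptr` flags `.clone()` called directly on reference-counted pointers (`Arc`, `Rc`) and asks for the explicit `Arc::clone(&x)` / `Rc::clone(&x)` form, so that a cheap reference-count bump is not mistaken for a deep copy of the underlying data. A minimal, hedged sketch of the pattern this diff applies throughout the `functions*` crates (the names below are illustrative, not taken from DataFusion):

```rust
use std::sync::Arc;

// `values` holds a potentially large buffer behind an Arc.
fn share(values: &Arc<Vec<i64>>) -> Arc<Vec<i64>> {
    // `values.clone()` would compile, but under clippy::clone_on_ref_ptr it is
    // rejected: it reads like a deep copy of the Vec. The explicit form makes
    // the cheap pointer-count increment obvious at the call site.
    Arc::clone(values)
}

fn main() {
    let data = Arc::new(vec![1_i64, 2, 3]);
    let shared = share(&data);
    // Both handles point at the same allocation; nothing was copied.
    assert_eq!(Arc::strong_count(&data), 2);
    assert_eq!(shared.as_slice(), &[1, 2, 3]);
}
```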
21 changes: 15 additions & 6 deletions datafusion/functions-aggregate/src/correlation.rs
@@ -19,6 +19,7 @@

use std::any::Any;
use std::fmt::Debug;
use std::sync::Arc;

use arrow::compute::{and, filter, is_not_null};
use arrow::{
@@ -192,13 +193,21 @@ impl Accumulator for CorrelationAccumulator {

fn merge_batch(&mut self, states: &[ArrayRef]) -> Result<()> {
let states_c = [
states[0].clone(),
states[1].clone(),
states[3].clone(),
states[5].clone(),
Arc::clone(&states[0]),
Arc::clone(&states[1]),
Arc::clone(&states[3]),
Arc::clone(&states[5]),
];
let states_s1 = [
Arc::clone(&states[0]),
Arc::clone(&states[1]),
Arc::clone(&states[2]),
];
let states_s2 = [
Arc::clone(&states[0]),
Arc::clone(&states[3]),
Arc::clone(&states[4]),
];
let states_s1 = [states[0].clone(), states[1].clone(), states[2].clone()];
let states_s2 = [states[0].clone(), states[3].clone(), states[4].clone()];

self.covar.merge_batch(&states_c)?;
self.stddev1.merge_batch(&states_s1)?;
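The hunk above regroups columns out of a `&[ArrayRef]` state slice; under the lint each element is shared with `Arc::clone` instead of `.clone()`. A condensed, hypothetical sketch of that pattern (`ArrayRef` is arrow's `Arc<dyn Array>`; the helper and indices are illustrative, not DataFusion code):

```rust
use std::sync::Arc;

use arrow::array::{ArrayRef, Int64Array};

// Pick a subset of state columns, sharing each Arc rather than copying data.
fn regroup(states: &[ArrayRef]) -> [ArrayRef; 3] {
    [
        Arc::clone(&states[0]), // explicit ref-count bump, clippy-clean
        Arc::clone(&states[1]),
        Arc::clone(&states[2]),
    ]
}

fn main() {
    let states: Vec<ArrayRef> = (0..4_i64)
        .map(|i| Arc::new(Int64Array::from(vec![i])) as ArrayRef)
        .collect();
    let subset = regroup(&states);
    assert_eq!(subset.len(), 3);
}
```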
16 changes: 8 additions & 8 deletions datafusion/functions-aggregate/src/first_last.rs
@@ -247,7 +247,7 @@ impl FirstValueAccumulator {
.iter()
.zip(self.ordering_req.iter())
.map(|(values, req)| SortColumn {
values: values.clone(),
values: Arc::clone(values),
options: Some(req.options),
})
.collect::<Vec<_>>();
@@ -547,7 +547,7 @@ impl LastValueAccumulator {
// Take the reverse ordering requirement. This enables us to
// use "fetch = 1" to get the last value.
SortColumn {
values: values.clone(),
values: Arc::clone(values),
options: Some(!req.options),
}
})
@@ -676,7 +676,7 @@ fn convert_to_sort_cols(
arrs.iter()
.zip(sort_exprs.iter())
.map(|(item, sort_expr)| SortColumn {
values: item.clone(),
values: Arc::clone(item),
options: Some(sort_expr.options),
})
.collect::<Vec<_>>()
@@ -707,7 +707,7 @@ mod tests {
for arr in arrs {
// Once first_value is set, accumulator should remember it.
// It shouldn't update first_value for each new batch
first_accumulator.update_batch(&[arr.clone()])?;
first_accumulator.update_batch(&[Arc::clone(&arr)])?;
// last_value should be updated for each new batch.
last_accumulator.update_batch(&[arr])?;
}
@@ -733,12 +733,12 @@
let mut first_accumulator =
FirstValueAccumulator::try_new(&DataType::Int64, &[], vec![], false)?;

first_accumulator.update_batch(&[arrs[0].clone()])?;
first_accumulator.update_batch(&[Arc::clone(&arrs[0])])?;
let state1 = first_accumulator.state()?;

let mut first_accumulator =
FirstValueAccumulator::try_new(&DataType::Int64, &[], vec![], false)?;
first_accumulator.update_batch(&[arrs[1].clone()])?;
first_accumulator.update_batch(&[Arc::clone(&arrs[1])])?;
let state2 = first_accumulator.state()?;

assert_eq!(state1.len(), state2.len());
@@ -763,12 +763,12 @@
let mut last_accumulator =
LastValueAccumulator::try_new(&DataType::Int64, &[], vec![], false)?;

last_accumulator.update_batch(&[arrs[0].clone()])?;
last_accumulator.update_batch(&[Arc::clone(&arrs[0])])?;
let state1 = last_accumulator.state()?;

let mut last_accumulator =
LastValueAccumulator::try_new(&DataType::Int64, &[], vec![], false)?;
last_accumulator.update_batch(&[arrs[1].clone()])?;
last_accumulator.update_batch(&[Arc::clone(&arrs[1])])?;
let state2 = last_accumulator.state()?;

assert_eq!(state1.len(), state2.len());
2 changes: 2 additions & 0 deletions datafusion/functions-aggregate/src/lib.rs
@@ -14,6 +14,8 @@
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
// Make cheap clones clear: https://github.com/apache/datafusion/issues/11143
#![deny(clippy::clone_on_ref_ptr)]

//! Aggregate Function packages for [DataFusion].
//!
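Worth noting about the crate-level switch above: because the lint is denied with an inner attribute in `lib.rs`, every module in the crate inherits it, and an intentional `.clone()` on a ref-counted pointer would need a local `#[allow]`. A hypothetical lib.rs sketch, not DataFusion code:

```rust
// Make cheap clones clear: deny .clone() on ref-counted pointers crate-wide.
#![deny(clippy::clone_on_ref_ptr)]

use std::sync::Arc;

/// The form the lint accepts everywhere in the crate.
pub fn shared(buffer: &Arc<Vec<u8>>) -> Arc<Vec<u8>> {
    Arc::clone(buffer)
}

/// A local escape hatch, should `.clone()` ever be unavoidable (e.g. in
/// macro-generated code); shown only to illustrate how opting out looks.
#[allow(clippy::clone_on_ref_ptr)]
pub fn shared_via_clone(buffer: &Arc<Vec<u8>>) -> Arc<Vec<u8>> {
    buffer.clone()
}
```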
4 changes: 2 additions & 2 deletions datafusion/functions-array/src/array_has.rs
@@ -279,7 +279,7 @@ fn general_array_has_dispatch<O: OffsetSizeTrait>(

let converter = RowConverter::new(vec![SortField::new(array.value_type())])?;

let element = sub_array.clone();
let element = Arc::clone(sub_array);
let sub_array = if comparison_type != ComparisonType::Single {
as_generic_list_array::<O>(sub_array)?
} else {
@@ -292,7 +292,7 @@
let sub_arr_values = if comparison_type != ComparisonType::Single {
converter.convert_columns(&[sub_arr])?
} else {
converter.convert_columns(&[element.clone()])?
converter.convert_columns(&[Arc::clone(&element)])?
};

let mut res = match comparison_type {
2 changes: 1 addition & 1 deletion datafusion/functions-array/src/concat.rs
@@ -249,7 +249,7 @@ pub(crate) fn array_concat_inner(args: &[ArrayRef]) -> Result<ArrayRef> {
return not_impl_err!("Array is not type '{base_type:?}'.");
}
if !base_type.eq(&DataType::Null) {
new_args.push(arg.clone());
new_args.push(Arc::clone(arg));
}
}

4 changes: 2 additions & 2 deletions datafusion/functions-array/src/flatten.rs
@@ -77,7 +77,7 @@ impl ScalarUDFImpl for Flatten {
get_base_type(field.data_type())
}
Null | List(_) | LargeList(_) => Ok(data_type.to_owned()),
FixedSizeList(field, _) => Ok(List(field.clone())),
FixedSizeList(field, _) => Ok(List(Arc::clone(field))),
_ => exec_err!(
"Not reachable, data_type should be List, LargeList or FixedSizeList"
),
@@ -115,7 +115,7 @@ pub fn flatten_inner(args: &[ArrayRef]) -> Result<ArrayRef> {
let flattened_array = flatten_internal::<i64>(list_arr.clone(), None)?;
Ok(Arc::new(flattened_array) as ArrayRef)
}
Null => Ok(args[0].clone()),
Null => Ok(Arc::clone(&args[0])),
_ => {
exec_err!("flatten does not support type '{array_type:?}'")
}
2 changes: 2 additions & 0 deletions datafusion/functions-array/src/lib.rs
@@ -14,6 +14,8 @@
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
// Make cheap clones clear: https://github.com/apache/datafusion/issues/11143
#![deny(clippy::clone_on_ref_ptr)]

//! Array Functions for [DataFusion].
//!
8 changes: 4 additions & 4 deletions datafusion/functions-array/src/resize.rs
@@ -67,8 +67,8 @@ impl ScalarUDFImpl for ArrayResize {

fn return_type(&self, arg_types: &[DataType]) -> Result<DataType> {
match &arg_types[0] {
List(field) | FixedSizeList(field, _) => Ok(List(field.clone())),
LargeList(field) => Ok(LargeList(field.clone())),
List(field) | FixedSizeList(field, _) => Ok(List(Arc::clone(field))),
LargeList(field) => Ok(LargeList(Arc::clone(field))),
_ => exec_err!(
"Not reachable, data_type should be List, LargeList or FixedSizeList"
),
@@ -92,7 +92,7 @@ pub(crate) fn array_resize_inner(arg: &[ArrayRef]) -> Result<ArrayRef> {

let new_len = as_int64_array(&arg[1])?;
let new_element = if arg.len() == 3 {
Some(arg[2].clone())
Some(Arc::clone(&arg[2]))
} else {
None
};
@@ -168,7 +168,7 @@ fn general_list_resize<O: OffsetSizeTrait + TryInto<i64>>(

let data = mutable.freeze();
Ok(Arc::new(GenericListArray::<O>::try_new(
field.clone(),
Arc::clone(field),
OffsetBuffer::<O>::new(offsets.into()),
arrow_array::make_array(data),
None,
4 changes: 2 additions & 2 deletions datafusion/functions-array/src/reverse.rs
@@ -93,7 +93,7 @@ pub fn array_reverse_inner(arg: &[ArrayRef]) -> Result<ArrayRef> {
let array = as_large_list_array(&arg[0])?;
general_array_reverse::<i64>(array, field)
}
Null => Ok(arg[0].clone()),
Null => Ok(Arc::clone(&arg[0])),
array_type => exec_err!("array_reverse does not support type '{array_type:?}'."),
}
}
@@ -137,7 +137,7 @@ fn general_array_reverse<O: OffsetSizeTrait + TryFrom<i64>>(

let data = mutable.freeze();
Ok(Arc::new(GenericListArray::<O>::try_new(
field.clone(),
Arc::clone(field),
OffsetBuffer::<O>::new(offsets.into()),
arrow_array::make_array(data),
Some(nulls.into()),
12 changes: 6 additions & 6 deletions datafusion/functions-array/src/set_ops.rs
@@ -213,7 +213,7 @@ fn array_distinct_inner(args: &[ArrayRef]) -> Result<ArrayRef> {

// handle null
if args[0].data_type() == &Null {
return Ok(args[0].clone());
return Ok(Arc::clone(&args[0]));
}

// handle for list & largelist
@@ -314,7 +314,7 @@ fn generic_set_lists<OffsetSize: OffsetSizeTrait>(
offsets.push(last_offset + OffsetSize::usize_as(rows.len()));
let arrays = converter.convert_rows(rows)?;
let array = match arrays.first() {
Some(array) => array.clone(),
Some(array) => Arc::clone(array),
None => {
return internal_err!("{set_op}: failed to get array from rows");
}
@@ -370,12 +370,12 @@ fn general_set_op(
(List(field), List(_)) => {
let array1 = as_list_array(&array1)?;
let array2 = as_list_array(&array2)?;
generic_set_lists::<i32>(array1, array2, field.clone(), set_op)
generic_set_lists::<i32>(array1, array2, Arc::clone(field), set_op)
}
(LargeList(field), LargeList(_)) => {
let array1 = as_large_list_array(&array1)?;
let array2 = as_large_list_array(&array2)?;
generic_set_lists::<i64>(array1, array2, field.clone(), set_op)
generic_set_lists::<i64>(array1, array2, Arc::clone(field), set_op)
}
(data_type1, data_type2) => {
internal_err!(
@@ -426,7 +426,7 @@ fn general_array_distinct<OffsetSize: OffsetSizeTrait>(
offsets.push(last_offset + OffsetSize::usize_as(rows.len()));
let arrays = converter.convert_rows(rows)?;
let array = match arrays.first() {
Some(array) => array.clone(),
Some(array) => Arc::clone(array),
None => {
return internal_err!("array_distinct: failed to get array from rows")
}
@@ -437,7 +437,7 @@
let new_arrays_ref = new_arrays.iter().map(|v| v.as_ref()).collect::<Vec<_>>();
let values = compute::concat(&new_arrays_ref)?;
Ok(Arc::new(GenericListArray::<OffsetSize>::try_new(
field.clone(),
Arc::clone(field),
offsets,
values,
None,
2 changes: 1 addition & 1 deletion datafusion/functions-array/src/sort.rs
@@ -121,7 +121,7 @@ pub fn array_sort_inner(args: &[ArrayRef]) -> Result<ArrayRef> {
let list_array = as_list_array(&args[0])?;
let row_count = list_array.len();
if row_count == 0 {
return Ok(args[0].clone());
return Ok(Arc::clone(&args[0]));
}

let mut array_lengths = vec![];
2 changes: 1 addition & 1 deletion datafusion/functions-array/src/string.rs
@@ -381,7 +381,7 @@ pub(super) fn array_to_string_inner(args: &[ArrayRef]) -> Result<ArrayRef> {
let delimiter = delimiters[0].unwrap();
let s = compute_array_to_string(
&mut arg,
arr.clone(),
Arc::clone(arr),
delimiter.to_string(),
null_string,
with_null_string,
14 changes: 8 additions & 6 deletions datafusion/functions-array/src/utils.rs
@@ -105,7 +105,7 @@ pub(crate) fn align_array_dimensions<O: OffsetSizeTrait>(
.zip(args_ndim.iter())
.map(|(array, ndim)| {
if ndim < max_ndim {
let mut aligned_array = array.clone();
let mut aligned_array = Arc::clone(&array);
for _ in 0..(max_ndim - ndim) {
let data_type = aligned_array.data_type().to_owned();
let array_lengths = vec![1; aligned_array.len()];
@@ -120,7 +120,7 @@
}
Ok(aligned_array)
} else {
Ok(array.clone())
Ok(Arc::clone(&array))
}
})
.collect();
@@ -277,10 +277,12 @@ mod tests {
Some(vec![Some(6), Some(7), Some(8)]),
]));

let array2d_1 =
Arc::new(array_into_list_array_nullable(array1d_1.clone())) as ArrayRef;
let array2d_2 =
Arc::new(array_into_list_array_nullable(array1d_2.clone())) as ArrayRef;
let array2d_1 = Arc::new(array_into_list_array_nullable(
Arc::clone(&array1d_1) as ArrayRef
)) as ArrayRef;
let array2d_2 = Arc::new(array_into_list_array_nullable(
Arc::clone(&array1d_2) as ArrayRef
)) as ArrayRef;

let res = align_array_dimensions::<i32>(vec![
array1d_1.to_owned(),
3 changes: 2 additions & 1 deletion datafusion/functions/benches/concat.rs
@@ -15,6 +15,7 @@
// specific language governing permissions and limitations
// under the License.

use arrow::array::ArrayRef;
use arrow::util::bench_util::create_string_array_with_len;
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use datafusion_common::ScalarValue;
@@ -26,7 +27,7 @@ fn create_args(size: usize, str_len: usize) -> Vec<ColumnarValue> {
let array = Arc::new(create_string_array_with_len::<i32>(size, 0.2, str_len));
let scalar = ScalarValue::Utf8(Some(", ".to_string()));
vec![
ColumnarValue::Array(array.clone()),
ColumnarValue::Array(Arc::clone(&array) as ArrayRef),
ColumnarValue::Scalar(scalar),
ColumnarValue::Array(array),
]
24 changes: 16 additions & 8 deletions datafusion/functions/benches/regx.rs
@@ -83,8 +83,12 @@ fn criterion_benchmark(c: &mut Criterion) {

b.iter(|| {
black_box(
regexp_like::<i32>(&[data.clone(), regex.clone(), flags.clone()])
.expect("regexp_like should work on valid values"),
regexp_like::<i32>(&[
Arc::clone(&data),
Arc::clone(&regex),
Arc::clone(&flags),
])
.expect("regexp_like should work on valid values"),
)
})
});
@@ -97,8 +101,12 @@

b.iter(|| {
black_box(
regexp_match::<i32>(&[data.clone(), regex.clone(), flags.clone()])
.expect("regexp_match should work on valid values"),
regexp_match::<i32>(&[
Arc::clone(&data),
Arc::clone(&regex),
Arc::clone(&flags),
])
.expect("regexp_match should work on valid values"),
)
})
});
@@ -115,10 +123,10 @@
b.iter(|| {
black_box(
regexp_replace::<i32>(&[
data.clone(),
regex.clone(),
replacement.clone(),
flags.clone(),
Arc::clone(&data),
Arc::clone(&regex),
Arc::clone(&replacement),
Arc::clone(&flags),
])
.expect("regexp_replace should work on valid values"),
)
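The benchmark changes above follow the same rule inside `b.iter` closures: the input arrays are built once and then shared into each iteration with `Arc::clone`, so the measured loop only pays for a pointer copy. A hedged, self-contained Criterion sketch (the names and the benchmarked operation are illustrative, not from this PR):

```rust
use std::sync::Arc;

use arrow::array::{ArrayRef, StringArray};
use criterion::{black_box, criterion_group, criterion_main, Criterion};

fn bench_shared_input(c: &mut Criterion) {
    // Built once, outside the measured loop.
    let data: ArrayRef = Arc::new(StringArray::from(vec!["a", "bb", "ccc"]));

    c.bench_function("len_of_shared_array", |b| {
        b.iter(|| {
            // Cheap per-iteration share of the same underlying buffers.
            let arr = Arc::clone(&data);
            black_box(arr.len())
        })
    });
}

criterion_group!(benches, bench_shared_input);
criterion_main!(benches);
```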