diff --git a/datafusion-cli/src/exec.rs b/datafusion-cli/src/exec.rs
index 19bff0528b778..cfbc97ecbe238 100644
--- a/datafusion-cli/src/exec.rs
+++ b/datafusion-cli/src/exec.rs
@@ -81,7 +81,7 @@ pub async fn exec_from_lines(
                 Ok(_) => {}
                 Err(err) => eprintln!("{err}"),
             }
-            query = "".to_owned();
+            query = "".to_string();
         } else {
             query.push('\n');
         }
diff --git a/datafusion/core/src/datasource/avro_to_arrow/arrow_array_reader.rs b/datafusion/core/src/datasource/avro_to_arrow/arrow_array_reader.rs
index a16c1ae3333fb..3a5d50bba07fc 100644
--- a/datafusion/core/src/datasource/avro_to_arrow/arrow_array_reader.rs
+++ b/datafusion/core/src/datasource/avro_to_arrow/arrow_array_reader.rs
@@ -203,13 +203,9 @@ impl<'a, R: Read> AvroArrowArrayReader<'a, R> {
         Arc::new(builder.finish())
     }
 
-    fn build_primitive_array<T>(
-        &self,
-        rows: RecordSlice,
-        col_name: &str,
-    ) -> ArrayRef
+    fn build_primitive_array<T>(&self, rows: RecordSlice, col_name: &str) -> ArrayRef
     where
-        T: ArrowNumericType,
+        T: ArrowNumericType + Resolver,
         T::Native: num_traits::cast::NumCast,
     {
         Arc::new(
diff --git a/datafusion/core/src/datasource/file_format/json.rs b/datafusion/core/src/datasource/file_format/json.rs
index efc0aa4328d85..9f526e1c87b4b 100644
--- a/datafusion/core/src/datasource/file_format/json.rs
+++ b/datafusion/core/src/datasource/file_format/json.rs
@@ -219,7 +219,7 @@ impl BatchSerializer for JsonSerializer {
 pub struct JsonSink {
     /// Config options for writing data
     config: FileSinkConfig,
-    ///
+    /// Writer options for underlying Json writer
     writer_options: JsonWriterOptions,
 }
 
diff --git a/datafusion/core/src/datasource/file_format/parquet.rs b/datafusion/core/src/datasource/file_format/parquet.rs
index 7ec7d4540fff0..fa379eb5b4450 100644
--- a/datafusion/core/src/datasource/file_format/parquet.rs
+++ b/datafusion/core/src/datasource/file_format/parquet.rs
@@ -536,7 +536,7 @@ async fn fetch_statistics(
 pub struct ParquetSink {
     /// Config options for writing data
     config: FileSinkConfig,
-    ///
+    /// Underlying parquet options
     parquet_options: TableParquetOptions,
     /// File metadata from successfully produced parquet files. The Mutex is only used
     /// to allow inserting to HashMap from behind borrowed reference in DataSink::write_all.
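Note on the `build_primitive_array` change above (and the matching rewrites in `resize.rs` and `reverse.rs` further down): moving a bound between the generic parameter list and a `where` clause is purely stylistic, since the compiler treats the two forms identically. A minimal, self-contained sketch of that equivalence, using toy `Display + Clone` bounds rather than DataFusion's own traits:

    use std::fmt::Display;

    // Inline form: the bounds sit directly on the type parameter.
    fn describe_inline<T: Display + Clone>(value: T) -> String {
        format!("{}", value.clone())
    }

    // Equivalent `where`-clause form; collapsing one into the other, as the
    // hunks in this diff do, changes no behavior.
    fn describe_where<T>(value: T) -> String
    where
        T: Display + Clone,
    {
        format!("{}", value.clone())
    }

    fn main() {
        assert_eq!(describe_inline(42), describe_where(42));
    }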
diff --git a/datafusion/core/src/datasource/physical_plan/file_stream.rs b/datafusion/core/src/datasource/physical_plan/file_stream.rs
index 619bcb29e2cc2..9732d08c7a1d4 100644
--- a/datafusion/core/src/datasource/physical_plan/file_stream.rs
+++ b/datafusion/core/src/datasource/physical_plan/file_stream.rs
@@ -519,7 +519,6 @@ mod tests {
     use std::sync::Arc;
 
     use super::*;
-    use crate::datasource::file_format::write::BatchSerializer;
     use crate::datasource::object_store::ObjectStoreUrl;
     use crate::prelude::SessionContext;
     use crate::test::{make_partition, object_store::register_test_store};
@@ -527,8 +526,6 @@ mod tests {
     use arrow_schema::Schema;
     use datafusion_common::{internal_err, Statistics};
 
-    use bytes::Bytes;
-
     /// Test `FileOpener` which will simulate errors during file opening or scanning
     #[derive(Default)]
     struct TestOpener {
@@ -974,14 +971,4 @@ mod tests {
 
         Ok(())
     }
-
-    struct TestSerializer {
-        bytes: Bytes,
-    }
-
-    impl BatchSerializer for TestSerializer {
-        fn serialize(&self, _batch: RecordBatch, _initial: bool) -> Result<Bytes> {
-            Ok(self.bytes.clone())
-        }
-    }
 }
diff --git a/datafusion/core/src/execution/context/avro.rs b/datafusion/core/src/execution/context/avro.rs
index 2703529264e0a..e829f6123eab4 100644
--- a/datafusion/core/src/execution/context/avro.rs
+++ b/datafusion/core/src/execution/context/avro.rs
@@ -57,29 +57,3 @@ impl SessionContext {
         Ok(())
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    use async_trait::async_trait;
-
-    // Test for compilation error when calling read_* functions from an #[async_trait] function.
-    // See https://github.com/apache/datafusion/issues/1154
-    #[async_trait]
-    trait CallReadTrait {
-        async fn call_read_avro(&self) -> DataFrame;
-    }
-
-    struct CallRead {}
-
-    #[async_trait]
-    impl CallReadTrait for CallRead {
-        async fn call_read_avro(&self) -> DataFrame {
-            let ctx = SessionContext::new();
-            ctx.read_avro("dummy", AvroReadOptions::default())
-                .await
-                .unwrap()
-        }
-    }
-}
diff --git a/datafusion/core/src/execution/context/csv.rs b/datafusion/core/src/execution/context/csv.rs
index 504ebf6d77cf0..6ba1a14600cb0 100644
--- a/datafusion/core/src/execution/context/csv.rs
+++ b/datafusion/core/src/execution/context/csv.rs
@@ -90,7 +90,6 @@ mod tests {
     use crate::assert_batches_eq;
     use crate::test_util::{plan_and_collect, populate_csv_partitions};
 
-    use async_trait::async_trait;
     use tempfile::TempDir;
 
     #[tokio::test]
@@ -125,21 +124,4 @@ mod tests {
 
         Ok(())
     }
-
-    // Test for compilation error when calling read_* functions from an #[async_trait] function.
-    // See https://github.com/apache/datafusion/issues/1154
-    #[async_trait]
-    trait CallReadTrait {
-        async fn call_read_csv(&self) -> DataFrame;
-    }
-
-    struct CallRead {}
-
-    #[async_trait]
-    impl CallReadTrait for CallRead {
-        async fn call_read_csv(&self) -> DataFrame {
-            let ctx = SessionContext::new();
-            ctx.read_csv("dummy", CsvReadOptions::new()).await.unwrap()
-        }
-    }
 }
diff --git a/datafusion/core/src/execution/context/parquet.rs b/datafusion/core/src/execution/context/parquet.rs
index f7ab15d95baa5..fef20df6e69dc 100644
--- a/datafusion/core/src/execution/context/parquet.rs
+++ b/datafusion/core/src/execution/context/parquet.rs
@@ -84,7 +84,6 @@ mod tests {
     use datafusion_common::config::TableParquetOptions;
     use datafusion_execution::config::SessionConfig;
 
-    use async_trait::async_trait;
     use tempfile::tempdir;
 
     #[tokio::test]
@@ -331,23 +330,4 @@ mod tests {
         assert_eq!(total_rows, 5);
         Ok(())
     }
-
-    // Test for compilation error when calling read_* functions from an #[async_trait] function.
-    // See https://github.com/apache/datafusion/issues/1154
-    #[async_trait]
-    trait CallReadTrait {
-        async fn call_read_parquet(&self) -> DataFrame;
-    }
-
-    struct CallRead {}
-
-    #[async_trait]
-    impl CallReadTrait for CallRead {
-        async fn call_read_parquet(&self) -> DataFrame {
-            let ctx = SessionContext::new();
-            ctx.read_parquet("dummy", ParquetReadOptions::default())
-                .await
-                .unwrap()
-        }
-    }
 }
diff --git a/datafusion/core/src/physical_optimizer/enforce_distribution.rs b/datafusion/core/src/physical_optimizer/enforce_distribution.rs
index e8fc37cc22bee..c07f2c5dcf249 100644
--- a/datafusion/core/src/physical_optimizer/enforce_distribution.rs
+++ b/datafusion/core/src/physical_optimizer/enforce_distribution.rs
@@ -411,7 +411,7 @@ fn adjust_input_keys_ordering(
     } else {
         // By default, push down the parent requirements to children
         for child in requirements.children.iter_mut() {
-            child.data = requirements.data.clone();
+            child.data.clone_from(&requirements.data);
         }
     }
     Ok(Transformed::yes(requirements))
diff --git a/datafusion/core/src/physical_planner.rs b/datafusion/core/src/physical_planner.rs
index a041ab31f7cf6..391ded84eab9f 100644
--- a/datafusion/core/src/physical_planner.rs
+++ b/datafusion/core/src/physical_planner.rs
@@ -2035,7 +2035,7 @@ impl DefaultPhysicalPlanner {
                     let config = &session_state.config_options().explain;
 
                     if !config.physical_plan_only {
-                        stringified_plans = e.stringified_plans.clone();
+                        stringified_plans.clone_from(&e.stringified_plans);
                         if e.logical_optimization_succeeded {
                             stringified_plans.push(e.plan.to_stringified(FinalLogicalPlan));
                         }
diff --git a/datafusion/core/tests/user_defined/user_defined_table_functions.rs b/datafusion/core/tests/user_defined/user_defined_table_functions.rs
index b5d10b1c5b9ba..73428515692dc 100644
--- a/datafusion/core/tests/user_defined/user_defined_table_functions.rs
+++ b/datafusion/core/tests/user_defined/user_defined_table_functions.rs
@@ -185,7 +185,7 @@ impl TableFunctionImpl for SimpleCsvTableFunc {
         for expr in exprs {
             match expr {
                 Expr::Literal(ScalarValue::Utf8(Some(ref path))) => {
-                    filepath = path.clone()
+                    filepath.clone_from(path);
                 }
                 expr => new_exprs.push(expr.clone()),
             }
diff --git a/datafusion/expr/src/logical_plan/plan.rs b/datafusion/expr/src/logical_plan/plan.rs
index 64c5b56a40802..23f5280377a30 100644
--- a/datafusion/expr/src/logical_plan/plan.rs
+++ b/datafusion/expr/src/logical_plan/plan.rs
@@ -2961,54 +2961,6 @@ digraph {
             .unwrap()
     }
 
-    /// Extension plan that panic when trying to access its input plan
-    #[derive(Debug)]
-    struct NoChildExtension {
-        empty_schema: DFSchemaRef,
-    }
-
-    impl UserDefinedLogicalNode for NoChildExtension {
-        fn as_any(&self) -> &dyn std::any::Any {
-            unimplemented!()
-        }
-
-        fn name(&self) -> &str {
-            unimplemented!()
-        }
-
-        fn inputs(&self) -> Vec<&LogicalPlan> {
-            panic!("Should not be called")
-        }
-
-        fn schema(&self) -> &DFSchemaRef {
-            &self.empty_schema
-        }
-
-        fn expressions(&self) -> Vec<Expr> {
-            unimplemented!()
-        }
-
-        fn fmt_for_explain(&self, _: &mut fmt::Formatter) -> fmt::Result {
-            unimplemented!()
-        }
-
-        fn from_template(
-            &self,
-            _: &[Expr],
-            _: &[LogicalPlan],
-        ) -> Arc<dyn UserDefinedLogicalNode> {
-            unimplemented!()
-        }
-
-        fn dyn_hash(&self, _: &mut dyn Hasher) {
-            unimplemented!()
-        }
-
-        fn dyn_eq(&self, _: &dyn UserDefinedLogicalNode) -> bool {
-            unimplemented!()
-        }
-    }
-
     #[test]
     fn test_replace_invalid_placeholder() {
         // test empty placeholder
diff --git a/datafusion/functions-array/src/resize.rs b/datafusion/functions-array/src/resize.rs
index c5855d0544947..561e98e8b76f2 100644
--- a/datafusion/functions-array/src/resize.rs
+++ b/datafusion/functions-array/src/resize.rs
@@ -112,15 +112,12 @@ pub(crate) fn array_resize_inner(arg: &[ArrayRef]) -> Result<ArrayRef> {
 }
 
 /// array_resize keep the original array and append the default element to the end
-fn general_list_resize<O: OffsetSizeTrait>(
+fn general_list_resize<O: OffsetSizeTrait + TryInto<i64>>(
     array: &GenericListArray<O>,
     count_array: &Int64Array,
     field: &FieldRef,
     default_element: Option<ArrayRef>,
-) -> Result<ArrayRef>
-where
-    O: TryInto<i64>,
-{
+) -> Result<ArrayRef> {
     let data_type = array.value_type();
     let values = array.values();
 
diff --git a/datafusion/functions-array/src/reverse.rs b/datafusion/functions-array/src/reverse.rs
index 8324c407bd869..9be6405657033 100644
--- a/datafusion/functions-array/src/reverse.rs
+++ b/datafusion/functions-array/src/reverse.rs
@@ -99,13 +99,10 @@ pub fn array_reverse_inner(arg: &[ArrayRef]) -> Result<ArrayRef> {
     }
 }
 
-fn general_array_reverse<O: OffsetSizeTrait>(
+fn general_array_reverse<O: OffsetSizeTrait + TryFrom<i64>>(
     array: &GenericListArray<O>,
     field: &FieldRef,
-) -> Result<ArrayRef>
-where
-    O: TryFrom<i64>,
-{
+) -> Result<ArrayRef> {
     let values = array.values();
     let original_data = values.to_data();
     let capacity = Capacities::Array(original_data.len());
diff --git a/datafusion/optimizer/src/rewrite_disjunctive_predicate.rs b/datafusion/optimizer/src/rewrite_disjunctive_predicate.rs
index b97974c859996..ba865fa1e9447 100644
--- a/datafusion/optimizer/src/rewrite_disjunctive_predicate.rs
+++ b/datafusion/optimizer/src/rewrite_disjunctive_predicate.rs
@@ -288,7 +288,7 @@ fn delete_duplicate_predicates(or_predicates: Vec<Predicate>) -> Predicate {
             Predicate::And { args } => {
                 let args_num = args.len();
                 if shortest_exprs.is_empty() || args_num < shortest_exprs_len {
-                    shortest_exprs = (*args).clone();
+                    shortest_exprs.clone_from(args);
                     shortest_exprs_len = args_num;
                 }
             }
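Note on the recurring clone-assignment rewrites in this diff (`enforce_distribution.rs`, `physical_planner.rs`, `user_defined_table_functions.rs`, `rewrite_disjunctive_predicate.rs`): `a.clone_from(&b)` is semantically equivalent to `a = b.clone()`, but types such as `Vec` and `String` override `clone_from` to reuse `a`'s existing allocation, which is the change clippy's `assigning_clones` lint suggests. A minimal, self-contained sketch with hypothetical variables:

    fn main() {
        let source = vec![1, 2, 3];
        let mut target = vec![0; 100];

        // `target = source.clone();` would build a brand-new Vec and drop
        // target's old buffer; `clone_from` may reuse that buffer instead.
        target.clone_from(&source);
        assert_eq!(target, source);
    }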