diff --git a/.gitignore b/.gitignore index 4ae32925d908e..8466a72adaec8 100644 --- a/.gitignore +++ b/.gitignore @@ -31,6 +31,7 @@ docker_cache *.orig .*.swp .*.swo +*.pending-snap venv/* diff --git a/Cargo.lock b/Cargo.lock index 9dd40437fba9a..efdd102b9c881 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2632,6 +2632,7 @@ dependencies = [ "env_logger", "indexmap 2.11.3", "insta", + "itertools 0.14.0", "log", "paste", "recursive", diff --git a/datafusion-cli/CONTRIBUTING.md b/datafusion-cli/CONTRIBUTING.md index 7c19c8e8bf6a2..8be656ec4ee34 100644 --- a/datafusion-cli/CONTRIBUTING.md +++ b/datafusion-cli/CONTRIBUTING.md @@ -21,10 +21,16 @@ ## Running Tests -Tests can be run using `cargo` +First check out test files with ```shell -cargo test +git submodule update --init +``` + +Then run all the tests with + +```shell +cargo test --all-targets ``` ## Running Storage Integration Tests diff --git a/datafusion-examples/examples/custom_file_casts.rs b/datafusion-examples/examples/custom_file_casts.rs index e30ea1fb7ebe7..65ca096820640 100644 --- a/datafusion-examples/examples/custom_file_casts.rs +++ b/datafusion-examples/examples/custom_file_casts.rs @@ -183,14 +183,18 @@ impl PhysicalExprAdapter for CustomCastsPhysicalExprAdapter { // For example, [DataFusion Comet](https://github.com/apache/datafusion-comet) has a [custom cast kernel](https://github.com/apache/datafusion-comet/blob/b4ac876ab420ed403ac7fc8e1b29f42f1f442566/native/spark-expr/src/conversion_funcs/cast.rs#L133-L138). expr.transform(|expr| { if let Some(cast) = expr.as_any().downcast_ref::<CastExpr>() { - let input_data_type = cast.expr().data_type(&self.physical_file_schema)?; + let input_data_type = + cast.expr().data_type(&self.physical_file_schema)?; let output_data_type = cast.data_type(&self.physical_file_schema)?; if !cast.is_bigger_cast(&input_data_type) { - return not_impl_err!("Unsupported CAST from {input_data_type:?} to {output_data_type:?}") + return not_impl_err!( + "Unsupported CAST from {input_data_type} to {output_data_type}" + ); } } Ok(Transformed::no(expr)) - }).data() + }) + .data() } fn with_partition_values( diff --git a/datafusion/catalog/src/information_schema.rs b/datafusion/catalog/src/information_schema.rs index 17dbc31d958bb..d733551f44051 100644 --- a/datafusion/catalog/src/information_schema.rs +++ b/datafusion/catalog/src/information_schema.rs @@ -480,7 +480,7 @@ fn get_udwf_args_and_return_types( #[inline] fn remove_native_type_prefix(native_type: NativeType) -> String { - format!("{native_type:?}") + format!("{native_type}") } #[async_trait] @@ -827,8 +827,7 @@ impl InformationSchemaColumnsBuilder { self.is_nullables.append_value(nullable_str); // "System supplied type" --> Use debug format of the datatype - self.data_types - .append_value(format!("{:?}", field.data_type())); + self.data_types.append_value(field.data_type().to_string()); // "If data_type identifies a character or bit string type, the // declared maximum length; null for all other data types or diff --git a/datafusion/common/src/dfschema.rs b/datafusion/common/src/dfschema.rs index 48d25550312ce..d69810db1965c 100644 --- a/datafusion/common/src/dfschema.rs +++ b/datafusion/common/src/dfschema.rs @@ -669,8 +669,8 @@ impl DFSchema { )) { _plan_err!( - "Schema mismatch: Expected field '{}' with type {:?}, \ - but got '{}' with type {:?}.", + "Schema mismatch: Expected field '{}' with type {}, \ + but got '{}' with type {}.", f1.name(), f1.data_type(), f2.name(), @@ -1063,7 +1063,7 @@ fn format_simple_data_type(data_type: &DataType) -> String {
format!("decimal256({precision}, {scale})") } DataType::Null => "null".to_string(), - _ => format!("{data_type:?}").to_lowercase(), + _ => format!("{data_type}").to_lowercase(), } } @@ -1308,8 +1308,8 @@ impl SchemaExt for Schema { .try_for_each(|(f1, f2)| { if f1.name() != f2.name() || (!DFSchema::datatype_is_logically_equal(f1.data_type(), f2.data_type()) && !can_cast_types(f2.data_type(), f1.data_type())) { _plan_err!( - "Inserting query schema mismatch: Expected table field '{}' with type {:?}, \ - but got '{}' with type {:?}.", + "Inserting query schema mismatch: Expected table field '{}' with type {}, \ + but got '{}' with type {}.", f1.name(), f1.data_type(), f2.name(), diff --git a/datafusion/common/src/nested_struct.rs b/datafusion/common/src/nested_struct.rs index 6e8a380df9215..38060e370bfa1 100644 --- a/datafusion/common/src/nested_struct.rs +++ b/datafusion/common/src/nested_struct.rs @@ -87,7 +87,7 @@ fn cast_struct_column( } else { // Return error if source is not a struct type _plan_err!( - "Cannot cast column of type {:?} to struct type. Source must be a struct to cast to struct.", + "Cannot cast column of type {} to struct type. Source must be a struct to cast to struct.", source_col.data_type() ) } @@ -128,7 +128,7 @@ fn cast_struct_column( /// The struct casting logic requires that the source column must already be a struct type. /// This makes the function useful for: /// - Schema evolution scenarios where struct layouts change over time -/// - Data migration between different struct schemas +/// - Data migration between different struct schemas /// - Type-safe data processing pipelines that maintain struct type integrity /// /// # Arguments @@ -165,7 +165,7 @@ pub fn cast_column( /// Validates compatibility between source and target struct fields for casting operations. 
/// /// This function implements comprehensive struct compatibility checking by examining: -/// - Field name matching between source and target structs +/// - Field name matching between source and target structs /// - Type castability for each matching field (including recursive struct validation) /// - Proper handling of missing fields (target fields not in source are allowed - filled with nulls) /// - Proper handling of extra fields (source fields not in target are allowed - ignored) @@ -173,7 +173,7 @@ pub fn cast_column( /// # Compatibility Rules /// - **Field Matching**: Fields are matched by name (case-sensitive) /// - **Missing Target Fields**: Allowed - will be filled with null values during casting -/// - **Extra Source Fields**: Allowed - will be ignored during casting +/// - **Extra Source Fields**: Allowed - will be ignored during casting /// - **Type Compatibility**: Each matching field must be castable using Arrow's type system /// - **Nested Structs**: Recursively validates nested struct compatibility /// @@ -188,7 +188,7 @@ pub fn cast_column( /// # Examples /// ```text /// // Compatible: source has extra field, target has missing field -/// // Source: {a: i32, b: string, c: f64} +/// // Source: {a: i32, b: string, c: f64} /// // Target: {a: i64, d: bool} /// // Result: Ok(()) - 'a' can cast i32->i64, 'b','c' ignored, 'd' filled with nulls /// @@ -230,7 +230,7 @@ pub fn validate_struct_compatibility( target_field.data_type(), ) { return _plan_err!( - "Cannot cast struct field '{}' from type {:?} to type {:?}", + "Cannot cast struct field '{}' from type {} to type {}", target_field.name(), source_field.data_type(), target_field.data_type() diff --git a/datafusion/common/src/param_value.rs b/datafusion/common/src/param_value.rs index d2802c096da1b..7582cff56f87a 100644 --- a/datafusion/common/src/param_value.rs +++ b/datafusion/common/src/param_value.rs @@ -48,7 +48,7 @@ impl ParamValues { for (i, (param_type, value)) in iter.enumerate() { if *param_type != value.data_type() { return _plan_err!( - "Expected parameter of type {:?}, got {:?} at index {}", + "Expected parameter of type {}, got {:?} at index {}", param_type, value.data_type(), i diff --git a/datafusion/common/src/scalar/mod.rs b/datafusion/common/src/scalar/mod.rs index c5e764272b8f9..67287546f4380 100644 --- a/datafusion/common/src/scalar/mod.rs +++ b/datafusion/common/src/scalar/mod.rs @@ -1137,7 +1137,7 @@ impl ScalarValue { DataType::Null => ScalarValue::Null, _ => { return _not_impl_err!( - "Can't create a null scalar from data_type \"{data_type:?}\"" + "Can't create a null scalar from data_type \"{data_type}\"" ); } }) @@ -1193,7 +1193,7 @@ impl ScalarValue { match datatype { DataType::Float32 => Ok(ScalarValue::from(std::f32::consts::PI)), DataType::Float64 => Ok(ScalarValue::from(std::f64::consts::PI)), - _ => _internal_err!("PI is not supported for data type: {:?}", datatype), + _ => _internal_err!("PI is not supported for data type: {}", datatype), } } @@ -1203,7 +1203,7 @@ impl ScalarValue { DataType::Float32 => Ok(ScalarValue::from(consts::PI_UPPER_F32)), DataType::Float64 => Ok(ScalarValue::from(consts::PI_UPPER_F64)), _ => { - _internal_err!("PI_UPPER is not supported for data type: {:?}", datatype) + _internal_err!("PI_UPPER is not supported for data type: {}", datatype) } } } @@ -1214,7 +1214,7 @@ impl ScalarValue { DataType::Float32 => Ok(ScalarValue::from(consts::NEGATIVE_PI_LOWER_F32)), DataType::Float64 => Ok(ScalarValue::from(consts::NEGATIVE_PI_LOWER_F64)), _ => { - 
_internal_err!("-PI_LOWER is not supported for data type: {:?}", datatype) + _internal_err!("-PI_LOWER is not supported for data type: {}", datatype) } } } @@ -1225,10 +1225,7 @@ impl ScalarValue { DataType::Float32 => Ok(ScalarValue::from(consts::FRAC_PI_2_UPPER_F32)), DataType::Float64 => Ok(ScalarValue::from(consts::FRAC_PI_2_UPPER_F64)), _ => { - _internal_err!( - "PI_UPPER/2 is not supported for data type: {:?}", - datatype - ) + _internal_err!("PI_UPPER/2 is not supported for data type: {}", datatype) } } } @@ -1243,10 +1240,7 @@ impl ScalarValue { Ok(ScalarValue::from(consts::NEGATIVE_FRAC_PI_2_LOWER_F64)) } _ => { - _internal_err!( - "-PI/2_LOWER is not supported for data type: {:?}", - datatype - ) + _internal_err!("-PI/2_LOWER is not supported for data type: {}", datatype) } } } @@ -1256,7 +1250,7 @@ impl ScalarValue { match datatype { DataType::Float32 => Ok(ScalarValue::from(-std::f32::consts::PI)), DataType::Float64 => Ok(ScalarValue::from(-std::f64::consts::PI)), - _ => _internal_err!("-PI is not supported for data type: {:?}", datatype), + _ => _internal_err!("-PI is not supported for data type: {}", datatype), } } @@ -1265,7 +1259,7 @@ impl ScalarValue { match datatype { DataType::Float32 => Ok(ScalarValue::from(std::f32::consts::FRAC_PI_2)), DataType::Float64 => Ok(ScalarValue::from(std::f64::consts::FRAC_PI_2)), - _ => _internal_err!("PI/2 is not supported for data type: {:?}", datatype), + _ => _internal_err!("PI/2 is not supported for data type: {}", datatype), } } @@ -1274,7 +1268,7 @@ impl ScalarValue { match datatype { DataType::Float32 => Ok(ScalarValue::from(-std::f32::consts::FRAC_PI_2)), DataType::Float64 => Ok(ScalarValue::from(-std::f64::consts::FRAC_PI_2)), - _ => _internal_err!("-PI/2 is not supported for data type: {:?}", datatype), + _ => _internal_err!("-PI/2 is not supported for data type: {}", datatype), } } @@ -1284,7 +1278,7 @@ impl ScalarValue { DataType::Float32 => Ok(ScalarValue::from(f32::INFINITY)), DataType::Float64 => Ok(ScalarValue::from(f64::INFINITY)), _ => { - _internal_err!("Infinity is not supported for data type: {:?}", datatype) + _internal_err!("Infinity is not supported for data type: {}", datatype) } } } @@ -1296,7 +1290,7 @@ impl ScalarValue { DataType::Float64 => Ok(ScalarValue::from(f64::NEG_INFINITY)), _ => { _internal_err!( - "Negative Infinity is not supported for data type: {:?}", + "Negative Infinity is not supported for data type: {}", datatype ) } @@ -1369,7 +1363,7 @@ impl ScalarValue { DataType::Date64 => ScalarValue::Date64(Some(0)), _ => { return _not_impl_err!( - "Can't create a zero scalar from data_type \"{datatype:?}\"" + "Can't create a zero scalar from data_type \"{datatype}\"" ); } }) @@ -1507,7 +1501,7 @@ impl ScalarValue { // Unsupported types for now _ => { _not_impl_err!( - "Default value for data_type \"{datatype:?}\" is not implemented yet" + "Default value for data_type \"{datatype}\" is not implemented yet" ) } } @@ -1557,7 +1551,7 @@ impl ScalarValue { } _ => { return _not_impl_err!( - "Can't create an one scalar from data_type \"{datatype:?}\"" + "Can't create an one scalar from data_type \"{datatype}\"" ); } }) @@ -1603,7 +1597,7 @@ impl ScalarValue { } _ => { return _not_impl_err!( - "Can't create a negative one scalar from data_type \"{datatype:?}\"" + "Can't create a negative one scalar from data_type \"{datatype}\"" ); } }) @@ -1656,7 +1650,7 @@ impl ScalarValue { } _ => { return _not_impl_err!( - "Can't create a ten scalar from data_type \"{datatype:?}\"" + "Can't create a ten scalar from data_type 
\"{datatype}\"" ); } }) @@ -2364,7 +2358,7 @@ impl ScalarValue { DataType::UInt16 => dict_from_values::(values)?, DataType::UInt32 => dict_from_values::(values)?, DataType::UInt64 => dict_from_values::(values)?, - _ => unreachable!("Invalid dictionary keys type: {:?}", key_type), + _ => unreachable!("Invalid dictionary keys type: {}", key_type), } } DataType::FixedSizeBinary(size) => { @@ -2375,7 +2369,7 @@ impl ScalarValue { } else { _exec_err!( "Inconsistent types in ScalarValue::iter_to_array. \ - Expected {data_type:?}, got {sv:?}" + Expected {data_type}, got {sv:?}" ) } }) @@ -2937,7 +2931,7 @@ impl ScalarValue { DataType::UInt16 => dict_from_scalar::(v, size)?, DataType::UInt32 => dict_from_scalar::(v, size)?, DataType::UInt64 => dict_from_scalar::(v, size)?, - _ => unreachable!("Invalid dictionary keys type: {:?}", key_type), + _ => unreachable!("Invalid dictionary keys type: {}", key_type), } } ScalarValue::Null => get_or_create_cached_null_array(size), @@ -3197,7 +3191,7 @@ impl ScalarValue { DataType::UInt16 => get_dict_value::(array, index)?, DataType::UInt32 => get_dict_value::(array, index)?, DataType::UInt64 => get_dict_value::(array, index)?, - _ => unreachable!("Invalid dictionary keys type: {:?}", key_type), + _ => unreachable!("Invalid dictionary keys type: {}", key_type), }; // look up the index in the values dictionary let value = match values_index { @@ -3571,7 +3565,7 @@ impl ScalarValue { DataType::UInt16 => get_dict_value::(array, index)?, DataType::UInt32 => get_dict_value::(array, index)?, DataType::UInt64 => get_dict_value::(array, index)?, - _ => unreachable!("Invalid dictionary keys type: {:?}", key_type), + _ => unreachable!("Invalid dictionary keys type: {}", key_type), }; // was the value in the array non null? match values_index { diff --git a/datafusion/common/src/types/native.rs b/datafusion/common/src/types/native.rs index 76629e555b8c4..223e04ba994a0 100644 --- a/datafusion/common/src/types/native.rs +++ b/datafusion/common/src/types/native.rs @@ -185,7 +185,7 @@ pub enum NativeType { impl Display for NativeType { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "NativeType::{self:?}") + write!(f, "{self:?}") // TODO: nicer formatting } } @@ -352,10 +352,10 @@ impl LogicalType for NativeType { } _ => { return _internal_err!( - "Unavailable default cast for native type {:?} from physical type {:?}", - self, - origin - ) + "Unavailable default cast for native type {} from physical type {}", + self, + origin + ) } }) } diff --git a/datafusion/core/src/physical_planner.rs b/datafusion/core/src/physical_planner.rs index 15d325288b071..8bac7664fcaa3 100644 --- a/datafusion/core/src/physical_planner.rs +++ b/datafusion/core/src/physical_planner.rs @@ -2115,7 +2115,7 @@ impl DefaultPhysicalPlanner { // "System supplied type" --> Use debug format of the datatype let data_type = field.data_type(); - data_types.append_value(format!("{data_type:?}")); + data_types.append_value(format!("{data_type}")); // "YES if the column is possibly nullable, NO if it is known not nullable. 
" let nullable_str = if field.is_nullable() { "YES" } else { "NO" }; @@ -2742,7 +2742,7 @@ mod tests { assert_contains!( &e, - r#"Error during planning: Can not find compatible types to compare Boolean with [Struct([Field { name: "foo", data_type: Boolean, nullable: false, dict_id: 0, dict_is_ordered: false, metadata: {} }]), Utf8]"# + r#"Error during planning: Can not find compatible types to compare Boolean with [Struct(foo Boolean), Utf8]"# ); Ok(()) diff --git a/datafusion/core/tests/memory_limit/memory_limit_validation/sort_mem_validation.rs b/datafusion/core/tests/memory_limit/memory_limit_validation/sort_mem_validation.rs index a8002cf400a20..14c8fc4c41ecb 100644 --- a/datafusion/core/tests/memory_limit/memory_limit_validation/sort_mem_validation.rs +++ b/datafusion/core/tests/memory_limit/memory_limit_validation/sort_mem_validation.rs @@ -98,11 +98,9 @@ fn init_once() { fn spawn_test_process(test: &str) { init_once(); - let test_path = format!( - "memory_limit::memory_limit_validation::sort_mem_validation::{}", - test - ); - info!("Running test: {}", test_path); + let test_path = + format!("memory_limit::memory_limit_validation::sort_mem_validation::{test}"); + info!("Running test: {test_path}"); // Run the test command let output = Command::new("cargo") diff --git a/datafusion/core/tests/parquet/schema_adapter.rs b/datafusion/core/tests/parquet/schema_adapter.rs index f685ccdc9fb9e..a25ed7131ef80 100644 --- a/datafusion/core/tests/parquet/schema_adapter.rs +++ b/datafusion/core/tests/parquet/schema_adapter.rs @@ -119,7 +119,7 @@ impl SchemaMapper for CustomSchemaMapper { let default_value = match field.data_type() { DataType::Int64 => ScalarValue::Int64(Some(0)), DataType::Utf8 => ScalarValue::Utf8(Some("a".to_string())), - _ => unimplemented!("Unsupported data type: {:?}", field.data_type()), + _ => unimplemented!("Unsupported data type: {}", field.data_type()), }; output_columns .push(default_value.to_array_of_size(batch.num_rows()).unwrap()); @@ -199,7 +199,7 @@ impl PhysicalExprAdapter for CustomPhysicalExprAdapter { DataType::Int64 => ScalarValue::Int64(Some(1)), DataType::Utf8 => ScalarValue::Utf8(Some("b".to_string())), _ => unimplemented!( - "Unsupported data type: {:?}", + "Unsupported data type: {}", field.data_type() ), }; diff --git a/datafusion/datasource-avro/src/avro_to_arrow/arrow_array_reader.rs b/datafusion/datasource-avro/src/avro_to_arrow/arrow_array_reader.rs index 598484d2157c9..a80f18cf818fe 100644 --- a/datafusion/datasource-avro/src/avro_to_arrow/arrow_array_reader.rs +++ b/datafusion/datasource-avro/src/avro_to_arrow/arrow_array_reader.rs @@ -153,7 +153,7 @@ impl AvroArrowArrayReader<'_, R> { .map(|value| match value { Ok(Value::Record(v)) => Ok(v), Err(e) => Err(ArrowError::ParseError(format!( - "Failed to parse avro value: {e:?}" + "Failed to parse avro value: {e}" ))), other => Err(ArrowError::ParseError(format!( "Row needs to be of type object, got: {other:?}" @@ -281,7 +281,7 @@ impl AvroArrowArrayReader<'_, R> { self.list_array_string_array_builder::(&dtype, col_name, rows) } ref e => Err(SchemaError(format!( - "Data type is currently not supported for dictionaries in list : {e:?}" + "Data type is currently not supported for dictionaries in list : {e}" ))), } } @@ -308,7 +308,7 @@ impl AvroArrowArrayReader<'_, R> { } e => { return Err(SchemaError(format!( - "Nested list data builder type is not supported: {e:?}" + "Nested list data builder type is not supported: {e}" ))) } }; @@ -373,7 +373,7 @@ impl AvroArrowArrayReader<'_, R> { } e => { return 
Err(SchemaError(format!( - "Nested list data builder type is not supported: {e:?}" + "Nested list data builder type is not supported: {e}" ))) } } @@ -610,7 +610,7 @@ impl AvroArrowArrayReader<'_, R> { } datatype => { return Err(SchemaError(format!( - "Nested list of {datatype:?} not supported" + "Nested list of {datatype} not supported" ))); } }; @@ -831,7 +831,7 @@ impl AvroArrowArrayReader<'_, R> { } _ => { return Err(SchemaError(format!( - "type {:?} not supported", + "type {} not supported", field.data_type() ))) } @@ -936,7 +936,7 @@ fn resolve_string(v: &Value) -> ArrowResult> { Value::Null => Ok(None), other => Err(AvroError::new(AvroErrorDetails::GetString(other.clone()))), } - .map_err(|e| SchemaError(format!("expected resolvable string : {e:?}"))) + .map_err(|e| SchemaError(format!("expected resolvable string : {e}"))) } fn resolve_u8(v: &Value) -> Option { diff --git a/datafusion/datasource/src/schema_adapter.rs b/datafusion/datasource/src/schema_adapter.rs index bd5833bb78622..4c7b37113d58d 100644 --- a/datafusion/datasource/src/schema_adapter.rs +++ b/datafusion/datasource/src/schema_adapter.rs @@ -267,7 +267,7 @@ pub(crate) fn can_cast_field( Ok(true) } else { plan_err!( - "Cannot cast file schema field {} of type {:?} to table schema field of type {:?}", + "Cannot cast file schema field {} of type {} to table schema field of type {}", file_field.name(), file_field.data_type(), table_field.data_type() diff --git a/datafusion/expr-common/src/casts.rs b/datafusion/expr-common/src/casts.rs index c31d4f77c6a7f..684452d538ce2 100644 --- a/datafusion/expr-common/src/casts.rs +++ b/datafusion/expr-common/src/casts.rs @@ -339,7 +339,7 @@ mod tests { let actual_value = try_cast_literal_to_type(&literal, &target_type); println!("expect_cast: "); - println!(" {literal:?} --> {target_type:?}"); + println!(" {literal:?} --> {target_type}"); println!(" expected_result: {expected_result:?}"); println!(" actual_result: {actual_value:?}"); diff --git a/datafusion/expr-common/src/type_coercion/aggregates.rs b/datafusion/expr-common/src/type_coercion/aggregates.rs index e9377ce7de5a2..b4f393eceb296 100644 --- a/datafusion/expr-common/src/type_coercion/aggregates.rs +++ b/datafusion/expr-common/src/type_coercion/aggregates.rs @@ -171,7 +171,7 @@ pub fn variance_return_type(arg_type: &DataType) -> Result { if NUMERICS.contains(arg_type) { Ok(DataType::Float64) } else { - plan_err!("VAR does not support {arg_type:?}") + plan_err!("VAR does not support {arg_type}") } } @@ -180,7 +180,7 @@ pub fn covariance_return_type(arg_type: &DataType) -> Result { if NUMERICS.contains(arg_type) { Ok(DataType::Float64) } else { - plan_err!("COVAR does not support {arg_type:?}") + plan_err!("COVAR does not support {arg_type}") } } @@ -189,7 +189,7 @@ pub fn correlation_return_type(arg_type: &DataType) -> Result { if NUMERICS.contains(arg_type) { Ok(DataType::Float64) } else { - plan_err!("CORR does not support {arg_type:?}") + plan_err!("CORR does not support {arg_type}") } } @@ -304,7 +304,7 @@ pub fn coerce_avg_type(func_name: &str, arg_types: &[DataType]) -> Result coerced_type(func_name, v.as_ref()), _ => { plan_err!( - "The function {:?} does not support inputs of type {:?}.", + "The function {:?} does not support inputs of type {}.", func_name, data_type ) diff --git a/datafusion/expr-common/src/type_coercion/binary.rs b/datafusion/expr-common/src/type_coercion/binary.rs index f344a71451d47..a102523b21a3e 100644 --- a/datafusion/expr-common/src/type_coercion/binary.rs +++ 
b/datafusion/expr-common/src/type_coercion/binary.rs @@ -204,7 +204,7 @@ impl<'a> BinaryTypeCoercer<'a> { } And | Or => if matches!((lhs, rhs), (Boolean | Null, Boolean | Null)) { // Logical binary boolean operators can only be evaluated for - // boolean or null arguments. + // boolean or null arguments. Ok(Signature::uniform(Boolean)) } else { plan_err!( @@ -694,7 +694,7 @@ pub fn try_type_union_resolution_with_struct( keys_string = Some(keys); } } else { - return exec_err!("Expect to get struct but got {}", data_type); + return exec_err!("Expect to get struct but got {data_type}"); } } @@ -726,7 +726,7 @@ pub fn try_type_union_resolution_with_struct( } } } else { - return exec_err!("Expect to get struct but got {}", data_type); + return exec_err!("Expect to get struct but got {data_type}"); } } diff --git a/datafusion/expr/Cargo.toml b/datafusion/expr/Cargo.toml index 2a8e5ecfb1ab9..e6b2734cfff34 100644 --- a/datafusion/expr/Cargo.toml +++ b/datafusion/expr/Cargo.toml @@ -53,7 +53,7 @@ datafusion-functions-aggregate-common = { workspace = true } datafusion-functions-window-common = { workspace = true } datafusion-physical-expr-common = { workspace = true } indexmap = { workspace = true } -itertools = "0.14.0" +itertools = { workspace = true } paste = "^1.0" recursive = { workspace = true, optional = true } serde_json = { workspace = true } diff --git a/datafusion/expr/src/conditional_expressions.rs b/datafusion/expr/src/conditional_expressions.rs index 69525ea52137c..8910b2e9b56a3 100644 --- a/datafusion/expr/src/conditional_expressions.rs +++ b/datafusion/expr/src/conditional_expressions.rs @@ -20,6 +20,7 @@ use crate::expr::Case; use crate::{expr_schema::ExprSchemable, Expr}; use arrow::datatypes::DataType; use datafusion_common::{plan_err, DFSchema, HashSet, Result}; +use itertools::Itertools as _; /// Helper struct for building [Expr::Case] pub struct CaseBuilder { @@ -81,9 +82,12 @@ impl CaseBuilder { // Cannot verify types until execution type } else { let unique_types: HashSet<&DataType> = then_types.iter().collect(); - if unique_types.len() != 1 { + if unique_types.is_empty() { + return plan_err!("CASE expression 'then' values had no data types"); + } else if unique_types.len() != 1 { return plan_err!( - "CASE expression 'then' values had multiple data types: {unique_types:?}" + "CASE expression 'then' values had multiple data types: {}", + unique_types.iter().join(", ") ); } } diff --git a/datafusion/expr/src/expr.rs b/datafusion/expr/src/expr.rs index f72fb4c309c47..adbc6d8ae7291 100644 --- a/datafusion/expr/src/expr.rs +++ b/datafusion/expr/src/expr.rs @@ -3482,10 +3482,10 @@ impl Display for Expr { write!(f, "END") } Expr::Cast(Cast { expr, data_type }) => { - write!(f, "CAST({expr} AS {data_type:?})") + write!(f, "CAST({expr} AS {data_type})") } Expr::TryCast(TryCast { expr, data_type }) => { - write!(f, "TRY_CAST({expr} AS {data_type:?})") + write!(f, "TRY_CAST({expr} AS {data_type})") } Expr::Not(expr) => write!(f, "NOT {expr}"), Expr::Negative(expr) => write!(f, "(- {expr})"), diff --git a/datafusion/expr/src/expr_schema.rs b/datafusion/expr/src/expr_schema.rs index 27f58d7d810ab..534fbf01036a0 100644 --- a/datafusion/expr/src/expr_schema.rs +++ b/datafusion/expr/src/expr_schema.rs @@ -642,7 +642,7 @@ impl ExprSchemable for Expr { _ => Ok(Expr::Cast(Cast::new(Box::new(self), cast_to_type.clone()))), } } else { - plan_err!("Cannot automatically convert {this_type:?} to {cast_to_type:?}") + plan_err!("Cannot automatically convert {this_type} to {cast_to_type}") } } } diff 
--git a/datafusion/expr/src/logical_plan/plan.rs b/datafusion/expr/src/logical_plan/plan.rs index 7dc750a35c0ec..9c40d2cb48cd2 100644 --- a/datafusion/expr/src/logical_plan/plan.rs +++ b/datafusion/expr/src/logical_plan/plan.rs @@ -4155,10 +4155,7 @@ fn get_unnested_columns( })) } _ => { - return internal_err!( - "trying to unnest on invalid data type {:?}", - data_type - ); + return internal_err!("trying to unnest on invalid data type {data_type}"); } }; Ok(qualified_columns) @@ -4182,7 +4179,7 @@ fn get_unnested_list_datatype_recursive( _ => {} }; - internal_err!("trying to unnest on invalid data type {:?}", data_type) + internal_err!("trying to unnest on invalid data type {data_type}") } #[cfg(test)] @@ -5542,7 +5539,7 @@ mod tests { )?; let fields = join.schema.fields(); - assert_eq!(fields.len(), 6, "Expected 6 fields for {join_type:?} join"); + assert_eq!(fields.len(), 6, "Expected 6 fields for {join_type} join"); for (i, field) in fields.iter().enumerate() { let expected_nullable = match (i, &join_type) { diff --git a/datafusion/expr/src/logical_plan/statement.rs b/datafusion/expr/src/logical_plan/statement.rs index 72eb6b39bb47b..6d3fe9fa75acf 100644 --- a/datafusion/expr/src/logical_plan/statement.rs +++ b/datafusion/expr/src/logical_plan/statement.rs @@ -17,6 +17,7 @@ use arrow::datatypes::DataType; use datafusion_common::{DFSchema, DFSchemaRef}; +use itertools::Itertools as _; use std::fmt::{self, Display}; use std::sync::{Arc, LazyLock}; @@ -110,7 +111,7 @@ impl Statement { Statement::Prepare(Prepare { name, data_types, .. }) => { - write!(f, "Prepare: {name:?} {data_types:?}") + write!(f, "Prepare: {name:?} [{}]", data_types.iter().join(", ")) } Statement::Execute(Execute { name, parameters, .. diff --git a/datafusion/expr/src/test/function_stub.rs b/datafusion/expr/src/test/function_stub.rs index 3feab09bbd194..c5b7c751b9eda 100644 --- a/datafusion/expr/src/test/function_stub.rs +++ b/datafusion/expr/src/test/function_stub.rs @@ -141,7 +141,7 @@ impl AggregateUDFImpl for Sum { dt if dt.is_signed_integer() => Ok(DataType::Int64), dt if dt.is_unsigned_integer() => Ok(DataType::UInt64), dt if dt.is_floating() => Ok(DataType::Float64), - _ => exec_err!("Sum not supported for {}", data_type), + _ => exec_err!("Sum not supported for {data_type}"), } } diff --git a/datafusion/expr/src/type_coercion/functions.rs b/datafusion/expr/src/type_coercion/functions.rs index d776d07775fd3..9d15360ca8641 100644 --- a/datafusion/expr/src/type_coercion/functions.rs +++ b/datafusion/expr/src/type_coercion/functions.rs @@ -36,6 +36,7 @@ use datafusion_expr_common::{ type_coercion::binary::comparison_coercion_numeric, type_coercion::binary::string_coercion, }; +use itertools::Itertools as _; use std::sync::Arc; /// Performs type coercion for scalar function arguments. 
@@ -278,7 +279,8 @@ fn try_coerce_types( // none possible -> Error plan_err!( - "Failed to coerce arguments to satisfy a call to '{function_name}' function: coercion from {current_types:?} to the signature {type_signature:?} failed" + "Failed to coerce arguments to satisfy a call to '{function_name}' function: coercion from {} to the signature {type_signature:?} failed", + current_types.iter().join(", ") ) } @@ -529,7 +531,7 @@ fn get_valid_types( new_types.push(DataType::Utf8); } else { return plan_err!( - "Function '{function_name}' expects NativeType::String but received {logical_data_type}" + "Function '{function_name}' expects NativeType::String but received NativeType::{logical_data_type}" ); } } @@ -589,7 +591,7 @@ fn get_valid_types( if !logical_data_type.is_numeric() { return plan_err!( - "Function '{function_name}' expects NativeType::Numeric but received {logical_data_type}" + "Function '{function_name}' expects NativeType::Numeric but received NativeType::{logical_data_type}" ); } @@ -610,7 +612,7 @@ fn get_valid_types( valid_type = DataType::Float64; } else if !logical_data_type.is_numeric() { return plan_err!( - "Function '{function_name}' expects NativeType::Numeric but received {logical_data_type}" + "Function '{function_name}' expects NativeType::Numeric but received NativeType::{logical_data_type}" ); } @@ -657,7 +659,7 @@ fn get_valid_types( new_types.push(casted_type); } else { return internal_err!( - "Expect {} but received {}, DataType: {}", + "Expect {} but received NativeType::{}, DataType: {}", param.desired_type(), current_native_type, current_type diff --git a/datafusion/functions-aggregate-common/src/min_max.rs b/datafusion/functions-aggregate-common/src/min_max.rs index 806071dd2f583..0aad9b356fba7 100644 --- a/datafusion/functions-aggregate-common/src/min_max.rs +++ b/datafusion/functions-aggregate-common/src/min_max.rs @@ -659,7 +659,7 @@ macro_rules! min_max_batch { other => { // This should have been handled before return datafusion_common::internal_err!( - "Min/Max accumulator not implemented for type {:?}", + "Min/Max accumulator not implemented for type {}", other ); } diff --git a/datafusion/functions-aggregate-common/src/tdigest.rs b/datafusion/functions-aggregate-common/src/tdigest.rs index 38a9292cea8d7..370a640b046a6 100644 --- a/datafusion/functions-aggregate-common/src/tdigest.rs +++ b/datafusion/functions-aggregate-common/src/tdigest.rs @@ -45,7 +45,7 @@ macro_rules! cast_scalar_f64 { ($value:expr ) => { match &$value { ScalarValue::Float64(Some(v)) => *v, - v => panic!("invalid type {:?}", v), + v => panic!("invalid type {}", v), } }; } @@ -56,7 +56,7 @@ macro_rules! 
cast_scalar_u64 { ($value:expr ) => { match &$value { ScalarValue::UInt64(Some(v)) => *v, - v => panic!("invalid type {:?}", v), + v => panic!("invalid type {}", v), } }; } diff --git a/datafusion/functions-aggregate/src/approx_percentile_cont.rs b/datafusion/functions-aggregate/src/approx_percentile_cont.rs index 640e0e5bac860..aed28546b7222 100644 --- a/datafusion/functions-aggregate/src/approx_percentile_cont.rs +++ b/datafusion/functions-aggregate/src/approx_percentile_cont.rs @@ -521,7 +521,7 @@ impl Accumulator for ApproxPercentileAccumulator { DataType::UInt64 => ScalarValue::UInt64(Some(q as u64)), DataType::Float32 => ScalarValue::Float32(Some(q as f32)), DataType::Float64 => ScalarValue::Float64(Some(q)), - v => unreachable!("unexpected return type {:?}", v), + v => unreachable!("unexpected return type {}", v), }) } diff --git a/datafusion/functions-aggregate/src/average.rs b/datafusion/functions-aggregate/src/average.rs index f7cb74fd55a25..1dd711964a839 100644 --- a/datafusion/functions-aggregate/src/average.rs +++ b/datafusion/functions-aggregate/src/average.rs @@ -605,7 +605,7 @@ where { pub fn new(sum_data_type: &DataType, return_data_type: &DataType, avg_fn: F) -> Self { debug!( - "AvgGroupsAccumulator ({}, sum type: {sum_data_type:?}) --> {return_data_type:?}", + "AvgGroupsAccumulator ({}, sum type: {sum_data_type}) --> {return_data_type}", std::any::type_name::() ); diff --git a/datafusion/functions-aggregate/src/sum.rs b/datafusion/functions-aggregate/src/sum.rs index 445c7dfe6b7af..d974ca22b1d8d 100644 --- a/datafusion/functions-aggregate/src/sum.rs +++ b/datafusion/functions-aggregate/src/sum.rs @@ -151,7 +151,7 @@ impl AggregateUDFImpl for Sum { dt if dt.is_signed_integer() => Ok(DataType::Int64), dt if dt.is_unsigned_integer() => Ok(DataType::UInt64), dt if dt.is_floating() => Ok(DataType::Float64), - _ => exec_err!("Sum not supported for {}", data_type), + _ => exec_err!("Sum not supported for {data_type}"), } } diff --git a/datafusion/functions-nested/src/concat.rs b/datafusion/functions-nested/src/concat.rs index 43a00fefcbd98..9a12db525f954 100644 --- a/datafusion/functions-nested/src/concat.rs +++ b/datafusion/functions-nested/src/concat.rs @@ -319,8 +319,9 @@ impl ScalarUDFImpl for ArrayConcat { } } else { plan_err!( - "Failed to unify argument types of {}: {arg_types:?}", - self.name() + "Failed to unify argument types of {}: [{}]", + self.name(), + arg_types.iter().join(", ") ) } } diff --git a/datafusion/functions-nested/src/extract.rs b/datafusion/functions-nested/src/extract.rs index 7fb38ace69a79..a64fd1426433a 100644 --- a/datafusion/functions-nested/src/extract.rs +++ b/datafusion/functions-nested/src/extract.rs @@ -451,7 +451,7 @@ fn array_slice_inner(args: &[ArrayRef]) -> Result { let array = as_large_list_array(&args[0])?; general_array_slice::<i64>(array, from_array, to_array, stride) } - _ => exec_err!("array_slice does not support type: {:?}", array_data_type), + _ => exec_err!("array_slice does not support type: {}", array_data_type), } } @@ -730,10 +730,7 @@ fn array_pop_front_inner(args: &[ArrayRef]) -> Result { let array = as_large_list_array(&args[0])?; general_pop_front_list::<i64>(array) } - _ => exec_err!( - "array_pop_front does not support type: {:?}", - array_data_type - ), + _ => exec_err!("array_pop_front does not support type: {}", array_data_type), } } @@ -839,7 +836,7 @@ fn array_pop_back_inner(args: &[ArrayRef]) -> Result { general_pop_back_list::<i64>(array) } _ => exec_err!( - "array_pop_back does not support type: {:?}", + "array_pop_back 
does not support type: {}", array.data_type() ), } } @@ -942,7 +939,7 @@ fn array_any_value_inner(args: &[ArrayRef]) -> Result { let array = as_large_list_array(&array)?; general_array_any_value::<i64>(array) } - data_type => exec_err!("array_any_value does not support type: {:?}", data_type), + data_type => exec_err!("array_any_value does not support type: {data_type}"), } } diff --git a/datafusion/functions-nested/src/length.rs b/datafusion/functions-nested/src/length.rs index d829fac27b1d4..5c6afb961210e 100644 --- a/datafusion/functions-nested/src/length.rs +++ b/datafusion/functions-nested/src/length.rs @@ -149,7 +149,7 @@ pub fn array_length_inner(args: &[ArrayRef]) -> Result { List(_) => general_array_length::<i32>(args), LargeList(_) => general_array_length::<i64>(args), FixedSizeList(_, _) => fixed_size_array_length(args), - array_type => exec_err!("array_length does not support type '{array_type:?}'"), + array_type => exec_err!("array_length does not support type '{array_type}'"), } } diff --git a/datafusion/functions-nested/src/make_array.rs b/datafusion/functions-nested/src/make_array.rs index e4dd9b3662cca..97d64c70cd364 100644 --- a/datafusion/functions-nested/src/make_array.rs +++ b/datafusion/functions-nested/src/make_array.rs @@ -39,6 +39,7 @@ use datafusion_expr::{ ColumnarValue, Documentation, ScalarUDFImpl, Signature, Volatility, }; use datafusion_macros::user_doc; +use itertools::Itertools as _; make_udf_expr_and_func!( MakeArray, @@ -132,8 +133,9 @@ impl ScalarUDFImpl for MakeArray { Ok(vec![unified; arg_types.len()]) } else { plan_err!( - "Failed to unify argument types of {}: {arg_types:?}", - self.name() + "Failed to unify argument types of {}: [{}]", + self.name(), + arg_types.iter().join(", ") ) } } diff --git a/datafusion/functions-nested/src/position.rs b/datafusion/functions-nested/src/position.rs index be7ce05b6934a..dae946def8f53 100644 --- a/datafusion/functions-nested/src/position.rs +++ b/datafusion/functions-nested/src/position.rs @@ -147,7 +147,7 @@ pub fn array_position_inner(args: &[ArrayRef]) -> Result { match &args[0].data_type() { List(_) => general_position_dispatch::<i32>(args), LargeList(_) => general_position_dispatch::<i64>(args), - array_type => exec_err!("array_position does not support type '{array_type:?}'."), + array_type => exec_err!("array_position does not support type '{array_type}'."), } } fn general_position_dispatch(args: &[ArrayRef]) -> Result { @@ -308,7 +308,7 @@ pub fn array_positions_inner(args: &[ArrayRef]) -> Result { general_positions::<i64>(arr, element) } array_type => { - exec_err!("array_positions does not support type '{array_type:?}'.") + exec_err!("array_positions does not support type '{array_type}'.") } } } diff --git a/datafusion/functions-nested/src/remove.rs b/datafusion/functions-nested/src/remove.rs index e500d7196783e..54458084138e7 100644 --- a/datafusion/functions-nested/src/remove.rs +++ b/datafusion/functions-nested/src/remove.rs @@ -311,7 +311,7 @@ fn array_remove_internal( general_remove::<i64>(list_array, element_array, arr_n) } array_type => { - exec_err!("array_remove_all does not support type '{array_type:?}'.") + exec_err!("array_remove_all does not support type '{array_type}'.") } } } diff --git a/datafusion/functions-nested/src/replace.rs b/datafusion/functions-nested/src/replace.rs index d791c6d1f10bf..59f851a776a18 100644 --- a/datafusion/functions-nested/src/replace.rs +++ b/datafusion/functions-nested/src/replace.rs @@ -430,7 +430,7 @@ pub(crate) fn array_replace_inner(args: &[ArrayRef]) -> Result { general_replace::<i64>(list_array, 
from, to, arr_n) } DataType::Null => Ok(new_null_array(array.data_type(), 1)), - array_type => exec_err!("array_replace does not support type '{array_type:?}'."), + array_type => exec_err!("array_replace does not support type '{array_type}'."), } } @@ -450,7 +450,7 @@ pub(crate) fn array_replace_n_inner(args: &[ArrayRef]) -> Result { } DataType::Null => Ok(new_null_array(array.data_type(), 1)), array_type => { - exec_err!("array_replace_n does not support type '{array_type:?}'.") + exec_err!("array_replace_n does not support type '{array_type}'.") } } } @@ -471,7 +471,7 @@ pub(crate) fn array_replace_all_inner(args: &[ArrayRef]) -> Result { } DataType::Null => Ok(new_null_array(array.data_type(), 1)), array_type => { - exec_err!("array_replace_all does not support type '{array_type:?}'.") + exec_err!("array_replace_all does not support type '{array_type}'.") } } } diff --git a/datafusion/functions-nested/src/resize.rs b/datafusion/functions-nested/src/resize.rs index db27d649a467e..c5af3db00a268 100644 --- a/datafusion/functions-nested/src/resize.rs +++ b/datafusion/functions-nested/src/resize.rs @@ -191,7 +191,7 @@ pub(crate) fn array_resize_inner(arg: &[ArrayRef]) -> Result { let array = as_large_list_array(&arg[0])?; general_list_resize::<i64>(array, new_len, field, new_element) } - array_type => exec_err!("array_resize does not support type '{array_type:?}'."), + array_type => exec_err!("array_resize does not support type '{array_type}'."), } } diff --git a/datafusion/functions-nested/src/reverse.rs b/datafusion/functions-nested/src/reverse.rs index 5b134fe5b26a3..2fd444f0f0e4c 100644 --- a/datafusion/functions-nested/src/reverse.rs +++ b/datafusion/functions-nested/src/reverse.rs @@ -133,7 +133,7 @@ pub fn array_reverse_inner(arg: &[ArrayRef]) -> Result { fixed_size_array_reverse(array, field) } Null => Ok(Arc::clone(input_array)), - array_type => exec_err!("array_reverse does not support type '{array_type:?}'."), + array_type => exec_err!("array_reverse does not support type '{array_type}'."), } } diff --git a/datafusion/functions-nested/src/utils.rs b/datafusion/functions-nested/src/utils.rs index ed08a82358748..464301b6ffcf0 100644 --- a/datafusion/functions-nested/src/utils.rs +++ b/datafusion/functions-nested/src/utils.rs @@ -31,6 +31,7 @@ use datafusion_common::cast::{ use datafusion_common::{exec_err, internal_err, plan_err, Result, ScalarValue}; use datafusion_expr::ColumnarValue; +use itertools::Itertools as _; pub(crate) fn check_datatypes(name: &str, args: &[&ArrayRef]) -> Result<()> { let data_type = args[0].data_type(); @@ -39,7 +40,10 @@ pub(crate) fn check_datatypes(name: &str, args: &[&ArrayRef]) -> Result<()> { || arg.data_type().equals_datatype(&DataType::Null) }) { let types = args.iter().map(|arg| arg.data_type()).collect::<Vec<_>>(); - return plan_err!("{name} received incompatible types: '{types:?}'."); + return plan_err!( + "{name} received incompatible types: {}", + types.iter().join(", ") + ); } Ok(()) } @@ -260,7 +264,7 @@ pub(crate) fn get_map_entry_field(data_type: &DataType) -> Result<&Fields> { } } } - _ => internal_err!("Expected a Map type, got {:?}", data_type), + _ => internal_err!("Expected a Map type, got {data_type}"), } } diff --git a/datafusion/functions/src/core/getfield.rs b/datafusion/functions/src/core/getfield.rs index 81dd3c7797e64..d18bd6e31f72e 100644 --- a/datafusion/functions/src/core/getfield.rs +++ b/datafusion/functions/src/core/getfield.rs @@ -256,7 +256,7 @@ impl ScalarUDFImpl for GetFieldFunc { (DataType::Map(_, _), other) => { let data_type = 
other.data_type(); if data_type.is_nested() { - exec_err!("unsupported type {:?} for map access", data_type) + exec_err!("unsupported type {} for map access", data_type) } else { process_map_array(array, other.to_array()?) } @@ -275,7 +275,7 @@ impl ScalarUDFImpl for GetFieldFunc { (DataType::Null, _) => Ok(ColumnarValue::Scalar(ScalarValue::Null)), (dt, name) => exec_err!( "get_field is only possible on maps with utf8 indexes or struct \ - with utf8 indexes. Received {dt:?} with {name:?} index" + with utf8 indexes. Received {dt} with {name:?} index" ), } } diff --git a/datafusion/functions/src/core/nvl2.rs b/datafusion/functions/src/core/nvl2.rs index 0f55bddcc91a8..82aa8d2a4cd54 100644 --- a/datafusion/functions/src/core/nvl2.rs +++ b/datafusion/functions/src/core/nvl2.rs @@ -113,7 +113,7 @@ impl ScalarUDFImpl for NVL2Func { if let Some(coerced_type) = coerced_type { Ok(coerced_type) } else { - internal_err!("Coercion from {acc:?} to {x:?} failed.") + internal_err!("Coercion from {acc} to {x} failed.") } })?; Ok(vec![new_type; arg_types.len()]) diff --git a/datafusion/functions/src/datetime/from_unixtime.rs b/datafusion/functions/src/datetime/from_unixtime.rs index ed238f0074af6..5d6adfb6f119a 100644 --- a/datafusion/functions/src/datetime/from_unixtime.rs +++ b/datafusion/functions/src/datetime/from_unixtime.rs @@ -133,7 +133,7 @@ impl ScalarUDFImpl for FromUnixtimeFunc { if args[0].data_type() != Int64 { return exec_err!( - "Unsupported data type {:?} for function from_unixtime", + "Unsupported data type {} for function from_unixtime", args[0].data_type() ); } @@ -145,7 +145,7 @@ impl ScalarUDFImpl for FromUnixtimeFunc { .cast_to(&Timestamp(Second, Some(Arc::from(tz.to_string()))), None), _ => { exec_err!( - "Unsupported data type {:?} for function from_unixtime", + "Unsupported data type {} for function from_unixtime", args[1].data_type() ) } diff --git a/datafusion/functions/src/datetime/to_date.rs b/datafusion/functions/src/datetime/to_date.rs index 3341a5dbb52eb..3840c8d8bbb94 100644 --- a/datafusion/functions/src/datetime/to_date.rs +++ b/datafusion/functions/src/datetime/to_date.rs @@ -39,7 +39,7 @@ Returns the corresponding date. Note: `to_date` returns Date32, which represents its values as the number of days since unix epoch(`1970-01-01`) stored as signed 32 bit value. 
The largest supported date value is `9999-12-31`.", syntax_example = "to_date('2017-05-31', '%Y-%m-%d')", sql_example = r#"```sql -> select to_date('2023-01-31'); +> select to_date('2023-01-31'); +-------------------------------+ | to_date(Utf8("2023-01-31")) | +-------------------------------+ @@ -150,7 +150,7 @@ impl ScalarUDFImpl for ToDateFunc { } Utf8View | LargeUtf8 | Utf8 => self.to_date(&args), other => { - exec_err!("Unsupported data type {:?} for function to_date", other) + exec_err!("Unsupported data type {} for function to_date", other) } } } diff --git a/datafusion/functions/src/datetime/to_timestamp.rs b/datafusion/functions/src/datetime/to_timestamp.rs index 34914d256cabb..d2a5f8102b379 100644 --- a/datafusion/functions/src/datetime/to_timestamp.rs +++ b/datafusion/functions/src/datetime/to_timestamp.rs @@ -368,10 +368,7 @@ impl ScalarUDFImpl for ToTimestampFunc { } } other => { - exec_err!( - "Unsupported data type {:?} for function to_timestamp", - other - ) + exec_err!("Unsupported data type {other} for function to_timestamp") } } } @@ -424,7 +421,7 @@ impl ScalarUDFImpl for ToTimestampSecondsFunc { } other => { exec_err!( - "Unsupported data type {:?} for function to_timestamp_seconds", + "Unsupported data type {} for function to_timestamp_seconds", other ) } @@ -482,7 +479,7 @@ impl ScalarUDFImpl for ToTimestampMillisFunc { ), other => { exec_err!( - "Unsupported data type {:?} for function to_timestamp_millis", + "Unsupported data type {} for function to_timestamp_millis", other ) } @@ -540,7 +537,7 @@ impl ScalarUDFImpl for ToTimestampMicrosFunc { ), other => { exec_err!( - "Unsupported data type {:?} for function to_timestamp_micros", + "Unsupported data type {} for function to_timestamp_micros", other ) } @@ -597,7 +594,7 @@ impl ScalarUDFImpl for ToTimestampNanosFunc { } other => { exec_err!( - "Unsupported data type {:?} for function to_timestamp_nanos", + "Unsupported data type {} for function to_timestamp_nanos", other ) } diff --git a/datafusion/functions/src/datetime/to_unixtime.rs b/datafusion/functions/src/datetime/to_unixtime.rs index 13c73815f6cda..42651cd537162 100644 --- a/datafusion/functions/src/datetime/to_unixtime.rs +++ b/datafusion/functions/src/datetime/to_unixtime.rs @@ -118,7 +118,7 @@ impl ScalarUDFImpl for ToUnixtimeFunc { .invoke_with_args(args)? 
.cast_to(&DataType::Int64, None), other => { - exec_err!("Unsupported data type {:?} for function to_unixtime", other) + exec_err!("Unsupported data type {} for function to_unixtime", other) } } } diff --git a/datafusion/functions/src/string/concat.rs b/datafusion/functions/src/string/concat.rs index e322c757ef041..a93e70e714e8b 100644 --- a/datafusion/functions/src/string/concat.rs +++ b/datafusion/functions/src/string/concat.rs @@ -140,7 +140,7 @@ impl ScalarUDFImpl for ConcatFunc { Some(Some(v)) => result.push_str(v), Some(None) => {} // null literal None => plan_err!( - "Concat function does not support scalar type {:?}", + "Concat function does not support scalar type {}", scalar )?, } diff --git a/datafusion/functions/src/string/contains.rs b/datafusion/functions/src/string/contains.rs index 6372131659e80..7e50676933c8d 100644 --- a/datafusion/functions/src/string/contains.rs +++ b/datafusion/functions/src/string/contains.rs @@ -140,7 +140,7 @@ fn contains(args: &[ArrayRef]) -> Result { } } else { exec_err!( - "Unsupported data type {:?}, {:?} for function `contains`.", + "Unsupported data type {}, {:?} for function `contains`.", args[0].data_type(), args[1].data_type() ) diff --git a/datafusion/functions/src/string/replace.rs b/datafusion/functions/src/string/replace.rs index 8a2020c5a01b2..f127b452b2d34 100644 --- a/datafusion/functions/src/string/replace.rs +++ b/datafusion/functions/src/string/replace.rs @@ -145,7 +145,7 @@ impl ScalarUDFImpl for ReplaceFunc { } } else { exec_err!( - "Unsupported data type {:?}, {:?}, {:?} for function replace.", + "Unsupported data type {}, {:?}, {:?} for function replace.", data_types[0], data_types[1], data_types[2] diff --git a/datafusion/optimizer/src/analyzer/type_coercion.rs b/datafusion/optimizer/src/analyzer/type_coercion.rs index e268053781410..fffa1cbe6f219 100644 --- a/datafusion/optimizer/src/analyzer/type_coercion.rs +++ b/datafusion/optimizer/src/analyzer/type_coercion.rs @@ -20,7 +20,7 @@ use std::sync::Arc; use datafusion_expr::binary::BinaryTypeCoercer; -use itertools::izip; +use itertools::{izip, Itertools as _}; use arrow::datatypes::{DataType, Field, IntervalUnit, Schema}; @@ -252,7 +252,7 @@ impl<'a> TypeCoercionRewriter<'a> { if dt.is_integer() || dt.is_null() { expr.cast_to(&DataType::Int64, schema) } else { - plan_err!("Expected {expr_name} to be an integer or null, but got {dt:?}") + plan_err!("Expected {expr_name} to be an integer or null, but got {dt}") } } @@ -351,9 +351,10 @@ impl TreeNodeRewriter for TypeCoercionRewriter<'_> { .data; let expr_type = expr.get_type(self.schema)?; let subquery_type = new_plan.schema().field(0).data_type(); - let common_type = comparison_coercion(&expr_type, subquery_type).ok_or(plan_datafusion_err!( - "expr type {expr_type:?} can't cast to {subquery_type:?} in InSubquery" - ), + let common_type = comparison_coercion(&expr_type, subquery_type).ok_or( + plan_datafusion_err!( + "expr type {expr_type} can't cast to {subquery_type} in InSubquery" + ), )?; let new_subquery = Subquery { subquery: Arc::new(new_plan), @@ -478,7 +479,7 @@ impl TreeNodeRewriter for TypeCoercionRewriter<'_> { get_coerce_type_for_list(&expr_data_type, &list_data_types); match result_type { None => plan_err!( - "Can not find compatible types to compare {expr_data_type:?} with {list_data_types:?}" + "Can not find compatible types to compare {expr_data_type} with [{}]", list_data_types.iter().join(", ") ), Some(coerced_type) => { // find the coerced type @@ -685,7 +686,7 @@ fn coerce_scalar_range_aware( // If type 
coercion fails, check if the largest type in family works: if let Some(largest_type) = get_widest_type_in_family(target_type) { coerce_scalar(largest_type, value).map_or_else( - |_| exec_err!("Cannot cast {value:?} to {target_type:?}"), + |_| exec_err!("Cannot cast {value:?} to {target_type}"), |_| ScalarValue::try_from(target_type), ) } else { @@ -737,7 +738,7 @@ fn extract_window_frame_target_type(col_type: &DataType) -> Result { } else if let DataType::Dictionary(_, value_type) = col_type { extract_window_frame_target_type(value_type) } else { - internal_err!("Cannot run range queries on datatype: {col_type:?}") + internal_err!("Cannot run range queries on datatype: {col_type}") } } @@ -896,8 +897,9 @@ fn coerce_case_expression(case: Case, schema: &DFSchema) -> Result { get_coerce_type_for_case_expression(&when_types, Some(case_type)); coerced_type.ok_or_else(|| { plan_datafusion_err!( - "Failed to coerce case ({case_type:?}) and when ({when_types:?}) \ - to common types in CASE WHEN expression" + "Failed to coerce case ({case_type}) and when ({}) \ + to common types in CASE WHEN expression", + when_types.iter().join(", ") ) }) }) @@ -905,10 +907,19 @@ fn coerce_case_expression(case: Case, schema: &DFSchema) -> Result { let then_else_coerce_type = get_coerce_type_for_case_expression(&then_types, else_type.as_ref()).ok_or_else( || { - plan_datafusion_err!( - "Failed to coerce then ({then_types:?}) and else ({else_type:?}) \ - to common types in CASE WHEN expression" - ) + if let Some(else_type) = else_type { + plan_datafusion_err!( + "Failed to coerce then ({}) and else ({else_type}) \ + to common types in CASE WHEN expression", + then_types.iter().join(", ") + ) + } else { + plan_datafusion_err!( + "Failed to coerce then ({}) and else (None) \ + to common types in CASE WHEN expression", + then_types.iter().join(", ") + ) + } }, )?; @@ -1681,7 +1692,7 @@ mod test { let err = Projection::try_new(vec![udaf], empty).err().unwrap(); assert!( - err.strip_backtrace().starts_with("Error during planning: Failed to coerce arguments to satisfy a call to 'MY_AVG' function: coercion from [Utf8] to the signature Uniform(1, [Float64]) failed") + err.strip_backtrace().starts_with("Error during planning: Failed to coerce arguments to satisfy a call to 'MY_AVG' function: coercion from Utf8 to the signature Uniform(1, [Float64]) failed") ); Ok(()) } @@ -1742,7 +1753,7 @@ mod test { .err() .unwrap() .strip_backtrace(); - assert!(err.starts_with("Error during planning: Failed to coerce arguments to satisfy a call to 'avg' function: coercion from [Utf8] to the signature Uniform(1, [Int8, Int16, Int32, Int64, UInt8, UInt16, UInt32, UInt64, Float32, Float64]) failed")); + assert!(err.starts_with("Error during planning: Failed to coerce arguments to satisfy a call to 'avg' function: coercion from Utf8 to the signature Uniform(1, [Int8, Int16, Int32, Int64, UInt8, UInt16, UInt32, UInt64, Float32, Float64]) failed")); Ok(()) } @@ -2231,7 +2242,7 @@ mod test { let err = coerce_case_expression(case, &schema).unwrap_err(); assert_snapshot!( err.strip_backtrace(), - @"Error during planning: Failed to coerce case (Interval(MonthDayNano)) and when ([Float32, Binary, Utf8]) to common types in CASE WHEN expression" + @"Error during planning: Failed to coerce case (Interval(MonthDayNano)) and when (Float32, Binary, Utf8) to common types in CASE WHEN expression" ); let case = Case { @@ -2246,7 +2257,7 @@ mod test { let err = coerce_case_expression(case, &schema).unwrap_err(); assert_snapshot!( err.strip_backtrace(), - 
@"Error during planning: Failed to coerce then ([Date32, Float32, Binary]) and else (Some(Timestamp(Nanosecond, None))) to common types in CASE WHEN expression" + @"Error during planning: Failed to coerce then (Date32, Float32, Binary) and else (Timestamp(Nanosecond, None)) to common types in CASE WHEN expression" ); Ok(()) diff --git a/datafusion/optimizer/src/simplify_expressions/expr_simplifier.rs b/datafusion/optimizer/src/simplify_expressions/expr_simplifier.rs index 97dfc09c4f2a6..8a42648cd5786 100644 --- a/datafusion/optimizer/src/simplify_expressions/expr_simplifier.rs +++ b/datafusion/optimizer/src/simplify_expressions/expr_simplifier.rs @@ -1821,7 +1821,7 @@ impl TreeNodeRewriter for Simplifier<'_, S> { // we need to unwrap the cast for cast/try_cast expr, and add cast to the literal let Some(value) = try_cast_literal_to_type(&right_lit_value, &expr_type) else { internal_err!( - "Can't cast the list expr {:?} to type {:?}", + "Can't cast the list expr {:?} to type {}", right_lit_value, &expr_type )? }; diff --git a/datafusion/optimizer/src/simplify_expressions/unwrap_cast.rs b/datafusion/optimizer/src/simplify_expressions/unwrap_cast.rs index eef45b4c18abb..5286cbd7bdf64 100644 --- a/datafusion/optimizer/src/simplify_expressions/unwrap_cast.rs +++ b/datafusion/optimizer/src/simplify_expressions/unwrap_cast.rs @@ -90,7 +90,7 @@ pub(super) fn unwrap_cast_in_comparison_for_binary( // we need to unwrap the cast for cast/try_cast expr, and add cast to the literal let Some(value) = try_cast_literal_to_type(&lit_value, &expr_type) else { return internal_err!( - "Can't cast the literal expr {:?} to type {:?}", + "Can't cast the literal expr {:?} to type {}", &lit_value, &expr_type ); diff --git a/datafusion/physical-expr/src/expressions/binary.rs b/datafusion/physical-expr/src/expressions/binary.rs index abc963355bdac..1f733d53e5f75 100644 --- a/datafusion/physical-expr/src/expressions/binary.rs +++ b/datafusion/physical-expr/src/expressions/binary.rs @@ -179,7 +179,7 @@ macro_rules! binary_string_array_flag_op { compute_utf8_flag_op!($LEFT, $RIGHT, $OP, LargeStringArray, $NOT, $FLAG) }, other => internal_err!( - "Data type {:?} not supported for binary_string_array_flag_op operation '{}' on string array", + "Data type {} not supported for binary_string_array_flag_op operation '{}' on string array", other, stringify!($OP) ), } @@ -258,7 +258,7 @@ macro_rules! binary_string_array_flag_op_scalar { DataType::Utf8View => compute_utf8view_flag_op_scalar!(values, $RIGHT, $OP, StringViewArray, $NOT, $FLAG), DataType::LargeUtf8 => compute_utf8_flag_op_scalar!(values, $RIGHT, $OP, LargeStringArray, $NOT, $FLAG), other => internal_err!( - "Data type {:?} not supported as a dictionary value type for binary_string_array_flag_op_scalar operation '{}' on string array", + "Data type {} not supported as a dictionary value type for binary_string_array_flag_op_scalar operation '{}' on string array", other, stringify!($OP) ), }.map( @@ -273,7 +273,7 @@ macro_rules! binary_string_array_flag_op_scalar { ) }, other => internal_err!( - "Data type {:?} not supported for binary_string_array_flag_op_scalar operation '{}' on string array", + "Data type {} not supported for binary_string_array_flag_op_scalar operation '{}' on string array", other, stringify!($OP) ), }; @@ -731,7 +731,7 @@ fn to_result_type_array( Ok(cast(&array, result_type)?) 
} else { internal_err!( - "Incompatible Dictionary value type {value_type:?} with result type {result_type:?} of Binary operator {op:?}" + "Incompatible Dictionary value type {value_type} with result type {result_type} of Binary operator {op:?}" ) } } diff --git a/datafusion/physical-expr/src/expressions/binary/kernels.rs b/datafusion/physical-expr/src/expressions/binary/kernels.rs index ae26f3e842418..d2553146cbf19 100644 --- a/datafusion/physical-expr/src/expressions/binary/kernels.rs +++ b/datafusion/physical-expr/src/expressions/binary/kernels.rs @@ -71,7 +71,7 @@ macro_rules! create_dyn_kernel { call_bitwise_kernel!(left, right, $KERNEL, UInt64Array) } other => plan_err!( - "Data type {:?} not supported for binary operation '{}' on dyn arrays", + "Data type {} not supported for binary operation '{}' on dyn arrays", other, stringify!($KERNEL) ), @@ -117,7 +117,7 @@ macro_rules! create_dyn_scalar_kernel { DataType::UInt32 => call_bitwise_scalar_kernel!(array, scalar, $KERNEL, UInt32Array, u32), DataType::UInt64 => call_bitwise_scalar_kernel!(array, scalar, $KERNEL, UInt64Array, u64), other => plan_err!( - "Data type {:?} not supported for binary operation '{}' on dyn arrays", + "Data type {} not supported for binary operation '{}' on dyn arrays", other, stringify!($KERNEL) ), diff --git a/datafusion/physical-expr/src/expressions/cast.rs b/datafusion/physical-expr/src/expressions/cast.rs index c91678317b750..c1fd201e10fda 100644 --- a/datafusion/physical-expr/src/expressions/cast.rs +++ b/datafusion/physical-expr/src/expressions/cast.rs @@ -232,7 +232,7 @@ pub fn cast_with_options( } else if can_cast_types(&expr_type, &cast_type) { Ok(Arc::new(CastExpr::new(expr, cast_type, cast_options))) } else { - not_impl_err!("Unsupported CAST from {expr_type:?} to {cast_type:?}") + not_impl_err!("Unsupported CAST from {expr_type} to {cast_type}") } } diff --git a/datafusion/physical-expr/src/expressions/in_list.rs b/datafusion/physical-expr/src/expressions/in_list.rs index b6fe84ea51579..597ebbdd1234f 100644 --- a/datafusion/physical-expr/src/expressions/in_list.rs +++ b/datafusion/physical-expr/src/expressions/in_list.rs @@ -470,6 +470,7 @@ mod tests { use datafusion_common::plan_err; use datafusion_expr::type_coercion::binary::comparison_coercion; use datafusion_physical_expr_common::physical_expr::fmt_sql; + use itertools::Itertools as _; type InListCastResult = (Arc<dyn PhysicalExpr>, Vec<Arc<dyn PhysicalExpr>>); @@ -488,7 +489,8 @@ mod tests { let result_type = get_coerce_type(expr_type, &list_types); match result_type { None => plan_err!( - "Can not find compatible types to compare {expr_type:?} with {list_types:?}" + "Can not find compatible types to compare {expr_type} with [{}]", + list_types.iter().join(", ") ), Some(data_type) => { // find the coerced type diff --git a/datafusion/physical-expr/src/expressions/try_cast.rs b/datafusion/physical-expr/src/expressions/try_cast.rs index c5a58d5c6d853..b32aabbe5b006 100644 --- a/datafusion/physical-expr/src/expressions/try_cast.rs +++ b/datafusion/physical-expr/src/expressions/try_cast.rs @@ -142,7 +142,7 @@ pub fn try_cast( } else if can_cast_types(&expr_type, &cast_type) { Ok(Arc::new(TryCastExpr::new(expr, cast_type))) } else { - not_impl_err!("Unsupported TRY_CAST from {expr_type:?} to {cast_type:?}") + not_impl_err!("Unsupported TRY_CAST from {expr_type} to {cast_type}") } } diff --git a/datafusion/proto-common/src/to_proto/mod.rs b/datafusion/proto-common/src/to_proto/mod.rs index 2902a9ce54df3..8ba41fee6a5c2 100644 --- a/datafusion/proto-common/src/to_proto/mod.rs +++ 
b/datafusion/proto-common/src/to_proto/mod.rs @@ -65,7 +65,7 @@ impl std::fmt::Display for Error { write!(f, "{value:?} is invalid as a DataFusion scalar value") } Self::InvalidScalarType(data_type) => { - write!(f, "{data_type:?} is invalid as a DataFusion scalar type") + write!(f, "{data_type} is invalid as a DataFusion scalar type") } Self::InvalidTimeUnit(time_unit) => { write!( diff --git a/datafusion/proto/tests/cases/roundtrip_logical_plan.rs b/datafusion/proto/tests/cases/roundtrip_logical_plan.rs index c76036a4344fb..d8979201d5628 100644 --- a/datafusion/proto/tests/cases/roundtrip_logical_plan.rs +++ b/datafusion/proto/tests/cases/roundtrip_logical_plan.rs @@ -1587,7 +1587,7 @@ fn round_trip_scalar_values_and_data_types() { assert_eq!( dt, roundtrip, "DataType was not the same after round trip!\n\n\ - Input: {dt:?}\n\nRoundtrip: {roundtrip:?}" + Input: {dt}\n\nRoundtrip: {roundtrip}" ); } } diff --git a/datafusion/spark/src/function/array/spark_array.rs b/datafusion/spark/src/function/array/spark_array.rs index 1644cde7abc39..bf5842cb5a5a6 100644 --- a/datafusion/spark/src/function/array/spark_array.rs +++ b/datafusion/spark/src/function/array/spark_array.rs @@ -133,7 +133,7 @@ impl ScalarUDFImpl for SparkArray { if let Some(coerced_type) = coerced_type { Ok(coerced_type) } else { - plan_err!("Coercion from {acc:?} to {x:?} failed.") + plan_err!("Coercion from {acc} to {x} failed.") } })?; Ok(vec![new_type; arg_types.len()]) diff --git a/datafusion/spark/src/function/bitmap/bitmap_count.rs b/datafusion/spark/src/function/bitmap/bitmap_count.rs index 966b0930f0b69..e9d24585d741d 100644 --- a/datafusion/spark/src/function/bitmap/bitmap_count.rs +++ b/datafusion/spark/src/function/bitmap/bitmap_count.rs @@ -71,7 +71,7 @@ impl ScalarUDFImpl for BitmapCount { match arg_types.first() { Some(Binary | BinaryView | FixedSizeBinary(_) | LargeBinary) => Ok(Int64), Some(data_type) => plan_err!( - "bitmap_count expects Binary/BinaryView/FixedSizeBinary/LargeBinary as argument, got {:?}", + "bitmap_count expects Binary/BinaryView/FixedSizeBinary/LargeBinary as argument, got {}", data_type ), None => internal_err!("bitmap_count does not support zero arguments"), @@ -105,7 +105,7 @@ pub fn bitmap_count_inner(arg: &[ArrayRef]) -> Result { downcast_and_count_ones!(input_array, FixedSizeBinaryArray) } data_type => { - internal_err!("bitmap_count does not support {:?}", data_type) + internal_err!("bitmap_count does not support {data_type}") } }; diff --git a/datafusion/spark/src/function/bitwise/bit_count.rs b/datafusion/spark/src/function/bitwise/bit_count.rs index 73566b9dbdf43..ba44d3bc0a958 100644 --- a/datafusion/spark/src/function/bitwise/bit_count.rs +++ b/datafusion/spark/src/function/bitwise/bit_count.rs @@ -140,7 +140,7 @@ fn spark_bit_count(value_array: &[ArrayRef]) -> Result { } _ => { plan_err!( - "bit_count function does not support data type: {:?}", + "bit_count function does not support data type: {}", value_array.data_type() ) } diff --git a/datafusion/spark/src/function/bitwise/bit_shift.rs b/datafusion/spark/src/function/bitwise/bit_shift.rs index 79f62587c0dd3..bb645b7660584 100644 --- a/datafusion/spark/src/function/bitwise/bit_shift.rs +++ b/datafusion/spark/src/function/bitwise/bit_shift.rs @@ -194,7 +194,7 @@ trait BitShiftUDF: ScalarUDFImpl { } _ => { plan_err!( - "{} function does not support data type: {:?}", + "{} function does not support data type: {}", self.name(), value_array.data_type() ) diff --git a/datafusion/spark/src/function/math/factorial.rs 
b/datafusion/spark/src/function/math/factorial.rs index 3addc9700ece9..ebb489d4ceb71 100644 --- a/datafusion/spark/src/function/math/factorial.rs +++ b/datafusion/spark/src/function/math/factorial.rs @@ -111,7 +111,7 @@ pub fn spark_factorial(args: &[ColumnarValue]) -> Result { - exec_err!("`factorial` got an unexpected scalar type: {:?}", other) + exec_err!("`factorial` got an unexpected scalar type: {}", other) } ColumnarValue::Array(array) => match array.data_type() { Int32 => { @@ -122,7 +122,7 @@ pub fn spark_factorial(args: &[ColumnarValue]) -> Result { - exec_err!("`factorial` got an unexpected argument type: {:?}", other) + exec_err!("`factorial` got an unexpected argument type: {}", other) } }, } diff --git a/datafusion/spark/src/function/math/hex.rs b/datafusion/spark/src/function/math/hex.rs index 0cdf4f3a9a77e..120a053bb4f9c 100644 --- a/datafusion/spark/src/function/math/hex.rs +++ b/datafusion/spark/src/function/math/hex.rs @@ -272,7 +272,7 @@ pub fn compute_hex( .map(|v| v.map(|b| hex_bytes(b, lowercase)).transpose()) .collect::>()?, _ => exec_err!( - "hex got an unexpected argument type: {:?}", + "hex got an unexpected argument type: {}", array.data_type() )?, }; @@ -287,10 +287,7 @@ pub fn compute_hex( Ok(ColumnarValue::Array(Arc::new(string_array_values))) } - _ => exec_err!( - "hex got an unexpected argument type: {:?}", - array.data_type() - ), + _ => exec_err!("hex got an unexpected argument type: {}", array.data_type()), }, _ => exec_err!("native hex does not support scalar values at this time"), } diff --git a/datafusion/sql/Cargo.toml b/datafusion/sql/Cargo.toml index 7bd775fb5393c..21102d886f225 100644 --- a/datafusion/sql/Cargo.toml +++ b/datafusion/sql/Cargo.toml @@ -67,5 +67,6 @@ datafusion-functions-nested = { workspace = true } datafusion-functions-window = { workspace = true } env_logger = { workspace = true } insta = { workspace = true } +itertools = { workspace = true } paste = "^1.0" rstest = { workspace = true } diff --git a/datafusion/sql/src/planner.rs b/datafusion/sql/src/planner.rs index fc678a8f87112..79e8cd8e123f0 100644 --- a/datafusion/sql/src/planner.rs +++ b/datafusion/sql/src/planner.rs @@ -708,7 +708,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> { Ok(DataType::Time64(TimeUnit::Nanosecond)) } else { // We don't support TIMETZ and TIME WITH TIME ZONE for now - not_impl_err!("Unsupported SQL type {sql_type:?}") + not_impl_err!("Unsupported SQL type {sql_type}") } } SQLDataType::Numeric(exact_number_info) @@ -819,7 +819,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> { | SQLDataType::TsVector | SQLDataType::TsQuery | SQLDataType::GeometricType(_) => { - not_impl_err!("Unsupported SQL type {sql_type:?}") + not_impl_err!("Unsupported SQL type {sql_type}") } } } diff --git a/datafusion/sql/src/relation/mod.rs b/datafusion/sql/src/relation/mod.rs index aa37d74fd4d86..9dfa078701d3d 100644 --- a/datafusion/sql/src/relation/mod.rs +++ b/datafusion/sql/src/relation/mod.rs @@ -57,7 +57,7 @@ impl SqlToRel<'_, S> { planner_context, ) } else { - plan_err!("Unsupported function argument type: {:?}", arg) + plan_err!("Unsupported function argument type: {}", arg) } }) .collect::>(); diff --git a/datafusion/sql/src/unparser/expr.rs b/datafusion/sql/src/unparser/expr.rs index 271dbe030aa40..05d232120dfaf 100644 --- a/datafusion/sql/src/unparser/expr.rs +++ b/datafusion/sql/src/unparser/expr.rs @@ -1658,7 +1658,7 @@ impl Unparser<'_> { fn arrow_dtype_to_ast_dtype(&self, data_type: &DataType) -> Result<ast::DataType> { match data_type { DataType::Null => { - 
not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: {data_type}") } DataType::Boolean => Ok(ast::DataType::Bool), DataType::Int8 => Ok(ast::DataType::TinyInt(None)), @@ -1670,7 +1670,7 @@ impl Unparser<'_> { DataType::UInt32 => Ok(ast::DataType::IntegerUnsigned(None)), DataType::UInt64 => Ok(ast::DataType::BigIntUnsigned(None)), DataType::Float16 => { - not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: {data_type}") } DataType::Float32 => Ok(ast::DataType::Float(None)), DataType::Float64 => Ok(self.dialect.float64_ast_dtype()), @@ -1680,57 +1680,57 @@ impl Unparser<'_> { DataType::Date32 => Ok(self.dialect.date32_cast_dtype()), DataType::Date64 => Ok(self.ast_type_for_date64_in_cast()), DataType::Time32(_) => { - not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: {data_type}") } DataType::Time64(_) => { - not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: {data_type}") } DataType::Duration(_) => { - not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: {data_type}") } DataType::Interval(_) => Ok(ast::DataType::Interval), DataType::Binary => { - not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: {data_type}") } DataType::FixedSizeBinary(_) => { - not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: {data_type}") } DataType::LargeBinary => { - not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: {data_type}") } DataType::BinaryView => { - not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: {data_type}") } DataType::Utf8 => Ok(self.dialect.utf8_cast_dtype()), DataType::LargeUtf8 => Ok(self.dialect.large_utf8_cast_dtype()), DataType::Utf8View => Ok(self.dialect.utf8_cast_dtype()), DataType::List(_) => { - not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: {data_type}") } DataType::FixedSizeList(_, _) => { - not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: {data_type}") } DataType::LargeList(_) => { - not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: {data_type}") } DataType::ListView(_) => { - not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: {data_type}") } DataType::LargeListView(_) => { - not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: {data_type}") } DataType::Struct(_) => { - not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: {data_type}") } DataType::Union(_, _) => { - not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: {data_type}") } DataType::Dictionary(_, val) => self.arrow_dtype_to_ast_dtype(val), DataType::Decimal32(_precision, _scale) => { - not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: 
{data_type}") } DataType::Decimal64(_precision, _scale) => { - not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: {data_type}") } DataType::Decimal128(precision, scale) | DataType::Decimal256(precision, scale) => { @@ -1746,10 +1746,10 @@ impl Unparser<'_> { )) } DataType::Map(_, _) => { - not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: {data_type}") } DataType::RunEndEncoded(_, _) => { - not_impl_err!("Unsupported DataType: conversion: {data_type:?}") + not_impl_err!("Unsupported DataType: conversion: {data_type}") } } } diff --git a/datafusion/sql/tests/cases/params.rs b/datafusion/sql/tests/cases/params.rs index b429853fe92f1..343a90af3efb1 100644 --- a/datafusion/sql/tests/cases/params.rs +++ b/datafusion/sql/tests/cases/params.rs @@ -20,6 +20,7 @@ use arrow::datatypes::DataType; use datafusion_common::{assert_contains, ParamValues, ScalarValue}; use datafusion_expr::{LogicalPlan, Prepare, Statement}; use insta::assert_snapshot; +use itertools::Itertools as _; use std::collections::HashMap; pub struct ParameterTest<'a> { @@ -54,7 +55,7 @@ fn generate_prepare_stmt_and_data_types(sql: &str) -> (LogicalPlan, String) { let plan = logical_plan(sql).unwrap(); let data_types = match &plan { LogicalPlan::Statement(Statement::Prepare(Prepare { data_types, .. })) => { - format!("{data_types:?}") + data_types.iter().join(", ").to_string() } _ => panic!("Expected a Prepare statement"), }; @@ -160,7 +161,7 @@ fn test_prepare_statement_to_plan_no_param() { TableScan: person "# ); - assert_snapshot!(dt, @r#"[Int32]"#); + assert_snapshot!(dt, @r#"Int32"#); /////////////////// // replace params with values @@ -188,7 +189,7 @@ fn test_prepare_statement_to_plan_no_param() { TableScan: person "# ); - assert_snapshot!(dt, @r#"[]"#); + assert_snapshot!(dt, @r#""#); /////////////////// // replace params with values @@ -269,7 +270,7 @@ fn test_prepare_statement_to_plan_params_as_constants() { EmptyRelation: rows=1 "# ); - assert_snapshot!(dt, @r#"[Int32]"#); + assert_snapshot!(dt, @r#"Int32"#); /////////////////// // replace params with values @@ -294,7 +295,7 @@ fn test_prepare_statement_to_plan_params_as_constants() { EmptyRelation: rows=1 "# ); - assert_snapshot!(dt, @r#"[Int32]"#); + assert_snapshot!(dt, @r#"Int32"#); /////////////////// // replace params with values @@ -319,7 +320,7 @@ fn test_prepare_statement_to_plan_params_as_constants() { EmptyRelation: rows=1 "# ); - assert_snapshot!(dt, @r#"[Int32, Float64]"#); + assert_snapshot!(dt, @r#"Int32, Float64"#); /////////////////// // replace params with values @@ -686,7 +687,7 @@ fn test_prepare_statement_to_plan_one_param() { TableScan: person "# ); - assert_snapshot!(dt, @r#"[Int32]"#); + assert_snapshot!(dt, @r#"Int32"#); /////////////////// // replace params with values @@ -719,7 +720,7 @@ fn test_prepare_statement_to_plan_data_type() { TableScan: person "# ); - assert_snapshot!(dt, @r#"[Float64]"#); + assert_snapshot!(dt, @r#"Float64"#); /////////////////// // replace params with values still succeed and use Float64 @@ -752,7 +753,7 @@ fn test_prepare_statement_to_plan_multi_params() { TableScan: person "# ); - assert_snapshot!(dt, @r#"[Int32, Utf8View, Float64, Int32, Float64, Utf8View]"#); + assert_snapshot!(dt, @r#"Int32, Utf8View, Float64, Int32, Float64, Utf8View"#); /////////////////// // replace params with values @@ -797,7 +798,7 @@ fn test_prepare_statement_to_plan_having() { TableScan: person "# ); - 
assert_snapshot!(dt, @r#"[Int32, Float64, Float64, Float64]"#); + assert_snapshot!(dt, @r#"Int32, Float64, Float64, Float64"#); /////////////////// // replace params with values @@ -836,7 +837,7 @@ fn test_prepare_statement_to_plan_limit() { TableScan: person "# ); - assert_snapshot!(dt, @r#"[Int64, Int64]"#); + assert_snapshot!(dt, @r#"Int64, Int64"#); // replace params with values let param_values = vec![ScalarValue::Int64(Some(10)), ScalarValue::Int64(Some(200))]; diff --git a/datafusion/sql/tests/sql_integration.rs b/datafusion/sql/tests/sql_integration.rs index 9e16ab297e112..f66af28f436e6 100644 --- a/datafusion/sql/tests/sql_integration.rs +++ b/datafusion/sql/tests/sql_integration.rs @@ -4664,7 +4664,7 @@ fn test_custom_type_plan() -> Result<()> { let err = planner.statement_to_plan(ast.pop_front().unwrap()); assert_contains!( err.unwrap_err().to_string(), - "This feature is not implemented: Unsupported SQL type Datetime(None)" + "This feature is not implemented: Unsupported SQL type DATETIME" ); fn plan_sql(sql: &str) -> LogicalPlan { diff --git a/datafusion/sqllogictest/test_files/aggregate.slt b/datafusion/sqllogictest/test_files/aggregate.slt index caf8d637ec45e..e9f826b761fdf 100644 --- a/datafusion/sqllogictest/test_files/aggregate.slt +++ b/datafusion/sqllogictest/test_files/aggregate.slt @@ -148,10 +148,10 @@ SELECT c1, approx_percentile_cont(0.95, -1000) WITHIN GROUP (ORDER BY c3) AS c3_ statement error Failed to coerce arguments to satisfy a call to 'approx_percentile_cont' function SELECT approx_percentile_cont(0.95, c1) WITHIN GROUP (ORDER BY c3) FROM aggregate_test_100 -statement error DataFusion error: Error during planning: Failed to coerce arguments to satisfy a call to 'approx_percentile_cont' function: coercion from \[Int16, Float64, Float64\] to the signature OneOf(.*) failed(.|\n)* +statement error DataFusion error: Error during planning: Failed to coerce arguments to satisfy a call to 'approx_percentile_cont' function: coercion from Int16, Float64, Float64 to the signature OneOf(.*) failed(.|\n)* SELECT approx_percentile_cont(0.95, 111.1) WITHIN GROUP (ORDER BY c3) FROM aggregate_test_100 -statement error DataFusion error: Error during planning: Failed to coerce arguments to satisfy a call to 'approx_percentile_cont' function: coercion from \[Float64, Float64, Float64\] to the signature OneOf(.*) failed(.|\n)* +statement error DataFusion error: Error during planning: Failed to coerce arguments to satisfy a call to 'approx_percentile_cont' function: coercion from Float64, Float64, Float64 to the signature OneOf(.*) failed(.|\n)* SELECT approx_percentile_cont(0.95, 111.1) WITHIN GROUP (ORDER BY c12) FROM aggregate_test_100 statement error DataFusion error: This feature is not implemented: Percentile value for 'APPROX_PERCENTILE_CONT' must be a literal @@ -2367,7 +2367,7 @@ drop table t; # test count with largeutf8 statement ok -create table t (c string) as values +create table t (c string) as values (arrow_cast('a', 'LargeUtf8')), (arrow_cast('b', 'LargeUtf8')), (arrow_cast(null, 'LargeUtf8')), @@ -5047,7 +5047,7 @@ statement ok create table t (c1 decimal(10, 0), c2 int) as values (null, null), (null, null), (null, null); query RTIT -select +select sum(c1), arrow_typeof(sum(c1)), sum(c2), arrow_typeof(sum(c2)) from t; @@ -5299,7 +5299,7 @@ drop table dict_null_test; # avg_duration statement ok -create table d as values +create table d as values (arrow_cast(1, 'Duration(Second)'), arrow_cast(2, 'Duration(Millisecond)'), arrow_cast(3, 'Duration(Microsecond)'), 
arrow_cast(4, 'Duration(Nanosecond)'), 1), (arrow_cast(11, 'Duration(Second)'), arrow_cast(22, 'Duration(Millisecond)'), arrow_cast(33, 'Duration(Microsecond)'), arrow_cast(44, 'Duration(Nanosecond)'), 1); @@ -5353,7 +5353,7 @@ FROM d WHERE column1 IS NOT NULL; # Centered average window function query I?? -SELECT column5, column1, avg(column1) OVER (ORDER BY column5 ROWS BETWEEN 1 PRECEDING AND 1 FOLLOWING) as centered_avg +SELECT column5, column1, avg(column1) OVER (ORDER BY column5 ROWS BETWEEN 1 PRECEDING AND 1 FOLLOWING) as centered_avg FROM d WHERE column1 IS NOT NULL; ---- 1 0 days 0 hours 0 mins 1 secs 0 days 0 hours 0 mins 6 secs @@ -5806,7 +5806,7 @@ NULL NULL 3 NULL 1 4 0 8 0 # regr_*() basic tests query RRIRRRRRR -select +select regr_slope(column2, column1), regr_intercept(column2, column1), regr_count(column2, column1), @@ -5821,7 +5821,7 @@ from (values (1,2), (2,4), (3,6)); 2 0 3 1 2 4 2 8 4 query RRIRRRRRR -select +select regr_slope(c12, c11), regr_intercept(c12, c11), regr_count(c12, c11), @@ -5839,7 +5839,7 @@ from aggregate_test_100; # regr_*() functions ignore NULLs query RRIRRRRRR -select +select regr_slope(column2, column1), regr_intercept(column2, column1), regr_count(column2, column1), @@ -5854,7 +5854,7 @@ from (values (1,NULL), (2,4), (3,6)); 2 0 2 1 2.5 5 0.5 2 1 query RRIRRRRRR -select +select regr_slope(column2, column1), regr_intercept(column2, column1), regr_count(column2, column1), @@ -5869,7 +5869,7 @@ from (values (1,NULL), (NULL,4), (3,6)); NULL NULL 1 NULL 3 6 0 0 0 query RRIRRRRRR -select +select regr_slope(column2, column1), regr_intercept(column2, column1), regr_count(column2, column1), @@ -5884,8 +5884,8 @@ from (values (1,NULL), (NULL,4), (NULL,NULL)); NULL NULL 0 NULL NULL NULL NULL NULL NULL query TRRIRRRRRR rowsort -select - column3, +select + column3, regr_slope(column2, column1), regr_intercept(column2, column1), regr_count(column2, column1), @@ -5909,7 +5909,7 @@ statement ok set datafusion.execution.batch_size = 1; query RRIRRRRRR -select +select regr_slope(c12, c11), regr_intercept(c12, c11), regr_count(c12, c11), @@ -5927,7 +5927,7 @@ statement ok set datafusion.execution.batch_size = 2; query RRIRRRRRR -select +select regr_slope(c12, c11), regr_intercept(c12, c11), regr_count(c12, c11), @@ -5945,7 +5945,7 @@ statement ok set datafusion.execution.batch_size = 3; query RRIRRRRRR -select +select regr_slope(c12, c11), regr_intercept(c12, c11), regr_count(c12, c11), @@ -6185,7 +6185,7 @@ select string_agg(k, ',' order by when k = 'a' then 3 when k = 'b' then 0 when k = 'c' then 2 - when k = 'd' then 1 + when k = 'd' then 1 end) from t; ---- @@ -6197,7 +6197,7 @@ select string_agg(k, ',' order by when k = 'a' then 3 when k = 'b' then 0 when k = 'c' then 2 - when k = 'd' then 1 + when k = 'd' then 1 end desc) from t; ---- @@ -6208,7 +6208,7 @@ query TT explain select string_agg(k, ',' order by v) from t; ---- logical_plan -01)Aggregate: groupBy=[[]], aggr=[[string_agg(t.k, Utf8(",")) ORDER BY [t.v ASC NULLS LAST]]] +01)Aggregate: groupBy=[[]], aggr=[[string_agg(t.k, Utf8(",")) ORDER BY [t.v ASC NULLS LAST]]] 02)--TableScan: t projection=[k, v] physical_plan 01)AggregateExec: mode=Single, gby=[], aggr=[string_agg(t.k,Utf8(",")) ORDER BY [t.v ASC NULLS LAST]] @@ -6224,7 +6224,7 @@ query TT explain select string_agg(k, ',' order by v desc) from t; ---- logical_plan -01)Aggregate: groupBy=[[]], aggr=[[string_agg(t.k, Utf8(",")) ORDER BY [t.v DESC NULLS FIRST]]] +01)Aggregate: groupBy=[[]], aggr=[[string_agg(t.k, Utf8(",")) ORDER BY [t.v DESC NULLS 
FIRST]]] 02)--TableScan: t projection=[k, v] physical_plan 01)AggregateExec: mode=Single, gby=[], aggr=[string_agg(t.k,Utf8(",")) ORDER BY [t.v DESC NULLS FIRST]] @@ -6273,7 +6273,7 @@ CREATE TABLE float_table ( # Test string_agg with largeutf8 statement ok -create table string_agg_large_utf8 (c string) as values +create table string_agg_large_utf8 (c string) as values (arrow_cast('a', 'LargeUtf8')), (arrow_cast('b', 'LargeUtf8')), (arrow_cast('c', 'LargeUtf8')) @@ -6328,7 +6328,7 @@ select count(*) from (select count(*) a, count(*) b from (select 1)); # UTF8 string matters for string to &[u8] conversion, add it to prevent regression statement ok -create table distinct_count_string_table as values +create table distinct_count_string_table as values (1, 'a', 'longstringtest_a', '台灣'), (2, 'b', 'longstringtest_b1', '日本'), (2, 'b', 'longstringtest_b2', '中國'), diff --git a/datafusion/sqllogictest/test_files/errors.slt b/datafusion/sqllogictest/test_files/errors.slt index 6b80f56bcf575..3e60423df8a0e 100644 --- a/datafusion/sqllogictest/test_files/errors.slt +++ b/datafusion/sqllogictest/test_files/errors.slt @@ -120,7 +120,7 @@ from aggregate_test_100 order by c9 # WindowFunction wrong signature -statement error DataFusion error: Error during planning: Failed to coerce arguments to satisfy a call to 'nth_value' function: coercion from \[Int32, Int64, Int64\] to the signature OneOf\(\[Any\(0\), Any\(1\), Any\(2\)\]\) failed +statement error DataFusion error: Error during planning: Failed to coerce arguments to satisfy a call to 'nth_value' function: coercion from Int32, Int64, Int64 to the signature OneOf\(\[Any\(0\), Any\(1\), Any\(2\)\]\) failed select c9, nth_value(c5, 2, 3) over (order by c9) as nv1 diff --git a/datafusion/substrait/src/logical_plan/consumer/expr/mod.rs b/datafusion/substrait/src/logical_plan/consumer/expr/mod.rs index d701827671902..7358f1422f1b4 100644 --- a/datafusion/substrait/src/logical_plan/consumer/expr/mod.rs +++ b/datafusion/substrait/src/logical_plan/consumer/expr/mod.rs @@ -93,7 +93,7 @@ pub async fn from_substrait_rex( consumer.consume_dynamic_parameter(expr, input_schema).await } }, - None => substrait_err!("Expression must set rex_type: {:?}", expression), + None => substrait_err!("Expression must set rex_type: {expression:?}"), } } diff --git a/datafusion/substrait/src/logical_plan/consumer/expr/subquery.rs b/datafusion/substrait/src/logical_plan/consumer/expr/subquery.rs index f7e4c2bb0fbd1..917bcc007716b 100644 --- a/datafusion/substrait/src/logical_plan/consumer/expr/subquery.rs +++ b/datafusion/substrait/src/logical_plan/consumer/expr/subquery.rs @@ -90,13 +90,12 @@ pub async fn from_subquery( ))) } other_type => substrait_err!( - "unimplemented type {:?} for set predicate", - other_type + "unimplemented type {other_type:?} for set predicate" ), } } other_type => { - substrait_err!("Subquery type {:?} not implemented", other_type) + substrait_err!("Subquery type {other_type:?} not implemented") } }, None => { diff --git a/datafusion/substrait/src/logical_plan/consumer/rel/join_rel.rs b/datafusion/substrait/src/logical_plan/consumer/rel/join_rel.rs index ade8a4e77e65a..ef78e1c97173b 100644 --- a/datafusion/substrait/src/logical_plan/consumer/rel/join_rel.rs +++ b/datafusion/substrait/src/logical_plan/consumer/rel/join_rel.rs @@ -148,6 +148,6 @@ fn from_substrait_jointype(join_type: i32) -> datafusion::common::Result plan_err!("unsupported join type {substrait_join_type:?}"), } } else { - plan_err!("invalid join type variant {join_type:?}") + 
plan_err!("invalid join type variant {join_type}") } } diff --git a/datafusion/substrait/src/logical_plan/consumer/rel/read_rel.rs b/datafusion/substrait/src/logical_plan/consumer/rel/read_rel.rs index 3ea318b214631..48e93c04bb034 100644 --- a/datafusion/substrait/src/logical_plan/consumer/rel/read_rel.rs +++ b/datafusion/substrait/src/logical_plan/consumer/rel/read_rel.rs @@ -221,7 +221,7 @@ pub async fn from_read_rel( .await } _ => { - not_impl_err!("Unsupported ReadType: {:?}", read.read_type) + not_impl_err!("Unsupported Readtype: {:?}", read.read_type) } } } diff --git a/datafusion/substrait/src/logical_plan/producer/rel/aggregate_rel.rs b/datafusion/substrait/src/logical_plan/producer/rel/aggregate_rel.rs index 4abd283a7ee0b..917959ea7ddae 100644 --- a/datafusion/substrait/src/logical_plan/producer/rel/aggregate_rel.rs +++ b/datafusion/substrait/src/logical_plan/producer/rel/aggregate_rel.rs @@ -174,7 +174,7 @@ pub fn to_substrait_agg_measure( to_substrait_agg_measure(producer, expr, schema) } _ => internal_err!( - "Expression must be compatible with aggregation. Unsupported expression: {:?}. ExpressionType: {:?}", + "Expression must be compatible with aggregation. Unsupported expression: {:?}. Expressiontype: {}", expr, expr.variant_name() ), diff --git a/datafusion/substrait/src/logical_plan/producer/types.rs b/datafusion/substrait/src/logical_plan/producer/types.rs index d819c2042c08a..3da9269c5b9e3 100644 --- a/datafusion/substrait/src/logical_plan/producer/types.rs +++ b/datafusion/substrait/src/logical_plan/producer/types.rs @@ -325,7 +325,7 @@ pub(crate) fn to_substrait_type( precision: *p as i32, })), }), - _ => not_impl_err!("Unsupported cast type: {dt:?}"), + _ => not_impl_err!("Unsupported cast type: {dt}"), } } @@ -446,7 +446,7 @@ mod tests { } fn round_trip_type(dt: DataType) -> Result<()> { - println!("Checking round trip of {dt:?}"); + println!("Checking round trip of {dt}"); // As DataFusion doesn't consider nullability as a property of the type, but field, // it doesn't matter if we set nullability to true or false here. diff --git a/datafusion/substrait/src/physical_plan/consumer.rs b/datafusion/substrait/src/physical_plan/consumer.rs index 4990054ac7fc7..ecf465dd3f18d 100644 --- a/datafusion/substrait/src/physical_plan/consumer.rs +++ b/datafusion/substrait/src/physical_plan/consumer.rs @@ -166,7 +166,7 @@ pub async fn from_substrait_rel( ), } } - _ => not_impl_err!("Unsupported RelType: {:?}", rel.rel_type), + _ => not_impl_err!("Unsupported Reltype: {:?}", rel.rel_type), } } diff --git a/datafusion/substrait/tests/cases/roundtrip_logical_plan.rs b/datafusion/substrait/tests/cases/roundtrip_logical_plan.rs index 616dc917efe42..bd5229554d6ca 100644 --- a/datafusion/substrait/tests/cases/roundtrip_logical_plan.rs +++ b/datafusion/substrait/tests/cases/roundtrip_logical_plan.rs @@ -1475,7 +1475,7 @@ fn check_post_join_filters(rel: &Rel) -> Result<()> { } Some(RelType::ExtensionLeaf(_)) | Some(RelType::Read(_)) => Ok(()), _ => not_impl_err!( - "Unsupported RelType: {:?} in post join filter check", + "Unsupported Reltype: {:?} in post join filter check", rel.rel_type ), }