Skip to content

Commit

Permalink
Migrate OrderSensitiveArrayAgg to be a user defined aggregate (#11564)
Browse files Browse the repository at this point in the history
* first draft

Signed-off-by: jayzhan211 <[email protected]>

* rm old agg

Signed-off-by: jayzhan211 <[email protected]>

* replace udaf with internal function - create aggregate with dfschema

Signed-off-by: jayzhan211 <[email protected]>

* rm test

Signed-off-by: jayzhan211 <[email protected]>

* cleanup

Signed-off-by: jayzhan211 <[email protected]>

* rm useless

Signed-off-by: jayzhan211 <[email protected]>

* fix test

Signed-off-by: jayzhan211 <[email protected]>

* rename

Signed-off-by: jayzhan211 <[email protected]>

---------

Signed-off-by: jayzhan211 <[email protected]>
  • Loading branch information
jayzhan211 committed Jul 23, 2024
1 parent a2ac00d commit d941dc3
Show file tree
Hide file tree
Showing 23 changed files with 681 additions and 652 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -429,6 +429,7 @@ pub(crate) mod tests {
self.column_name(),
false,
false,
false,
)
.unwrap()
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -288,6 +288,7 @@ mod tests {
name,
false,
false,
false,
)
.unwrap()
}
Expand Down Expand Up @@ -378,6 +379,7 @@ mod tests {
"Sum(b)",
false,
false,
false,
)?];
let groups: Vec<(Arc<dyn PhysicalExpr>, String)> =
vec![(col("c", &schema)?, "c".to_string())];
Expand Down
35 changes: 6 additions & 29 deletions datafusion/core/src/physical_planner.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1839,34 +1839,7 @@ pub fn create_aggregate_expr_with_name_and_maybe_filter(
.unwrap_or(sqlparser::ast::NullTreatment::RespectNulls)
== NullTreatment::IgnoreNulls;

// TODO: Remove this after array_agg are all udafs
let (agg_expr, filter, order_by) = match func_def {
AggregateFunctionDefinition::UDF(udf)
if udf.name() == "ARRAY_AGG" && order_by.is_some() =>
{
// not yet support UDAF, fallback to builtin
let physical_sort_exprs = match order_by {
Some(exprs) => Some(create_physical_sort_exprs(
exprs,
logical_input_schema,
execution_props,
)?),
None => None,
};
let ordering_reqs: Vec<PhysicalSortExpr> =
physical_sort_exprs.clone().unwrap_or(vec![]);
let fun = aggregates::AggregateFunction::ArrayAgg;
let agg_expr = aggregates::create_aggregate_expr(
&fun,
*distinct,
&physical_args,
&ordering_reqs,
physical_input_schema,
name,
ignore_nulls,
)?;
(agg_expr, filter, physical_sort_exprs)
}
AggregateFunctionDefinition::BuiltIn(fun) => {
let physical_sort_exprs = match order_by {
Some(exprs) => Some(create_physical_sort_exprs(
Expand Down Expand Up @@ -1899,19 +1872,23 @@ pub fn create_aggregate_expr_with_name_and_maybe_filter(
)?),
None => None,
};

let ordering_reqs: Vec<PhysicalSortExpr> =
physical_sort_exprs.clone().unwrap_or(vec![]);
let agg_expr = udaf::create_aggregate_expr(

let agg_expr = udaf::create_aggregate_expr_with_dfschema(
fun,
&physical_args,
args,
&sort_exprs,
&ordering_reqs,
physical_input_schema,
logical_input_schema,
name,
ignore_nulls,
*distinct,
false,
)?;

(agg_expr, filter, physical_sort_exprs)
}
};
Expand Down
1 change: 1 addition & 0 deletions datafusion/core/tests/fuzz_cases/aggregate_fuzz.rs
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,7 @@ async fn run_aggregate_test(input1: Vec<RecordBatch>, group_by_columns: Vec<&str
"sum1",
false,
false,
false,
)
.unwrap()];
let expr = group_by_columns
Expand Down
8 changes: 7 additions & 1 deletion datafusion/expr/src/function.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
use crate::ColumnarValue;
use crate::{Accumulator, Expr, PartitionEvaluator};
use arrow::datatypes::{DataType, Field, Schema};
use datafusion_common::Result;
use datafusion_common::{DFSchema, Result};
use std::sync::Arc;

#[derive(Debug, Clone, Copy)]
Expand Down Expand Up @@ -57,6 +57,9 @@ pub struct AccumulatorArgs<'a> {
/// The schema of the input arguments
pub schema: &'a Schema,

/// The schema of the input arguments
pub dfschema: &'a DFSchema,

/// Whether to ignore nulls.
///
/// SQL allows the user to specify `IGNORE NULLS`, for example:
Expand All @@ -78,6 +81,9 @@ pub struct AccumulatorArgs<'a> {
/// If no `ORDER BY` is specified, `sort_exprs`` will be empty.
pub sort_exprs: &'a [Expr],

/// Whether the aggregation is running in reverse order
pub is_reversed: bool,

/// The name of the aggregate expression
pub name: &'a str,

Expand Down
9 changes: 6 additions & 3 deletions datafusion/functions-aggregate/src/approx_percentile_cont.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,8 @@ use arrow::{
use arrow_schema::{Field, Schema};

use datafusion_common::{
downcast_value, internal_err, not_impl_err, plan_err, DataFusionError, ScalarValue,
downcast_value, internal_err, not_impl_err, plan_err, DFSchema, DataFusionError,
ScalarValue,
};
use datafusion_expr::function::{AccumulatorArgs, StateFieldsArgs};
use datafusion_expr::type_coercion::aggregates::{INTEGERS, NUMERICS};
Expand All @@ -42,7 +43,7 @@ use datafusion_expr::{
use datafusion_physical_expr_common::aggregate::tdigest::{
TDigest, TryIntoF64, DEFAULT_MAX_SIZE,
};
use datafusion_physical_expr_common::utils::limited_convert_logical_expr_to_physical_expr;
use datafusion_physical_expr_common::utils::limited_convert_logical_expr_to_physical_expr_with_dfschema;

make_udaf_expr_and_func!(
ApproxPercentileCont,
Expand Down Expand Up @@ -135,7 +136,9 @@ impl ApproxPercentileCont {
fn get_lit_value(expr: &Expr) -> datafusion_common::Result<ScalarValue> {
let empty_schema = Arc::new(Schema::empty());
let empty_batch = RecordBatch::new_empty(Arc::clone(&empty_schema));
let expr = limited_convert_logical_expr_to_physical_expr(expr, &empty_schema)?;
let dfschema = DFSchema::empty();
let expr =
limited_convert_logical_expr_to_physical_expr_with_dfschema(expr, &dfschema)?;
let result = expr.evaluate(&empty_batch)?;
match result {
ColumnarValue::Array(_) => Err(DataFusionError::Internal(format!(
Expand Down
Loading

0 comments on commit d941dc3

Please sign in to comment.