RFC: Demonstrate what a function package might look like -- encoding expressions #8046

Closed
wants to merge 24 commits into from
Closed
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions Cargo.toml
@@ -22,6 +22,7 @@ members = [
"datafusion/core",
"datafusion/expr",
"datafusion/execution",
"datafusion/functions",
"datafusion/optimizer",
"datafusion/physical-expr",
"datafusion/physical-plan",
@@ -61,6 +62,7 @@ ctor = "0.2.0"
datafusion = { path = "datafusion/core" }
datafusion-common = { path = "datafusion/common" }
datafusion-expr = { path = "datafusion/expr" }
datafusion-functions = { path = "datafusion/functions" }
datafusion-sql = { path = "datafusion/sql" }
datafusion-optimizer = { path = "datafusion/optimizer" }
datafusion-physical-expr = { path = "datafusion/physical-expr" }
3 changes: 2 additions & 1 deletion datafusion/core/Cargo.toml
@@ -40,7 +40,7 @@ backtrace = ["datafusion-common/backtrace"]
compression = ["xz2", "bzip2", "flate2", "zstd", "async-compression"]
crypto_expressions = ["datafusion-physical-expr/crypto_expressions", "datafusion-optimizer/crypto_expressions"]
default = ["crypto_expressions", "encoding_expressions", "regex_expressions", "unicode_expressions", "compression", "parquet"]
encoding_expressions = ["datafusion-physical-expr/encoding_expressions"]
encoding_expressions = ["datafusion-functions/encoding_expressions"]
# Used for testing ONLY: causes all values to hash to the same value (test for collisions)
force_hash_collisions = []
parquet = ["datafusion-common/parquet", "dep:parquet"]
@@ -65,6 +65,7 @@ dashmap = { workspace = true }
datafusion-common = { path = "../common", version = "32.0.0", features = ["object_store"], default-features = false }
datafusion-execution = { workspace = true }
datafusion-expr = { workspace = true }
datafusion-functions = { workspace = true }
datafusion-optimizer = { path = "../optimizer", version = "32.0.0", default-features = false }
datafusion-physical-expr = { path = "../physical-expr", version = "32.0.0", default-features = false }
datafusion-physical-plan = { workspace = true }
2 changes: 1 addition & 1 deletion datafusion/core/src/dataframe/mod.rs
@@ -59,7 +59,7 @@ use crate::logical_expr::{
use crate::physical_plan::SendableRecordBatchStream;
use crate::physical_plan::{collect, collect_partitioned};
use crate::physical_plan::{execute_stream, execute_stream_partitioned, ExecutionPlan};
use crate::prelude::SessionContext;
use crate::prelude::*;

/// Contains options that control how data is
/// written out from a DataFrame
2 changes: 1 addition & 1 deletion datafusion/core/src/datasource/listing/helpers.rs
@@ -102,7 +102,7 @@ pub fn expr_applicable_for_cols(col_names: &[String], expr: &Expr) -> bool {
}
}
Expr::ScalarUDF(ScalarUDF { fun, .. }) => {
match fun.signature.volatility {
match fun.signature().volatility {
Volatility::Immutable => VisitRecursion::Continue,
// TODO: Stable functions could be `applicable`, but that would require access to the context
Volatility::Stable | Volatility::Volatile => {
22 changes: 7 additions & 15 deletions datafusion/core/src/execution/context/mod.rs
@@ -806,7 +806,7 @@ impl SessionContext {
self.state
.write()
.scalar_functions
.insert(f.name.clone(), Arc::new(f));
.insert(f.name().to_string(), Arc::new(f));
}

/// Registers an aggregate UDF within this context.
@@ -1298,14 +1298,18 @@ impl SessionState {
);
}

// register built in functions
let mut scalar_functions = HashMap::new();
datafusion_functions::register_all(&mut scalar_functions);

SessionState {
session_id,
analyzer: Analyzer::new(),
optimizer: Optimizer::new(),
physical_optimizers: PhysicalOptimizer::new(),
query_planner: Arc::new(DefaultQueryPlanner {}),
catalog_list,
scalar_functions: HashMap::new(),
scalar_functions,
aggregate_functions: HashMap::new(),
window_functions: HashMap::new(),
serializer_registry: Arc::new(EmptySerializerRegistry),
@@ -1315,19 +1319,7 @@
table_factories,
}
}
/// Returns new [`SessionState`] using the provided
/// [`SessionConfig`] and [`RuntimeEnv`].
#[deprecated(
since = "32.0.0",
note = "Use SessionState::new_with_config_rt_and_catalog_list"
)]
pub fn with_config_rt_and_catalog_list(
config: SessionConfig,
runtime: Arc<RuntimeEnv>,
catalog_list: Arc<dyn CatalogList>,
) -> Self {
Self::new_with_config_rt_and_catalog_list(config, runtime, catalog_list)
}

fn register_default_schema(
config: &SessionConfig,
table_factories: &HashMap<String, Arc<dyn TableProviderFactory>>,
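The SessionState constructor above now seeds `scalar_functions` by building a mutable map and handing it to `datafusion_functions::register_all`, instead of starting from an empty `HashMap`. A minimal sketch of what such a registration entry point could look like, assuming the crate hands out its UDFs as `Arc<ScalarUDF>` values; `all_functions` is a hypothetical helper, not the PR's exact API:

```rust
use std::collections::HashMap;
use std::sync::Arc;

use datafusion_expr::ScalarUDF;

/// Sketch: insert every UDF the crate provides into the session's
/// scalar-function map, keyed by name, mirroring the keyed-by-name insertion
/// `SessionContext` uses when a UDF is registered manually.
pub fn register_all(registry: &mut HashMap<String, Arc<ScalarUDF>>) {
    for udf in all_functions() {
        registry.insert(udf.name().to_string(), udf);
    }
}

/// Hypothetical helper returning the crate's UDFs (encode, decode, ...);
/// the real crate may construct and expose them differently.
fn all_functions() -> Vec<Arc<ScalarUDF>> {
    Vec::new()
}
```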
2 changes: 1 addition & 1 deletion datafusion/core/src/physical_planner.rs
@@ -222,7 +222,7 @@ fn create_physical_name(e: &Expr, is_first_expr: bool) -> Result<String> {
create_function_physical_name(&func.fun.to_string(), false, &func.args)
}
Expr::ScalarUDF(ScalarUDF { fun, args }) => {
create_function_physical_name(&fun.name, false, args)
create_function_physical_name(fun.name(), false, args)
}
Expr::WindowFunction(WindowFunction { fun, args, .. }) => {
create_function_physical_name(&fun.to_string(), false, args)
52 changes: 0 additions & 52 deletions datafusion/expr/src/built_in_function.rs
@@ -72,14 +72,10 @@ pub enum BuiltinScalarFunction {
Cos,
/// cos
Cosh,
/// Decode
Decode,
/// degrees
Degrees,
/// Digest
Digest,
/// Encode
Encode,
/// exp
Exp,
/// factorial
@@ -353,9 +349,7 @@ impl BuiltinScalarFunction {
BuiltinScalarFunction::Coalesce => Volatility::Immutable,
BuiltinScalarFunction::Cos => Volatility::Immutable,
BuiltinScalarFunction::Cosh => Volatility::Immutable,
BuiltinScalarFunction::Decode => Volatility::Immutable,
BuiltinScalarFunction::Degrees => Volatility::Immutable,
BuiltinScalarFunction::Encode => Volatility::Immutable,
BuiltinScalarFunction::Exp => Volatility::Immutable,
BuiltinScalarFunction::Factorial => Volatility::Immutable,
BuiltinScalarFunction::Floor => Volatility::Immutable,
@@ -710,30 +704,6 @@ impl BuiltinScalarFunction {
BuiltinScalarFunction::Digest => {
utf8_or_binary_to_binary_type(&input_expr_types[0], "digest")
}
BuiltinScalarFunction::Encode => Ok(match input_expr_types[0] {
@alamb (Contributor Author) commented on Nov 3, 2023: This metadata about the functions is now moved into the functions/encoding.rs module, alongside its implementation.

Utf8 => Utf8,
LargeUtf8 => LargeUtf8,
Binary => Utf8,
LargeBinary => LargeUtf8,
Null => Null,
_ => {
return plan_err!(
"The encode function can only accept utf8 or binary."
);
}
}),
BuiltinScalarFunction::Decode => Ok(match input_expr_types[0] {
Utf8 => Binary,
LargeUtf8 => LargeBinary,
Binary => Binary,
LargeBinary => LargeBinary,
Null => Null,
_ => {
return plan_err!(
"The decode function can only accept utf8 or binary."
);
}
}),
BuiltinScalarFunction::SplitPart => {
utf8_to_str_type(&input_expr_types[0], "split_part")
}
@@ -1038,24 +1008,6 @@
],
self.volatility(),
),
BuiltinScalarFunction::Encode => Signature::one_of(
vec![
Exact(vec![Utf8, Utf8]),
Exact(vec![LargeUtf8, Utf8]),
Exact(vec![Binary, Utf8]),
Exact(vec![LargeBinary, Utf8]),
],
self.volatility(),
),
BuiltinScalarFunction::Decode => Signature::one_of(
vec![
Exact(vec![Utf8, Utf8]),
Exact(vec![LargeUtf8, Utf8]),
Exact(vec![Binary, Utf8]),
Exact(vec![LargeBinary, Utf8]),
],
self.volatility(),
),
BuiltinScalarFunction::DateTrunc => Signature::one_of(
vec![
Exact(vec![Utf8, Timestamp(Nanosecond, None)]),
@@ -1461,10 +1413,6 @@ fn aliases(func: &BuiltinScalarFunction) -> &'static [&'static str] {
BuiltinScalarFunction::SHA384 => &["sha384"],
BuiltinScalarFunction::SHA512 => &["sha512"],

// encode/decode
BuiltinScalarFunction::Encode => &["encode"],
BuiltinScalarFunction::Decode => &["decode"],

// other functions
BuiltinScalarFunction::ArrowTypeof => &["arrow_typeof"],

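As the inline review comment above notes, the metadata for `encode` and `decode` removed from this file (return types, signatures, aliases) moves into the new functions/encoding.rs module. A rough sketch of how the deleted return-type rule for `encode` could be expressed there as a free function; the name and placement are assumptions, but the type mapping is copied verbatim from the removed match arm:

```rust
use arrow_schema::DataType;
use datafusion_common::{plan_err, Result};

/// Return type of `encode(expression, format)`, reproducing the mapping from
/// the removed `BuiltinScalarFunction::Encode` arm.
fn encode_return_type(arg_types: &[DataType]) -> Result<DataType> {
    use DataType::*;
    Ok(match arg_types[0] {
        Utf8 => Utf8,
        LargeUtf8 => LargeUtf8,
        Binary => Utf8,
        LargeBinary => LargeUtf8,
        Null => Null,
        _ => return plan_err!("The encode function can only accept utf8 or binary."),
    })
}
```

The `decode` rule would follow the same shape, mapping string inputs to `Binary`/`LargeBinary` as in the removed `Decode` arm.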
4 changes: 2 additions & 2 deletions datafusion/expr/src/expr.rs
@@ -1178,7 +1178,7 @@ impl fmt::Display for Expr {
fmt_function(f, &func.fun.to_string(), false, &func.args, true)
}
Expr::ScalarUDF(ScalarUDF { fun, args }) => {
fmt_function(f, &fun.name, false, args, true)
fmt_function(f, fun.name(), false, args, true)
}
Expr::WindowFunction(WindowFunction {
fun,
@@ -1512,7 +1512,7 @@ fn create_name(e: &Expr) -> Result<String> {
create_function_name(&func.fun.to_string(), false, &func.args)
}
Expr::ScalarUDF(ScalarUDF { fun, args }) => {
create_function_name(&fun.name, false, args)
create_function_name(fun.name(), false, args)
}
Expr::WindowFunction(WindowFunction {
fun,
35 changes: 5 additions & 30 deletions datafusion/expr/src/expr_fn.rs
@@ -737,8 +737,9 @@ scalar_expr!(
"converts the Unicode code point to a UTF8 character"
);
scalar_expr!(Digest, digest, input algorithm, "compute the binary hash of `input`, using the `algorithm`");
scalar_expr!(Encode, encode, input encoding, "encode the `input`, using the `encoding`. encoding can be base64 or hex");
scalar_expr!(Decode, decode, input encoding, "decode the`input`, using the `encoding`. encoding can be base64 or hex");
// TODO make a variant of Expr that can invoke a function by name
//scalar_udf!(encode, datafusion_functions::encoding::encode_udf, input encoding, "encode the `input`, using the `encoding`. encoding can be base64 or hex");
//scalar_expr!(Decode, decode, input encoding, "decode the`input`, using the `encoding`. encoding can be base64 or hex");
scalar_expr!(InitCap, initcap, string, "converts the first letter of each word in `string` in uppercase and the remaining characters in lowercase");
scalar_expr!(Left, left, string n, "returns the first `n` characters in the `string`");
scalar_expr!(Lower, lower, string, "convert the string to lower case");
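The TODO above and the commented-out `scalar_udf!` line leave open how the `encode`/`decode` expression helpers should be rebuilt once the functions no longer live in `BuiltinScalarFunction`. One possible interim shape (constructing the expression from the UDF object rather than the by-name `Expr` variant the TODO suggests) is sketched below; `encode_udf()` returning an `Arc`'d `ScalarUDF` is an assumption, while the `fun`/`args` field names match the `Expr::ScalarUDF` variant destructured elsewhere in this diff:

```rust
use datafusion_expr::expr::ScalarUDF as ScalarUDFExpr;
use datafusion_expr::Expr;

/// Hypothetical replacement for the old `encode` expr_fn helper, building an
/// `Expr::ScalarUDF` node from the UDF defined in datafusion-functions.
pub fn encode(input: Expr, encoding: Expr) -> Expr {
    Expr::ScalarUDF(ScalarUDFExpr {
        // Assumed constructor in the new crate, returning Arc<ScalarUDF>.
        fun: datafusion_functions::encoding::encode_udf(),
        args: vec![input, encoding],
    })
}
```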
@@ -1070,8 +1071,8 @@ mod test {
test_scalar_expr!(CharacterLength, character_length, string);
test_scalar_expr!(Chr, chr, string);
test_scalar_expr!(Digest, digest, string, algorithm);
test_scalar_expr!(Encode, encode, string, encoding);
test_scalar_expr!(Decode, decode, string, encoding);
//test_scalar_expr!(Encode, encode, string, encoding);
//test_scalar_expr!(Decode, decode, string, encoding);
test_scalar_expr!(Gcd, gcd, arg_1, arg_2);
test_scalar_expr!(Lcm, lcm, arg_1, arg_2);
test_scalar_expr!(InitCap, initcap, string);
@@ -1171,30 +1172,4 @@
unreachable!();
}
}

#[test]
fn encode_function_definitions() {
@alamb (Contributor Author) commented on Nov 19, 2023: I don't think these tests add much -- they simply restate the signature. The same behavior is already covered by calling encode() via the expr API in the dataframe tests.

if let Expr::ScalarFunction(ScalarFunction { fun, args }) =
encode(col("tableA.a"), lit("base64"))
{
let name = BuiltinScalarFunction::Encode;
assert_eq!(name, fun);
assert_eq!(2, args.len());
} else {
unreachable!();
}
}

#[test]
fn decode_function_definitions() {
if let Expr::ScalarFunction(ScalarFunction { fun, args }) =
decode(col("tableA.a"), lit("hex"))
{
let name = BuiltinScalarFunction::Decode;
assert_eq!(name, fun);
assert_eq!(2, args.len());
} else {
unreachable!();
}
}
}
2 changes: 1 addition & 1 deletion datafusion/expr/src/expr_schema.rs
@@ -86,7 +86,7 @@ impl ExprSchemable for Expr {
.iter()
.map(|e| e.get_type(schema))
.collect::<Result<Vec<_>>>()?;
Ok((fun.return_type)(&data_types)?.as_ref().clone())
Ok(fun.return_type(&data_types)?)
}
Expr::ScalarFunction(ScalarFunction { fun, args }) => {
let data_types = args
2 changes: 1 addition & 1 deletion datafusion/expr/src/lib.rs
@@ -79,7 +79,7 @@ pub use signature::{
};
pub use table_source::{TableProviderFilterPushDown, TableSource, TableType};
pub use udaf::AggregateUDF;
pub use udf::ScalarUDF;
pub use udf::{FunctionImplementation, ScalarUDF};
pub use udwf::WindowUDF;
pub use window_frame::{WindowFrame, WindowFrameBound, WindowFrameUnits};
pub use window_function::{BuiltInWindowFunction, WindowFunction};
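The re-export above makes the new `FunctionImplementation` trait public next to `ScalarUDF`. Its definition is not part of this diff; the following is an inferred sketch built only from the accessors used elsewhere in the change set (`name()`, `signature()`, `return_type()`), plus an assumed `invoke` method and `Send + Sync` bound; the real trait in datafusion/expr/src/udf.rs may differ:

```rust
use arrow_schema::DataType;
use datafusion_common::Result;
use datafusion_expr::{ColumnarValue, Signature};

/// Inferred sketch of the trait a function package implements per scalar
/// function; not the PR's exact definition.
pub trait FunctionImplementation: Send + Sync {
    /// Primary name the function is registered under.
    fn name(&self) -> &str;
    /// Accepted argument types.
    fn signature(&self) -> &Signature;
    /// Output type for the given argument types.
    fn return_type(&self, arg_types: &[DataType]) -> Result<DataType>;
    /// Evaluate the function against the given arguments (assumed method).
    fn invoke(&self, args: &[ColumnarValue]) -> Result<ColumnarValue>;
}
```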