diff --git a/native/spark-expr/src/json_funcs/mod.rs b/native/spark-expr/src/json_funcs/mod.rs
new file mode 100644
index 0000000000..de3037590d
--- /dev/null
+++ b/native/spark-expr/src/json_funcs/mod.rs
@@ -0,0 +1,20 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+mod to_json;
+
+pub use to_json::ToJson;
diff --git a/native/spark-expr/src/to_json.rs b/native/spark-expr/src/json_funcs/to_json.rs
similarity index 99%
rename from native/spark-expr/src/to_json.rs
rename to native/spark-expr/src/json_funcs/to_json.rs
index 91b46c6f04..3389ea3a0e 100644
--- a/native/spark-expr/src/to_json.rs
+++ b/native/spark-expr/src/json_funcs/to_json.rs
@@ -19,7 +19,7 @@
 // of the Spark-specific compatibility features that we need (including
 // being able to specify Spark-compatible cast from all types to string)
 
-use crate::cast::SparkCastOptions;
+use crate::SparkCastOptions;
 use crate::{spark_cast, EvalMode};
 use arrow_array::builder::StringBuilder;
 use arrow_array::{Array, ArrayRef, RecordBatch, StringArray, StructArray};
@@ -250,7 +250,7 @@ fn struct_to_json(array: &StructArray, timezone: &str) -> Result<ArrayRef> {
 
 #[cfg(test)]
 mod test {
-    use crate::to_json::struct_to_json;
+    use crate::json_funcs::to_json::struct_to_json;
     use arrow_array::types::Int32Type;
     use arrow_array::{Array, PrimitiveArray, StringArray};
     use arrow_array::{ArrayRef, BooleanArray, Int32Array, StructArray};
diff --git a/native/spark-expr/src/lib.rs b/native/spark-expr/src/lib.rs
index c614e1f0a2..ee58f6290d 100644
--- a/native/spark-expr/src/lib.rs
+++ b/native/spark-expr/src/lib.rs
@@ -42,9 +42,9 @@ mod struct_funcs;
 pub use negative::{create_negate_expr, NegativeExpr};
 
 mod normalize_nan;
+mod json_funcs;
 pub mod test_common;
 pub mod timezone;
-mod to_json;
 mod unbound;
 pub use unbound::UnboundColumn;
 pub mod utils;
@@ -66,10 +66,10 @@ pub use comet_scalar_funcs::create_comet_physical_fun;
 pub use datetime_funcs::*;
 pub use error::{SparkError, SparkResult};
 pub use if_expr::IfExpr;
+pub use json_funcs::ToJson;
 pub use list::{ArrayInsert, GetArrayStructFields, ListExtract};
 pub use string_funcs::*;
 pub use struct_funcs::*;
-pub use to_json::ToJson;
 
 /// Spark supports three evaluation modes when evaluating expressions, which affect
 /// the behavior when processing input values that are invalid or would result in an