[SPARK-41378][SQL] Support Column Stats in DS v2 #38904
This pull request adds three new @Evolving interfaces under org.apache.spark.sql.connector.read.stats (ColumnStatistics, Histogram, HistogramBin) and wires fake column statistics into the in-memory test table so the new API can be exercised.

ColumnStatistics.java (new file):

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.connector.read.stats;

import java.math.BigInteger;
import java.util.Optional;
import java.util.OptionalLong;

import org.apache.spark.annotation.Evolving;

/**
 * An interface to represent column statistics, which is part of
 * {@link Statistics}.
 *
 * @since 3.4.0
 */
@Evolving
public interface ColumnStatistics {

  /** Returns the number of distinct values in the column, if known. */
  default Optional<BigInteger> distinctCount() {
    return Optional.empty();
  }

  /** Returns the minimum value in the column, if known. */
  default Optional<Object> min() {
    return Optional.empty();
  }

  /** Returns the maximum value in the column, if known. */
  default Optional<Object> max() {
    return Optional.empty();
  }

  /** Returns the number of null values in the column, if known. */
  default Optional<BigInteger> nullCount() {
    return Optional.empty();
  }

  /** Returns the average length of the values in the column, if known. */
  default OptionalLong avgLen() {
    return OptionalLong.empty();
  }

  /** Returns the maximum length of the values in the column, if known. */
  default OptionalLong maxLen() {
    return OptionalLong.empty();
  }

  /** Returns an equi-height histogram of the column values, if available. */
  default Optional<Histogram> histogram() {
    return Optional.empty();
  }
}
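As a usage sketch (not part of this diff), a connector can back ColumnStatistics with a small value class that carries whatever it has precomputed and leaves the remaining fields at their empty defaults. The class name and constructor below are hypothetical:

import java.math.BigInteger;
import java.util.Optional;

import org.apache.spark.sql.connector.read.stats.ColumnStatistics;

// Hypothetical helper, not part of this PR: reports precomputed per-column stats.
public class SimpleColumnStatistics implements ColumnStatistics {
  private final BigInteger distinctCount;
  private final Object min;
  private final Object max;
  private final BigInteger nullCount;

  public SimpleColumnStatistics(
      BigInteger distinctCount, Object min, Object max, BigInteger nullCount) {
    this.distinctCount = distinctCount;
    this.min = min;
    this.max = max;
    this.nullCount = nullCount;
  }

  @Override
  public Optional<BigInteger> distinctCount() { return Optional.ofNullable(distinctCount); }

  @Override
  public Optional<Object> min() { return Optional.ofNullable(min); }

  @Override
  public Optional<Object> max() { return Optional.ofNullable(max); }

  @Override
  public Optional<BigInteger> nullCount() { return Optional.ofNullable(nullCount); }

  // avgLen(), maxLen() and histogram() keep their empty defaults.
}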
Histogram.java (new file):

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.connector.read.stats;

import org.apache.spark.annotation.Evolving;

/**
 * An interface to represent an equi-height histogram, which is a part of
 * {@link ColumnStatistics}. An equi-height histogram represents the distribution
 * of a column's values by a sequence of bins.
 *
 * @since 3.4.0
 */
@Evolving
public interface Histogram {

  /** Returns the height of the histogram, i.e. the number of rows covered by each bin. */
  double height();

  /** Returns the bins that make up the histogram. */
  HistogramBin[] bins();
}
HistogramBin.java (new file):

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.connector.read.stats;

import org.apache.spark.annotation.Evolving;

/**
 * An interface to represent a bin in an equi-height histogram.
 *
 * @since 3.4.0
 */
@Evolving
public interface HistogramBin {

  /** Returns the lower bound of the bin's value range. */
  double lo();

  /** Returns the upper bound of the bin's value range. */
  double hi();

  /** Returns the number of distinct values in the bin. */
  long ndv();
}
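The histogram side can be sketched the same way. The classes below are again hypothetical (not from this PR) and show one straightforward way to satisfy Histogram and HistogramBin with fixed data; for instance, new SimpleHistogram(5, new HistogramBin[] {new SimpleHistogram.SimpleBin(1, 2, 5), new SimpleHistogram.SimpleBin(3, 4, 5)}) mirrors the fake histogram built by the in-memory test table below.

import org.apache.spark.sql.connector.read.stats.Histogram;
import org.apache.spark.sql.connector.read.stats.HistogramBin;

// Hypothetical helpers, not part of this PR: an equi-height histogram over fixed bins.
public class SimpleHistogram implements Histogram {
  private final double height;
  private final HistogramBin[] bins;

  public SimpleHistogram(double height, HistogramBin[] bins) {
    this.height = height;
    this.bins = bins;
  }

  @Override
  public double height() { return height; }

  @Override
  public HistogramBin[] bins() { return bins; }

  /** A bin covering the value range [lo, hi] with ndv distinct values. */
  public static class SimpleBin implements HistogramBin {
    private final double lo;
    private final double hi;
    private final long ndv;

    public SimpleBin(double lo, double hi, long ndv) {
      this.lo = lo;
      this.hi = hi;
      this.ndv = ndv;
    }

    @Override
    public double lo() { return lo; }

    @Override
    public double hi() { return hi; }

    @Override
    public long ndv() { return ndv; }
  }
}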
InMemoryBaseTable.scala (modified):

@@ -17,10 +17,11 @@
 
 package org.apache.spark.sql.connector.catalog
 
+import java.math.BigInteger
 import java.time.{Instant, ZoneId}
 import java.time.temporal.ChronoUnit
 import java.util
-import java.util.OptionalLong
+import java.util.{HashMap, Optional, OptionalLong}
 
 import scala.collection.mutable
 
@@ -34,6 +35,7 @@ import org.apache.spark.sql.connector.expressions._
 import org.apache.spark.sql.connector.metric.{CustomMetric, CustomTaskMetric}
 import org.apache.spark.sql.connector.read._
 import org.apache.spark.sql.connector.read.partitioning.{KeyGroupedPartitioning, Partitioning, UnknownPartitioning}
+import org.apache.spark.sql.connector.read.stats.{ColumnStatistics, Histogram, HistogramBin, Statistics}
 import org.apache.spark.sql.connector.write._
 import org.apache.spark.sql.connector.write.streaming.{StreamingDataWriterFactory, StreamingWrite}
 import org.apache.spark.sql.internal.connector.SupportsStreamingUpdateAsAppend
@@ -273,7 +275,23 @@ abstract class InMemoryBaseTable(
   }
 }
 
-case class InMemoryStats(sizeInBytes: OptionalLong, numRows: OptionalLong) extends Statistics
+case class InMemoryStats(
+    sizeInBytes: OptionalLong,
+    numRows: OptionalLong,
+    override val columnStats: Optional[HashMap[NamedReference, ColumnStatistics]])
+  extends Statistics
+case class InMemoryColumnStats(
+    override val distinctCount: Optional[BigInteger],
+    override val min: Optional[AnyRef],
+    override val max: Optional[AnyRef],
+    override val nullCount: Optional[BigInteger],
+    override val avgLen: OptionalLong,
+    override val maxLen: OptionalLong,
+    override val histogram: Optional[Histogram]) extends ColumnStatistics
+
+case class InMemoryHistogramBin(lo: Double, hi: Double, ndv: Long) extends HistogramBin
+
+case class InMemoryHistogram(height: Double, bins: Array[HistogramBin]) extends Histogram
 
 abstract class BatchScanBaseClass(
     var data: Seq[InputPartition],
@@ -285,7 +303,7 @@ abstract class InMemoryBaseTable(
 
   override def estimateStatistics(): Statistics = {
     if (data.isEmpty) {
-      return InMemoryStats(OptionalLong.of(0L), OptionalLong.of(0L))
+      return InMemoryStats(OptionalLong.of(0L), OptionalLong.of(0L), Optional.empty())
     }
 
     val inputPartitions = data.map(_.asInstanceOf[BufferedRows])
@@ -294,7 +312,30 @@ abstract class InMemoryBaseTable(
     val objectHeaderSizeInBytes = 12L
     val rowSizeInBytes = objectHeaderSizeInBytes + schema.defaultSize
     val sizeInBytes = numRows * rowSizeInBytes
-    InMemoryStats(OptionalLong.of(sizeInBytes), OptionalLong.of(numRows))
+
+    val map = new util.HashMap[NamedReference, ColumnStatistics]()
+    val colNames = readSchema.fields.map(_.name)
+    for (col <- colNames) {
+      val fieldReference = FieldReference(col)
+      // put some fake data for testing only
+      val bin1 = InMemoryHistogramBin(1, 2, 5L)
+      val bin2 = InMemoryHistogramBin(3, 4, 5L)
+      val bin3 = InMemoryHistogramBin(5, 6, 5L)
+      val bin4 = InMemoryHistogramBin(7, 8, 5L)
+      val bin5 = InMemoryHistogramBin(9, 10, 5L)
+
+      val colStats = InMemoryColumnStats(
+        Optional.of[BigInteger](BigInteger.valueOf(5)),
+        Optional.of[AnyRef](Integer.valueOf(0)),
+        Optional.of[AnyRef](Integer.valueOf(5)),
+        Optional.of[BigInteger](BigInteger.valueOf(0)),
+        OptionalLong.of(111L),
+        OptionalLong.of(1111L),
+        Optional.of[Histogram](InMemoryHistogram(5, Array(bin1, bin2, bin3, bin4, bin5)))
+      )
+      map.put(fieldReference, colStats)
+    }
+
+    InMemoryStats(OptionalLong.of(sizeInBytes), OptionalLong.of(numRows), Optional.of(map))
   }
 
   override def outputPartitioning(): Partitioning = {
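Finally, to show how a connector would surface these statistics to Spark, here is a hedged Java sketch of a scan that implements SupportsReportStatistics, mirroring what the Scala test table above does. It reuses the hypothetical SimpleColumnStatistics from the earlier sketch, assumes the Statistics#columnStats signature used in this revision of the PR (Optional<HashMap<NamedReference, ColumnStatistics>>) and the read.stats package location shown in the imports above, and uses Expressions.column(...) as one way to obtain a NamedReference:

import java.math.BigInteger;
import java.util.HashMap;
import java.util.Optional;
import java.util.OptionalLong;

import org.apache.spark.sql.connector.expressions.Expressions;
import org.apache.spark.sql.connector.expressions.NamedReference;
import org.apache.spark.sql.connector.read.SupportsReportStatistics;
import org.apache.spark.sql.connector.read.stats.ColumnStatistics;
import org.apache.spark.sql.connector.read.stats.Statistics;

// Hypothetical scan that reports table- and column-level statistics to Spark.
public abstract class StatsReportingScan implements SupportsReportStatistics {

  @Override
  public Statistics estimateStatistics() {
    // Column stats for a single column named "id"; a real connector would
    // derive these numbers from its own metadata instead of hard-coding them.
    HashMap<NamedReference, ColumnStatistics> columnStats = new HashMap<>();
    columnStats.put(
        Expressions.column("id"),
        new SimpleColumnStatistics(
            BigInteger.valueOf(1000), Integer.valueOf(0), Integer.valueOf(999), BigInteger.ZERO));

    return new Statistics() {
      @Override
      public OptionalLong sizeInBytes() { return OptionalLong.of(8L * 1000L); }

      @Override
      public OptionalLong numRows() { return OptionalLong.of(1000L); }

      @Override
      public Optional<HashMap<NamedReference, ColumnStatistics>> columnStats() {
        return Optional.of(columnStats);
      }
    };
  }
}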