Skip to content

Commit 00a90da

Browse files
committed
resolve conflict
1 parent 6b9f42c commit 00a90da

File tree

1 file changed

+30
-18
lines changed
  • sql/catalyst/src/main/scala/org/apache/spark/sql/connector/expressions/expressions.scala

1 file changed

+30
-18
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/connector/expressions/expressions.scala

Lines changed: 30 additions & 18 deletions
Original file line number | Diff line number | Diff line change
@@ -104,24 +104,29 @@ private[sql] final case class BucketTransform(
104104
columns: Seq[NamedReference],
105105
sortedColumns: Seq[NamedReference] = Seq.empty[NamedReference]) extends RewritableTransform {
106106

107-
override val name: String = "bucket"
107+
override val name: String = if (sortedColumns.nonEmpty) "sortedBucket" else "bucket"
108108

109109
override def references: Array[NamedReference] = {
110110
arguments.collect { case named: NamedReference => named }
111111
}
112112

113-
override def arguments: Array[Expression] = numBuckets +: columns.toArray
114-
115-
override def toString: String =
113+
override def arguments: Array[Expression] = {
116114
if (sortedColumns.nonEmpty) {
117-
s"bucket(${arguments.map(_.describe).mkString(", ")}," +
118-
s" ${sortedColumns.map(_.describe).mkString(", ")})"
115+
(columns.toArray :+ numBuckets) ++ sortedColumns
119116
} else {
120-
s"bucket(${arguments.map(_.describe).mkString(", ")})"
117+
numBuckets +: columns.toArray
121118
}
119+
}
120+
121+
override def toString: String = s"$name(${arguments.map(_.describe).mkString(", ")})"
122122

123123
override def withReferences(newReferences: Seq[NamedReference]): Transform = {
124-
this.copy(columns = newReferences)
124+
if (sortedColumns.isEmpty) {
125+
this.copy(columns = newReferences)
126+
} else {
127+
val splits = newReferences.grouped(columns.length).toList
128+
this.copy(columns = splits(0), sortedColumns = splits(1))
129+
}
125130
}
126131
}
127132

@@ -140,19 +145,26 @@ private[sql] object BucketTransform {
140145
}
141146

142147
def unapply(transform: Transform): Option[(Int, NamedReference, NamedReference)] =
143-
transform match {
144-
case NamedTransform("bucket", Seq(
145-
Lit(value: Int, IntegerType),
146-
Ref(partCols: Seq[String]),
147-
Ref(sortCols: Seq[String]))) =>
148-
Some((value, FieldReference(partCols), FieldReference(sortCols)))
149-
case NamedTransform("bucket", Seq(
150-
Lit(value: Int, IntegerType),
151-
Ref(partCols: Seq[String]))) =>
148+
transform match {
149+
case NamedTransform("sortedBucket", s) =>
150+
var index: Int = -1
151+
var posOfLit: Int = -1
152+
var numOfBucket: Int = -1
153+
s.foreach {
154+
case Lit(value: Int, IntegerType) =>
155+
numOfBucket = value
156+
index = index + 1
157+
posOfLit = index
158+
case _ => index = index + 1
159+
}
160+
val splits = s.splitAt(posOfLit)
161+
Some(numOfBucket, FieldReference(
162+
splits._1.map(_.describe)), FieldReference(splits._2.drop(1).map(_.describe)))
163+
case NamedTransform("bucket", Seq(Lit(value: Int, IntegerType), Ref(partCols: Seq[String]))) =>
152164
Some((value, FieldReference(partCols), FieldReference(Seq.empty[String])))
153165
case _ =>
154166
None
155-
}
167+
}
156168
}
157169

158170
private[sql] final case class ApplyTransform(

0 commit comments

Comments (0)