Chore: Fix Scala code warnings - Spark module #2558
Changes from all commits:
```diff
@@ -50,11 +50,12 @@ object SourceFilterSerde extends Logging {
           .setDatatype(dataType.get)
           .build()
       Some(
-        field.dataType,
-        ExprOuterClass.Expr
-          .newBuilder()
-          .setBound(boundExpr)
-          .build())
+        (
+          field.dataType,
+          ExprOuterClass.Expr
+            .newBuilder()
+            .setBound(boundExpr)
+            .build()))
     } else {
       None
     }
```

**Contributor:** Not sure if this formatting error is intended?
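This change silences Scala's argument-adaptation (auto-tupling) lint: `Some.apply` takes a single parameter, so `Some(a, b)` makes the compiler quietly wrap the two arguments in a tuple. A minimal sketch of the warning, assuming a lint flag such as `-Xlint:adapted-args` is enabled:

```scala
// Some.apply takes ONE argument, so two arguments get auto-tupled,
// which -Xlint:adapted-args reports as an adapted argument list.
val name = "dataType"
val value = 42

val adapted = Some(name, value)    // warns: args adapted to Some[(String, Int)]
val explicit = Some((name, value)) // same value, no adaptation

assert(adapted == explicit)
```

The extra indentation in the diff appears to be the formatter wrapping the now-explicit tuple, which is presumably what the comment above is asking about.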
```diff
@@ -80,8 +81,8 @@ object SourceFilterSerde extends Logging {
     // refer to org.apache.spark.sql.catalyst.CatalystTypeConverters.CatalystTypeConverter#toScala
     dataType match {
       case _: BooleanType => exprBuilder.setBoolVal(value.asInstanceOf[Boolean])
-      case _: ByteType => exprBuilder.setByteVal(value.asInstanceOf[Byte])
-      case _: ShortType => exprBuilder.setShortVal(value.asInstanceOf[Short])
+      case _: ByteType => exprBuilder.setByteVal(value.asInstanceOf[Byte].toInt)
+      case _: ShortType => exprBuilder.setShortVal(value.asInstanceOf[Short].toInt)
       case _: IntegerType => exprBuilder.setIntVal(value.asInstanceOf[Int])
       case _: LongType => exprBuilder.setLongVal(value.asInstanceOf[Long])
       case _: FloatType => exprBuilder.setFloatVal(value.asInstanceOf[Float])
```
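The `.toInt` calls address an implicit numeric-widening warning: protobuf has no 8- or 16-bit scalar types, so the generated setters take `Int`, and passing a `Byte` or `Short` forces an implicit widening conversion. A minimal sketch, assuming `-Wnumeric-widen` is enabled; `setByteVal` here is a hypothetical stand-in for the generated setter:

```scala
// Stand-in for a protobuf-generated setter; the real one takes Int
// because protobuf's smallest integer scalar is int32.
def setByteVal(v: Int): Unit = println(s"byte value: $v")

val b: Byte = 7
setByteVal(b)       // warns under -Wnumeric-widen: implicit widening Byte => Int
setByteVal(b.toInt) // explicit conversion, no warning
```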
```diff
@@ -19,6 +19,7 @@

 package org.apache.comet.serde

+import scala.annotation.unused
 import scala.collection.mutable.ListBuffer
 import scala.jdk.CollectionConverters._
```
```diff
@@ -1804,7 +1805,7 @@ object QueryPlanSerde extends Logging with CometExprShim {
         .setFileSize(file.fileSize)
       partitionBuilder.addPartitionedFile(fileBuilder.build())
     })
-    nativeScanBuilder.addFilePartitions(partitionBuilder.build())
+    nativeScanBuilder.addFilePartitions(partitionBuilder.build()); ()
   }
 }
```

**Member:** This seems incomprehensible.
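For context on the trailing `; ()`: protobuf builder methods return the builder for chaining, so when such a call is the last expression of a `Unit` method the compiler discards the value implicitly, which trips a discard warning (presumably `-Wvalue-discard`). Appending `; ()` ends the method in an explicit `()`, so nothing is discarded behind the programmer's back. A minimal sketch under that assumption:

```scala
// Fluent builder: addPartition returns the builder, so its result is non-Unit.
final class PartitionBuilder {
  def addPartition(id: Int): PartitionBuilder = this
}

def register(b: PartitionBuilder): Unit = {
  b.addPartition(1) // warns under -Wvalue-discard: value adapted to Unit
}

def registerFixed(b: PartitionBuilder): Unit = {
  b.addPartition(1); () // method now ends in an explicit (), no warning
}
```

An arguably clearer spelling of the same fix is `val _ = nativeScanBuilder.addFilePartitions(...)`, which may be what the reviewer's objection points toward.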
```diff
@@ -1886,7 +1887,7 @@ trait CometExpressionSerde[T <: Expression] {
    * @return
    *   Support level (Compatible, Incompatible, or Unsupported).
    */
-  def getSupportLevel(expr: T): SupportLevel = Compatible(None)
+  def getSupportLevel(@unused expr: T): SupportLevel = Compatible(None)

   /**
    * Convert a Spark expression into a protocol buffer representation that can be passed into
```
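`@unused` (from `scala.annotation.unused`, imported above) documents that the default implementation deliberately ignores its parameter; the parameter itself cannot be dropped, because overriding serdes need it. A minimal sketch with simplified stand-in types, assuming `-Wunused:params` or `-Wunused:explicits` is enabled:

```scala
import scala.annotation.unused

trait ExprSerde[T] {
  // The default ignores expr; without @unused this would warn under -Wunused.
  def getSupportLevel(@unused expr: T): String = "Compatible"
}

class StringSerde extends ExprSerde[String] {
  // Overrides that actually use the parameter compile unchanged.
  override def getSupportLevel(expr: String): String =
    if (expr.isEmpty) "Unsupported" else "Compatible"
}
```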
```diff
@@ -19,6 +19,7 @@

 package org.apache.comet.serde

+import scala.annotation.unused
 import scala.jdk.CollectionConverters._

 import org.apache.spark.sql.catalyst.expressions.{Attribute, EvalMode}
```
```diff
@@ -414,7 +415,7 @@ trait CometCovBase {
       statsType: Int,
       inputs: Seq[Attribute],
       binding: Boolean,
-      conf: SQLConf): Option[ExprOuterClass.AggExpr] = {
+      @unused conf: SQLConf): Option[ExprOuterClass.AggExpr] = {
     val child1Expr = exprToProto(cov.left, inputs, binding)
     val child2Expr = exprToProto(cov.right, inputs, binding)
     val dataType = serializeDataType(cov.dataType)
```

**Member:** If this is unused, can we just remove it?
```diff
@@ -100,13 +100,13 @@ object CometDriverPlugin extends Logging {
     val extensions = conf.get(extensionKey, "")
     if (extensions.isEmpty) {
       logInfo(s"Setting $extensionKey=$extensionClass")
-      conf.set(extensionKey, extensionClass)
+      conf.set(extensionKey, extensionClass); ()
     } else {
       val currentExtensions = extensions.split(",").map(_.trim)
       if (!currentExtensions.contains(extensionClass)) {
         val newValue = s"$extensions,$extensionClass"
         logInfo(s"Setting $extensionKey=$newValue")
-        conf.set(extensionKey, newValue)
+        conf.set(extensionKey, newValue); ()
       }
     }
   }
```

**Contributor:** Perhaps an unintended …
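The `; ()` here discards the `SparkConf` that `conf.set` returns for chaining, just like the builder case above. All of the edits in this PR are mechanical responses to compiler lints; for reference, a hypothetical sbt fragment enabling the Scala 2.13 flags that would produce the warnings being fixed (the project's actual build configuration may differ):

```scala
// Hypothetical build.sbt fragment; flag names are Scala 2.13's.
scalacOptions ++= Seq(
  "-Wvalue-discard",     // discarded non-Unit values (the "; ()" fixes)
  "-Wnumeric-widen",     // implicit numeric widening (the ".toInt"/".toLong" fixes)
  "-Wunused:params",     // unused parameters (the "@unused" fixes)
  "-Xlint:adapted-args", // auto-tupled argument lists (the extra parentheses)
  "-Werror"              // optionally escalate all warnings to errors
)
```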
```diff
@@ -146,7 +146,7 @@ case class CometBroadcastExchangeExec(
     longMetric("numOutputRows") += numRows
     if (numRows >= maxBroadcastRows) {
       throw QueryExecutionErrors.cannotBroadcastTableOverMaxTableRowsError(
-        maxBroadcastRows,
+        maxBroadcastRows.toLong,
         numRows)
     }
```
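This is the same widening lint as the `Byte`/`Short` cases above, here from `Int` to `Long`: the error helper evidently takes `Long` parameters while `maxBroadcastRows` is an `Int`. A small sketch with `reportOverflow` as a hypothetical stand-in for `QueryExecutionErrors.cannotBroadcastTableOverMaxTableRowsError`:

```scala
// Hypothetical stand-in for an API that takes Long parameters.
def reportOverflow(maxRows: Long, numRows: Long): Nothing =
  throw new IllegalStateException(s"$numRows rows exceed limit $maxRows")

val maxBroadcastRows: Int = 512000000
val numRows: Long = 600000000L

// reportOverflow(maxBroadcastRows, numRows)        // warns: Int widened to Long
// reportOverflow(maxBroadcastRows.toLong, numRows) // explicit, warning-free
```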
```diff
@@ -198,7 +198,7 @@ case class CometBroadcastExchangeExec(

   override protected def doPrepare(): Unit = {
     // Materialize the future.
-    relationFuture
+    relationFuture; ()
   }

   override protected def doExecute(): RDD[InternalRow] = {
```

**Contributor:** Perhaps an unintended …
```diff
@@ -60,7 +60,9 @@ class CometNativeShuffleWriter[K, V](
   private val OFFSET_LENGTH = 8

   var partitionLengths: Array[Long] = _
-  var mapStatus: MapStatus = _
+  // Store MapStatus opaquely as AnyRef,
+  // to avoid private[spark] visibility issues; cast back when needed.
+  var mapStatus: AnyRef = _

   override def write(inputs: Iterator[Product2[K, V]]): Unit = {
     val shuffleBlockResolver =
```

**Contributor:** Any reason why we are removing type assignment of `MapStatus`?
```diff
@@ -302,7 +304,7 @@ class CometNativeShuffleWriter[K, V](

   override def stop(success: Boolean): Option[MapStatus] = {
     if (success) {
-      Some(mapStatus)
+      Some(mapStatus.asInstanceOf[MapStatus])
     } else {
       None
     }
```
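A sketch of the opaque-storage pattern the writer now uses (names hypothetical): the field is declared as `AnyRef` so the concrete type never appears in the declaration, and the one read site casts back. The cast is unchecked after erasure, so this trades a little type safety for not naming a restricted type:

```scala
final class OpaqueHolder {
  // Declared as AnyRef so the concrete (possibly package-private) type
  // does not have to be named in the field's type.
  private var held: AnyRef = _

  def store(v: AnyRef): Unit = { held = v }

  // The single place that needs the real type asserts it with a cast.
  def retrieve[T]: Option[T] = Option(held).map(_.asInstanceOf[T])
}

val h = new OpaqueHolder
h.store("stand-in for a MapStatus")
val status: Option[String] = h.retrieve[String]
```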
```diff
@@ -21,7 +21,6 @@ package org.apache.spark.sql.comet.shims

 import org.apache.hadoop.fs.Path
-
 import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.{AttributeReference, Expression}
```

**Contributor:** Unintended formatting issue?
```diff
@@ -32,6 +31,8 @@ import org.apache.spark.sql.sources.Filter
 import org.apache.spark.sql.types.StructType
 import org.apache.spark.SPARK_VERSION_SHORT
 import org.apache.spark.util.VersionUtils
+
+import scala.annotation.nowarn
 import scala.math.Ordering.Implicits._

 trait ShimCometScanExec {
```
```diff
@@ -42,7 +43,7 @@ trait ShimCometScanExec {

   def isSparkVersionAtLeast355: Boolean = {
     VersionUtils.majorMinorPatchVersion(SPARK_VERSION_SHORT) match {
-      case Some((major, minor, patch)) => (major, minor, patch) >= (3, 5, 5)
+      case Some((major, minor, patch)) => (major, minor, patch) >= ((3, 5, 5))
       case None =>
         throw new IllegalArgumentException(s"Malformed Spark version: $SPARK_VERSION_SHORT")
     }
```

**Contributor:** Unintended formatting issue?
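The doubled parentheses are the auto-tupling lint again: with `scala.math.Ordering.Implicits._` in scope, `>=` on a triple takes a single tuple argument, so `(3, 5, 5)` with one set of parentheses parses as three arguments that the compiler adapts into a tuple. A minimal sketch, assuming `-Xlint:adapted-args`:

```scala
import scala.math.Ordering.Implicits._

val version = (3, 5, 6)
// val ok = version >= (3, 5, 5) // warns: three args auto-tupled into (3, 5, 5)
val ok = version >= ((3, 5, 5))  // explicit tuple literal, no adaptation
println(ok) // true
```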
```diff
@@ -62,7 +63,7 @@ trait ShimCometScanExec {
       fsRelation.fileFormat.fileConstantMetadataExtractors,
       options)

-  // see SPARK-39634
+  @nowarn // Temporary implementation, see SPARK-39634
   protected def isNeededForSchema(sparkSchema: StructType): Boolean = false

   protected def getPartitionedFile(f: FileStatusWithMetadata, p: PartitionDirectory): PartitionedFile =
```

**Comment:** Not sure if this comment is necessary? Or maybe I am missing something here.
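For context on `@nowarn` (from `scala.annotation.nowarn`, available since Scala 2.13.2): it suppresses warnings on the annotated definition only, here presumably the unused `sparkSchema` parameter of the stub implementation. A minimal sketch with a simplified signature:

```scala
import scala.annotation.nowarn

trait SchemaShim {
  // The stub ignores its parameter; @nowarn("cat=unused") silences just
  // that warning category, and only for this definition.
  @nowarn("cat=unused")
  def isNeededForSchema(schema: List[String]): Boolean = false
}
```

The reviewer's question appears to concern the added `// Temporary implementation` wording rather than the annotation itself.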