diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
index 095f6a970617b..caf1d206975f0 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
@@ -214,7 +214,8 @@ class ExpressionEncoderSuite extends CodegenInterpretedPlanTest with AnalysisTes
     OuterScopes.addOuterScope(MalformedClassObject)
     encodeDecodeTest(
       MalformedClassObject.MalformedNameExample(42),
-      "nested Scala class should work")
+      "nested Scala class should work",
+      useFallback = true)
   }
 
   object OuterLevelWithVeryVeryVeryLongClassName1 {
@@ -284,7 +285,8 @@
         .OuterLevelWithVeryVeryVeryLongClassName19
         .OuterLevelWithVeryVeryVeryLongClassName20
         .MalformedNameExample(42),
-      "deeply nested Scala class should work")
+      "deeply nested Scala class should work",
+      useFallback = true)
   }
 
   productTest(PrimitiveData(1, 1, 1, 1, 1, 1, true))
@@ -555,8 +557,9 @@ class ExpressionEncoderSuite extends CodegenInterpretedPlanTest with AnalysisTes
 
   private def encodeDecodeTest[T : ExpressionEncoder](
       input: T,
-      testName: String): Unit = {
-    testAndVerifyNotLeakingReflectionObjects(s"encode/decode for $testName: $input") {
+      testName: String,
+      useFallback: Boolean = false): Unit = {
+    testAndVerifyNotLeakingReflectionObjects(s"encode/decode for $testName: $input", useFallback) {
       val encoder = implicitly[ExpressionEncoder[T]]
 
       // Make sure encoder is serializable.
@@ -650,9 +653,16 @@ class ExpressionEncoderSuite extends CodegenInterpretedPlanTest with AnalysisTes
     r
   }
 
-  private def testAndVerifyNotLeakingReflectionObjects(testName: String)(testFun: => Any): Unit = {
-    test(testName) {
-      verifyNotLeakingReflectionObjects(testFun)
+  private def testAndVerifyNotLeakingReflectionObjects(
+      testName: String, useFallback: Boolean = false)(testFun: => Any): Unit = {
+    if (useFallback) {
+      testFallback(testName) {
+        verifyNotLeakingReflectionObjects(testFun)
+      }
+    } else {
+      test(testName) {
+        verifyNotLeakingReflectionObjects(testFun)
+      }
     }
   }
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
index e46599dc19a8b..f843784ec52a9 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
@@ -961,7 +961,7 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
   }
 
   test("ANSI mode: cast string to timestamp with parse error") {
-    val activeConf = conf
+    val activeConf = conf.clone()
     new ParVector(ALL_TIMEZONES.toVector).foreach { zid =>
       def checkCastWithParseError(str: String): Unit = {
         checkExceptionInExpression[DateTimeException](
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala
index 7c70ab98e4183..6bbe99ba7d786 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala
@@ -51,6 +51,15 @@ trait CodegenInterpretedPlanTest extends PlanTest {
       super.test(testName + " (interpreted path)", testTags: _*)(testFun)(pos)
     }
   }
+
+  protected def testFallback(
+      testName: String,
+      testTags: Tag*)(testFun: => Any)(implicit pos: source.Position): Unit = {
+    val codegenMode = CodegenObjectFactoryMode.FALLBACK.toString
+    withSQLConf(SQLConf.CODEGEN_FACTORY_MODE.key -> codegenMode) {
+      super.test(testName, testTags: _*)(testFun)(pos)
+    }
+  }
 }
 
 /**