File tree — 1 file changed: +1 −11 lines changed
sql/core/src/test/scala/org/apache/spark/sql/parquet — 1 file changed: +1 −11 lines changed
Original file line number | Diff line number | Diff line change
@@ -161,12 +161,6 @@ class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll
161161 TestSQLContext .isParquetBinaryAsString)
162162 }
163163 val schemaRDD = new SchemaRDD (TestSQLContext , parquetRelation)
164- val resultWithString = schemaRDD.collect
165- range.foreach {
166- i =>
167- assert(resultWithString(i).getInt(0 ) === i)
168- assert(resultWithString(i)(1 ) === s " val_ $i" )
169- }
170164
171165 schemaRDD.registerTempTable(" tmp" )
172166 checkAnswer(
@@ -433,15 +427,11 @@ class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll
433427 }
434428
435429 test(" Insert (appending) to same table via Scala API" ) {
436- // TODO: why does collecting break things? It seems InsertIntoParquet::execute() is
437- // executed twice otherwise?!
438430 sql(" INSERT INTO testsource SELECT * FROM testsource" )
439431 val double_rdd = sql(" SELECT * FROM testsource" ).collect()
440432 assert(double_rdd != null )
441433 assert(double_rdd.size === 30 )
442- for (i <- (0 to 14 )) {
443- assert(double_rdd(i) === double_rdd(i+ 15 ), s " error: lines $i and ${i+ 15 } to not match " )
444- }
434+
445435 // let's restore the original test data
446436 Utils .deleteRecursively(ParquetTestData .testDir)
447437 ParquetTestData .writeFile()
You can’t perform that action at this time.
0 commit comments