Skip to content

Commit 38991d0

Browse files
committed
rebase
Merge commit 38991d0 (2 parents: ba7494d + 3816d45)

File tree

1 file changed: 10 additions (+10), 3 deletions (-3)

sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala

Lines changed: 10 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -28,7 +28,7 @@ import org.apache.spark.sql.execution.streaming.MemoryStream
2828
import org.apache.spark.sql.functions._
2929
import org.apache.spark.sql.internal.SQLConf
3030
import org.apache.spark.sql.test.SharedSQLContext
31-
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
31+
import org.apache.spark.sql.types._
3232

3333
class DatasetSuite extends QueryTest with SharedSQLContext {
3434
import testImplicits._
@@ -936,10 +936,17 @@ class DatasetSuite extends QueryTest with SharedSQLContext {
936936
}
937937

938938
test("SPARK-18284: Serializer should have correct nullable value") {
  // A column encoded from primitive Int values can never hold null,
  // so the serializer must mark it non-nullable.
  val df1 = sparkContext.parallelize(Seq(1, 2, 3, 4), 1).toDF
  assert(df1.schema(0).nullable == false)

  // Boxed java.lang.Integer values may be null, so the column is nullable.
  val df2 = sparkContext.parallelize(Seq(Integer.valueOf(1), Integer.valueOf(2)), 1).toDF
  assert(df2.schema(0).nullable == true)

  // Seq[Int] column: the array value itself may be null (nullable == true),
  // but its primitive Int elements cannot be (containsNull == false).
  val df3 = sparkContext.parallelize(Seq(Seq(1, 2), Seq(3, 4)), 1).toDF
  assert(df3.schema(0).nullable == true)
  assert(df3.schema(0).dataType.asInstanceOf[ArrayType].containsNull == false)

  // Seq[String] column: both the array and its String elements may be null.
  val df4 = sparkContext.parallelize(Seq(Seq("a", "b"), Seq("c", "d")), 1).toDF
  assert(df4.schema(0).nullable == true)
  assert(df4.schema(0).dataType.asInstanceOf[ArrayType].containsNull == true)
}
944951

945952
Seq(true, false).foreach { eager =>

0 commit comments

Comments
 (0)