@@ -28,7 +28,7 @@ import org.apache.spark.sql.execution.streaming.MemoryStream
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SharedSQLContext
-import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
+import org.apache.spark.sql.types._
 
 class DatasetSuite extends QueryTest with SharedSQLContext {
   import testImplicits._
@@ -936,10 +936,17 @@ class DatasetSuite extends QueryTest with SharedSQLContext {
   }
 
   test("SPARK-18284: Serializer should have correct nullable value") {
-    val df1 = sparkContext.parallelize(Seq(1, 2, 3, 4), 1).toDF()
+    val df1 = sparkContext.parallelize(Seq(1, 2, 3, 4), 1).toDF
     assert(df1.schema(0).nullable == false)
-    val df2 = sparkContext.parallelize(Seq(Integer.valueOf(1), Integer.valueOf(2)), 1).toDF()
+    val df2 = sparkContext.parallelize(Seq(Integer.valueOf(1), Integer.valueOf(2)), 1).toDF
     assert(df2.schema(0).nullable == true)
+
+    val df3 = sparkContext.parallelize(Seq(Seq(1, 2), Seq(3, 4)), 1).toDF
+    assert(df3.schema(0).nullable == true)
+    assert(df3.schema(0).dataType.asInstanceOf[ArrayType].containsNull == false)
+    val df4 = sparkContext.parallelize(Seq(Seq("a", "b"), Seq("c", "d")), 1).toDF
+    assert(df4.schema(0).nullable == true)
+    assert(df4.schema(0).dataType.asInstanceOf[ArrayType].containsNull == true)
   }
944951
   Seq(true, false).foreach { eager =>
0 commit comments