@@ -67,6 +67,19 @@ case class AllDataTypes(
6767 byteField : Byte ,
6868 booleanField : Boolean )
6969
/**
 * Mirrors `AllDataTypes` but additionally carries non-primitive fields —
 * an array (`Seq`), a `Map`, and a nested struct — so tests can exercise
 * Parquet serialization of complex types alongside every primitive type.
 */
case class AllDataTypesWithNonPrimitiveType(
    stringField: String,
    intField: Int,
    longField: Long,
    floatField: Float,
    doubleField: Double,
    shortField: Short,
    byteField: Byte,
    booleanField: Boolean,
    array: Seq[Int],
    map: Map[Int, String],
    nested: Nested)
82+
7083class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll {
7184 TestData // Load test data tables.
7285
@@ -119,6 +132,31 @@ class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterA
119132 }
120133 }
121134
135+ test(" Read/Write All Types with non-primitive type" ) {
136+ val tempDir = getTempFilePath(" parquetTest" ).getCanonicalPath
137+ val range = (0 to 255 )
138+ TestSQLContext .sparkContext.parallelize(range)
139+ .map(x => AllDataTypesWithNonPrimitiveType (
140+ s " $x" , x, x.toLong, x.toFloat, x.toDouble, x.toShort, x.toByte, x % 2 == 0 ,
141+ Seq (x), Map (x -> s " $x" ), Nested (x, s " $x" )))
142+ .saveAsParquetFile(tempDir)
143+ val result = parquetFile(tempDir).collect()
144+ range.foreach {
145+ i =>
146+ assert(result(i).getString(0 ) == s " $i" , s " row $i String field did not match, got ${result(i).getString(0 )}" )
147+ assert(result(i).getInt(1 ) === i)
148+ assert(result(i).getLong(2 ) === i.toLong)
149+ assert(result(i).getFloat(3 ) === i.toFloat)
150+ assert(result(i).getDouble(4 ) === i.toDouble)
151+ assert(result(i).getShort(5 ) === i.toShort)
152+ assert(result(i).getByte(6 ) === i.toByte)
153+ assert(result(i).getBoolean(7 ) === (i % 2 == 0 ))
154+ assert(result(i)(8 ) === Seq (i))
155+ assert(result(i)(9 ) === Map (i -> s " $i" ))
156+ assert(result(i)(10 ) === new GenericRow (Array [Any ](i, s " $i" )))
157+ }
158+ }
159+
122160 test(" self-join parquet files" ) {
123161 val x = ParquetTestData .testData.as(' x )
124162 val y = ParquetTestData .testData.as(' y )
0 commit comments