@@ -19,7 +19,7 @@ package org.apache.spark.sql
 
 import org.apache.spark.sql.functions.{from_json, struct, to_json}
 import org.apache.spark.sql.test.SharedSQLContext
-import org.apache.spark.sql.types.{CalendarIntervalType, IntegerType, StructType}
+import org.apache.spark.sql.types.{CalendarIntervalType, IntegerType, StructType, TimestampType}
 
 class JsonFunctionsSuite extends QueryTest with SharedSQLContext {
   import testImplicits._
@@ -105,6 +105,16 @@ class JsonFunctionsSuite extends QueryTest with SharedSQLContext {
       Row(Row(1)) :: Nil)
   }
 
+  test("from_json with option") {
+    val df = Seq("""{"time": "26/08/2015 18:00"}""").toDS()
+    val schema = new StructType().add("time", TimestampType)
+    val options = Map("timestampFormat" -> "dd/MM/yyyy HH:mm")
+
+    checkAnswer(
+      df.select(from_json($"value", schema, options)),
+      Row(Row(java.sql.Timestamp.valueOf("2015-08-26 18:00:00.0"))))
+  }
+
   test("from_json missing columns") {
     val df = Seq("""{"a": 1}""").toDS()
     val schema = new StructType().add("b", IntegerType)
@@ -131,6 +141,15 @@ class JsonFunctionsSuite extends QueryTest with SharedSQLContext {
       Row("""{"_1":1}""") :: Nil)
   }
 
+  test("to_json with option") {
+    val df = Seq(Tuple1(Tuple1(java.sql.Timestamp.valueOf("2015-08-26 18:00:00.0")))).toDF("a")
+    val options = Map("timestampFormat" -> "dd/MM/yyyy HH:mm")
+
+    checkAnswer(
+      df.select(to_json($"a", options)),
+      Row("""{"_1":"26/08/2015 18:00"}""") :: Nil)
+  }
+
   test("to_json unsupported type") {
     val df = Seq(Tuple1(Tuple1("interval -3 month 7 hours"))).toDF("a")
       .select(struct($"a._1".cast(CalendarIntervalType).as("a")).as("c"))
0 commit comments