Skip to content

Commit fbf8a88

Browse files
committed
Tweak the test.
1 parent 2cf6bbb commit fbf8a88

File tree

2 files changed: +14 additions, −10 deletions

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/JsonExpressionsSuite.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ import java.util.Calendar

 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.UnresolvedException
+import org.apache.spark.sql.catalyst.errors.TreeNodeException
 import org.apache.spark.sql.catalyst.util.{DateTimeTestUtils, DateTimeUtils, GenericArrayData, PermissiveMode}
 import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.types.UTF8String
@@ -626,7 +626,7 @@ class JsonExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     val mapType2 = MapType(IntegerType, CalendarIntervalType)
     val schema2 = StructType(StructField("a", mapType2) :: Nil)
     val struct2 = Literal.create(null, schema2)
-    intercept[UnresolvedException[_]] {
+    intercept[TreeNodeException[_]] {
       checkEvaluation(
         StructsToJson(Map.empty, struct2, gmtId),
         null

sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala

Lines changed: 12 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -195,6 +195,16 @@ class JsonFunctionsSuite extends QueryTest with SharedSQLContext {
       Row("""{"_1":"26/08/2015 18:00"}""") :: Nil)
   }

+  test("to_json - key types of map don't matter") {
+    // interval type is invalid for converting to JSON. However, the keys of a map are treated
+    // as strings, so its type doesn't matter.
+    val df = Seq(Tuple1(Tuple1("interval -3 month 7 hours"))).toDF("a")
+      .select(struct(map($"a._1".cast(CalendarIntervalType), lit("a")).as("col1")).as("c"))
+    checkAnswer(
+      df.select(to_json($"c")),
+      Row("""{"col1":{"interval -3 months 7 hours":"a"}}""") :: Nil)
+  }
+
   test("to_json unsupported type") {
     val baseDf = Seq(Tuple1(Tuple1("interval -3 month 7 hours"))).toDF("a")
     val df = baseDf.select(struct($"a._1".cast(CalendarIntervalType).as("a")).as("c"))
@@ -205,17 +215,11 @@ class JsonFunctionsSuite extends QueryTest with SharedSQLContext {
     assert(e.getMessage.contains(
       "Unable to convert column a of type calendarinterval to JSON."))

-    // interval type is invalid for converting to JSON. However, the keys of a map are treated
-    // as strings, so its type doesn't matter.
+    // interval type is invalid for converting to JSON. We can't use it as value type of a map.
     val df2 = baseDf
-      .select(struct(map($"a._1".cast(CalendarIntervalType), lit("a")).as("col1")).as("c"))
-    val df3 = baseDf
       .select(struct(map(lit("a"), $"a._1".cast(CalendarIntervalType)).as("col1")).as("c"))
-    checkAnswer(
-      df2.select(to_json($"c")),
-      Row("""{"col1":{"interval -3 months 7 hours":"a"}}""") :: Nil)
     val e2 = intercept[AnalysisException] {
-      df3.select(to_json($"c")).collect()
+      df2.select(to_json($"c")).collect()
     }
     assert(e2.getMessage.contains("Unable to convert column col1 of type calendarinterval to JSON"))
   }

0 commit comments

Comments
 (0)