Commit e2c3b9d

fix test case
1 parent 3e7beb8 commit e2c3b9d

2 files changed: +16 −14 lines


sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala

Lines changed: 13 additions & 1 deletion
@@ -26,7 +26,8 @@ import scala.util.Random
 import org.scalatest.Matchers._

 import org.apache.spark.SparkException
-import org.apache.spark.sql.catalyst.plans.logical.{OneRowRelation, Union}
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.plans.logical.{OneRowRelation, Project, Union}
 import org.apache.spark.sql.execution.QueryExecution
 import org.apache.spark.sql.execution.aggregate.HashAggregateExec
 import org.apache.spark.sql.execution.exchange.{BroadcastExchangeExec, ReusedExchangeExec, ShuffleExchange}
@@ -1585,4 +1586,15 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {
     val d = sampleDf.withColumn("c", monotonically_increasing_id).select($"c").collect
     assert(d.size == d.distinct.size)
   }
+
+  test("SPARK-17625: data source table in InMemoryCatalog should guarantee output consistency") {
+    val tableName = "tbl"
+    withTable(tableName) {
+      spark.range(10).select('id as 'i, 'id as 'j).write.saveAsTable(tableName)
+      val relation = spark.sessionState.catalog.lookupRelation(TableIdentifier(tableName))
+      val expr = relation.resolve("i")
+      val qe = spark.sessionState.executePlan(Project(Seq(expr), relation))
+      qe.assertAnalyzed()
+    }
+  }
 }
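
Note (not part of the commit): the new test above goes through session-internal APIs (sessionState.catalog.lookupRelation and executePlan). A minimal public-API sketch of the behavior it guards, saving a data source table and then resolving a column against the relation the catalog hands back, assuming a local SparkSession and the illustrative names Spark17625Sketch and tbl:

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

// Sketch only: save a small data source table, read it back through the catalog,
// and project column "i"; this is the user-facing counterpart of the internal check.
object Spark17625Sketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[2]").appName("spark-17625-sketch").getOrCreate()
    spark.range(10).select(col("id").as("i"), col("id").as("j")).write.saveAsTable("tbl")
    try {
      // Resolving "i" against the looked-up relation must analyze and return all rows.
      assert(spark.table("tbl").select("i").count() == 10)
    } finally {
      spark.sql("DROP TABLE IF EXISTS tbl")
      spark.stop()
    }
  }
}

The in-suite test checks the same property one level lower, by building a Project over the relation returned by lookupRelation and asserting that the plan analyzes.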

sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala

Lines changed: 3 additions & 13 deletions
@@ -22,7 +22,9 @@ import java.math.MathContext
 import java.sql.{Date, Timestamp}

 import org.apache.spark.{AccumulatorSuite, SparkException}
-import org.apache.spark.sql.catalyst.plans.logical.Project
+import org.apache.spark.sql.catalyst.analysis.UnresolvedException
+import org.apache.spark.sql.catalyst.expressions.SortOrder
+import org.apache.spark.sql.catalyst.plans.logical.Aggregate
 import org.apache.spark.sql.catalyst.util.StringUtils
 import org.apache.spark.sql.execution.aggregate
 import org.apache.spark.sql.execution.joins.{BroadcastHashJoinExec, CartesianProductExec, SortMergeJoinExec}
@@ -2308,18 +2310,6 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
     }
   }

-  test("data source table created in InMemoryCatalog should guarantee resolving consistency") {
-    val table = "tbl"
-    withTable("tbl") {
-      sql("CREATE TABLE tbl(i INT, j STRING) USING parquet")
-      val tableIdent = spark.sessionState.sqlParser.parseTableIdentifier(table)
-      val relation = spark.sessionState.catalog.lookupRelation(tableIdent)
-      val expr = relation.resolve("i")
-      val plan = Dataset.ofRows(spark, Project(Seq(expr), relation))
-      plan.queryExecution.assertAnalyzed()
-    }
-  }
-
   test("Eliminate noop ordinal ORDER BY") {
     withSQLConf(SQLConf.ORDER_BY_ORDINAL.key -> "true") {
       val plan1 = sql("SELECT 1.0, 'abc', year(current_date()) ORDER BY 1, 2, 3")
