
Commit 4df4f7c

fix comments
1 parent b1ce4b5 commit 4df4f7c

2 files changed: +8 lines, -10 lines

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala

Lines changed: 2 additions & 0 deletions

@@ -195,6 +195,8 @@ abstract class Optimizer(catalogManager: CatalogManager)
       EliminateSorts) :+
     Batch("Decimal Optimizations", fixedPoint,
       DecimalAggregates) :+
+    // This batch must run after "Decimal Optimizations", as that one may change the
+    // aggregate distinct column
     Batch("Distinct Aggregate Rewrite", Once,
       RewriteDistinctAggregates) :+
     Batch("Object Expressions Optimization", fixedPoint,

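As context for the new comment: the DecimalAggregates rule may rewrite sum/avg over a
small-precision decimal column, which changes the expression the distinct aggregate is
computed over, so the "Distinct Aggregate Rewrite" batch has to run afterwards. Below is
a minimal, standalone sketch of the query exercised by the SPARK-32816 test further down;
the SparkSession setup and the object name are illustrative additions, not code from this
commit.

// Hypothetical standalone reproduction of the SPARK-32816 query (illustrative only).
import org.apache.spark.sql.SparkSession

object Spark32816Sketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("SPARK-32816 sketch")
      .getOrCreate()

    // Same data as the updated DataFrameSuite test: a single-partition range with a
    // decimal(9, 0) column, registered as a temp view.
    spark.range(0, 100, 1, 1)
      .selectExpr("id", "cast(id as decimal(9, 0)) as decimal_col")
      .createOrReplaceTempView("test_table")

    // With the batches ordered as in this commit, the query returns avg = 49.5 and
    // sum = 4950 over the distinct values 0 .. 99.
    val query =
      "select avg(distinct decimal_col), sum(distinct decimal_col) from test_table"
    spark.sql(query).show()

    spark.stop()
  }
}
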
sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala

Lines changed: 6 additions & 10 deletions

@@ -2557,16 +2557,12 @@ class DataFrameSuite extends QueryTest
   }
 
   test("SPARK-32816: aggregating multiple distinct DECIMAL columns") {
-    withTempPath { path =>
-      spark.range(0, 100, 1, 1)
-        .selectExpr("id", "cast(id as decimal(9, 0)) as decimal_col")
-        .write.mode("overwrite")
-        .parquet(path.getAbsolutePath)
-      spark.read.parquet(path.getAbsolutePath).createOrReplaceTempView("test_table")
-      checkAnswer(
-        sql("select avg(distinct decimal_col), sum(distinct decimal_col) from test_table"),
-        Row(49.5, 4950))
-    }
+    spark.range(0, 100, 1, 1)
+      .selectExpr("id", "cast(id as decimal(9, 0)) as decimal_col")
+      .createOrReplaceTempView("test_table")
+    checkAnswer(
+      sql("select avg(distinct decimal_col), sum(distinct decimal_col) from test_table"),
+      Row(49.5, 4950))
   }
 }

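To see what the two batches produce together, a small follow-up to the sketch above
(again illustrative only, reusing its spark session and test_table view) is to print the
optimized logical plan of the same query:

// Continues the sketch above; prints the plan after the optimizer batches have run.
val df = spark.sql(
  "select avg(distinct decimal_col), sum(distinct decimal_col) from test_table")
println(df.queryExecution.optimizedPlan.treeString)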