2 files changed (+8, -10 lines)

catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
@@ -195,6 +195,8 @@ abstract class Optimizer(catalogManager: CatalogManager)
       EliminateSorts) :+
     Batch("Decimal Optimizations", fixedPoint,
       DecimalAggregates) :+
+    // This batch must run after "Decimal Optimizations", as that one may change the
+    // aggregate distinct column
     Batch("Distinct Aggregate Rewrite", Once,
       RewriteDistinctAggregates) :+
     Batch("Object Expressions Optimization", fixedPoint,
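For context on the new comment: DecimalAggregates may rewrite Sum and Average over decimal columns to aggregate the column's unscaled Long value instead of the decimal itself, and whether a given aggregate is rewritten depends on the column's precision. If RewriteDistinctAggregates ran first, it would group the distinct aggregates by child expressions that the decimal batch is about to change. The snippet below is a rough standalone sketch (not part of this change) for observing that rewrite on the same data as the test in DataFrameSuite; the local-mode session setup and object name are only illustrative, and the exact optimized-plan shape depends on the Spark version.

import org.apache.spark.sql.SparkSession

object Spark32816Sketch {
  def main(args: Array[String]): Unit = {
    // Local session purely for illustration; any SparkSession works.
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("SPARK-32816 sketch")
      .getOrCreate()

    // Same data as the regression test: one partition of ids 0..99 cast to DECIMAL(9, 0).
    spark.range(0, 100, 1, 1)
      .selectExpr("id", "cast(id as decimal(9, 0)) as decimal_col")
      .createOrReplaceTempView("test_table")

    val df = spark.sql(
      "select avg(distinct decimal_col), sum(distinct decimal_col) from test_table")

    // In the optimized logical plan, depending on the precision, some of the distinct
    // aggregates (typically the Average here) end up operating on UnscaledValue(decimal_col)
    // rather than decimal_col itself, which is why the distinct rewrite has to see the
    // plan only after the decimal batch has run.
    df.explain(true)
    df.show() // expected: 49.5 and 4950

    spark.stop()
  }
}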
core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -2557,16 +2557,12 @@ class DataFrameSuite extends QueryTest
   }

   test("SPARK-32816: aggregating multiple distinct DECIMAL columns") {
-    withTempPath { path =>
-      spark.range(0, 100, 1, 1)
-        .selectExpr("id", "cast(id as decimal(9, 0)) as decimal_col")
-        .write.mode("overwrite")
-        .parquet(path.getAbsolutePath)
-      spark.read.parquet(path.getAbsolutePath).createOrReplaceTempView("test_table")
-      checkAnswer(
-        sql("select avg(distinct decimal_col), sum(distinct decimal_col) from test_table"),
-        Row(49.5, 4950))
-    }
+    spark.range(0, 100, 1, 1)
+      .selectExpr("id", "cast(id as decimal(9, 0)) as decimal_col")
+      .createOrReplaceTempView("test_table")
+    checkAnswer(
+      sql("select avg(distinct decimal_col), sum(distinct decimal_col) from test_table"),
+      Row(49.5, 4950))
   }
 }