@@ -39,9 +39,8 @@ class ConfigBehaviorSuite extends QueryTest with SharedSparkSession {
     def computeChiSquareTest(): Double = {
       val n = 10000
       // Trigger a sort
-      // Range has range partitioning in its output now. To have a range shuffle, we
-      // need to run a repartition first.
-      val data = spark.range(0, n, 1, 1).repartition(10).sort($"id".desc)
+      // Range has range partitioning in its output now.
+      val data = spark.range(0, n, 1, 10).sort($"id".desc)
         .selectExpr("SPARK_PARTITION_ID() pid", "id").as[(Int, Long)].collect()

       // Compute histogram for the number of records per partition post sort
@@ -55,12 +54,12 @@ class ConfigBehaviorSuite extends QueryTest with SharedSparkSession {

     withSQLConf(SQLConf.SHUFFLE_PARTITIONS.key -> numPartitions.toString) {
       // The default chi-sq value should be low
-      assert(computeChiSquareTest() < 100)
+      assert(computeChiSquareTest() < 10)

       withSQLConf(SQLConf.RANGE_EXCHANGE_SAMPLE_SIZE_PER_PARTITION.key -> "1") {
         // If we only sample one point, the range boundaries will be pretty bad and the
         // chi-sq value would be very high.
-        assert(computeChiSquareTest() > 300)
+        assert(computeChiSquareTest() > 100)
       }
     }
   }
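
The body of `computeChiSquareTest` after the `// Compute histogram` comment lies outside these hunks. For context, here is a minimal sketch of how such a statistic can be derived from the collected `(pid, id)` pairs, assuming a uniform expected count of `n / numPartitions` records per output partition; the helper name `chiSquareSketch` and the plain-Scala computation are illustrative, not the suite's exact code.

```scala
// Illustrative only: a plain-Scala chi-square statistic over the per-partition
// record counts. `data` is the collected Array[(Int, Long)] of (partition id, id)
// pairs; `n` and `numPartitions` mirror the test's variables by assumption.
def chiSquareSketch(data: Array[(Int, Long)], n: Int, numPartitions: Int): Double = {
  // Observed histogram: how many records ended up in each output partition.
  val observed = data.groupBy(_._1).values.map(_.length.toDouble)
  // Expected count per partition if the range boundaries split the data evenly.
  val expected = n.toDouble / numPartitions
  // Pearson's chi-square statistic: sum of (observed - expected)^2 / expected.
  observed.map(o => (o - expected) * (o - expected) / expected).sum
}
```

With the input already spread across 10 partitions by `spark.range(0, n, 1, 10)`, the default sampling keeps every observed count close to the expected value, so the statistic stays under the tightened bound of 10; sampling only one point per partition (`RANGE_EXCHANGE_SAMPLE_SIZE_PER_PARTITION = 1`) skews the range boundaries and pushes it above 100, as the updated assertions require.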