
Commit f4cb5cf

Fix a few more tests
1 parent e2401f9 commit f4cb5cf

3 files changed: 23 additions & 35 deletions

sql/core/src/test/scala/org/apache/spark/sql/AggregateHashMapSuite.scala

Lines changed: 15 additions & 20 deletions
@@ -19,13 +19,12 @@ package org.apache.spark.sql
 
 import org.scalatest.BeforeAndAfter
 
-class SingleLevelAggregateHashMapSuite extends DataFrameAggregateSuite with BeforeAndAfter {
+import org.apache.spark.SparkConf
 
-  protected override def beforeAll(): Unit = {
-    sparkConf.set("spark.sql.codegen.fallback", "false")
-    sparkConf.set("spark.sql.codegen.aggregate.map.twolevel.enable", "false")
-    super.beforeAll()
-  }
+class SingleLevelAggregateHashMapSuite extends DataFrameAggregateSuite with BeforeAndAfter {
+  override protected def sparkConf: SparkConf = super.sparkConf
+    .set("spark.sql.codegen.fallback", "false")
+    .set("spark.sql.codegen.aggregate.map.twolevel.enable", "false")
 
   // adding some checking after each test is run, assuring that the configs are not changed
   // in test code
@@ -38,12 +37,9 @@ class SingleLevelAggregateHashMapSuite extends DataFrameAggregateSuite with Befo
 }
 
 class TwoLevelAggregateHashMapSuite extends DataFrameAggregateSuite with BeforeAndAfter {
-
-  protected override def beforeAll(): Unit = {
-    sparkConf.set("spark.sql.codegen.fallback", "false")
-    sparkConf.set("spark.sql.codegen.aggregate.map.twolevel.enable", "true")
-    super.beforeAll()
-  }
+  override protected def sparkConf: SparkConf = super.sparkConf
+    .set("spark.sql.codegen.fallback", "false")
+    .set("spark.sql.codegen.aggregate.map.twolevel.enable", "true")
 
   // adding some checking after each test is run, assuring that the configs are not changed
   // in test code
@@ -55,15 +51,14 @@ class TwoLevelAggregateHashMapSuite extends DataFrameAggregateSuite with BeforeA
   }
 }
 
-class TwoLevelAggregateHashMapWithVectorizedMapSuite extends DataFrameAggregateSuite with
-  BeforeAndAfter {
+class TwoLevelAggregateHashMapWithVectorizedMapSuite
+  extends DataFrameAggregateSuite
+  with BeforeAndAfter {
 
-  protected override def beforeAll(): Unit = {
-    sparkConf.set("spark.sql.codegen.fallback", "false")
-    sparkConf.set("spark.sql.codegen.aggregate.map.twolevel.enable", "true")
-    sparkConf.set("spark.sql.codegen.aggregate.map.vectorized.enable", "true")
-    super.beforeAll()
-  }
+  override protected def sparkConf: SparkConf = super.sparkConf
+    .set("spark.sql.codegen.fallback", "false")
+    .set("spark.sql.codegen.aggregate.map.twolevel.enable", "true")
+    .set("spark.sql.codegen.aggregate.map.vectorized.enable", "true")
 
   // adding some checking after each test is run, assuring that the configs are not changed
   // in test code
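
Every hunk in this commit makes the same mechanical change: instead of mutating the shared sparkConf inside beforeAll() and relying on the session not having been created yet, each suite declares its settings by overriding the protected sparkConf method that the shared test harness reads when it builds the session. A minimal sketch of that pattern in plain Scala, assuming a hypothetical BaseSuite and buildSession in place of Spark's actual test harness:

    import org.apache.spark.SparkConf

    abstract class BaseSuite {
      // Subclasses layer their settings on top of the defaults by overriding this.
      protected def sparkConf: SparkConf = new SparkConf(false)

      // The session is constructed from the final, fully-overridden conf,
      // so per-suite settings are in place before any test runs.
      protected def buildSession(): Unit = {
        val conf = sparkConf
        println(s"fallback = ${conf.get("spark.sql.codegen.fallback", "<default>")}")
      }
    }

    class ExampleSuite extends BaseSuite {
      override protected def sparkConf: SparkConf = super.sparkConf
        .set("spark.sql.codegen.fallback", "false")
    }

Because each override starts from super.sparkConf, settings compose down the inheritance chain, which is exactly what the chained .set(...) calls in the diffs rely on.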

sql/core/src/test/scala/org/apache/spark/sql/DatasetSerializerRegistratorSuite.scala

Lines changed: 5 additions & 7 deletions
@@ -20,22 +20,20 @@ package org.apache.spark.sql
 import com.esotericsoftware.kryo.{Kryo, Serializer}
 import com.esotericsoftware.kryo.io.{Input, Output}
 
+import org.apache.spark.SparkConf
 import org.apache.spark.serializer.KryoRegistrator
 import org.apache.spark.sql.test.SharedSQLContext
-import org.apache.spark.sql.test.TestSparkSession
 
 /**
  * Test suite to test Kryo custom registrators.
  */
 class DatasetSerializerRegistratorSuite extends QueryTest with SharedSQLContext {
   import testImplicits._
 
-  /**
-   * Initialize the [[TestSparkSession]] with a [[KryoRegistrator]].
-   */
-  protected override def beforeAll(): Unit = {
-    sparkConf.set("spark.kryo.registrator", TestRegistrator().getClass.getCanonicalName)
-    super.beforeAll()
+
+  override protected def sparkConf: SparkConf = {
+    // Make sure we use the KryoRegistrator
+    super.sparkConf.set("spark.kryo.registrator", TestRegistrator().getClass.getCanonicalName)
   }
 
   test("Kryo registrator") {

sql/core/src/test/scala/org/apache/spark/sql/execution/DataSourceScanExecRedactionSuite.scala

Lines changed: 3 additions & 8 deletions
@@ -18,22 +18,17 @@ package org.apache.spark.sql.execution
 
 import org.apache.hadoop.fs.Path
 
+import org.apache.spark.SparkConf
 import org.apache.spark.sql.QueryTest
 import org.apache.spark.sql.test.SharedSQLContext
-import org.apache.spark.util.Utils
 
 /**
  * Suite that tests the redaction of DataSourceScanExec
  */
 class DataSourceScanExecRedactionSuite extends QueryTest with SharedSQLContext {
 
-  import Utils._
-
-  override def beforeAll(): Unit = {
-    sparkConf.set("spark.redaction.string.regex",
-      "file:/[\\w_]+")
-    super.beforeAll()
-  }
+  override protected def sparkConf: SparkConf = super.sparkConf
+    .set("spark.redaction.string.regex", "file:/[\\w_]+")
 
   test("treeString is redacted") {
     withTempDir { dir =>
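
The redaction regex itself is unchanged; it is just collapsed onto one line. A plain-Scala sketch (no Spark required) of what file:/[\\w_]+ matches in a plan's treeString; the replacement text shown is illustrative rather than Spark's exact redaction output:

    object RedactionDemo extends App {
      // The string literal "file:/[\\w_]+" is the regex file:/[\w_]+ :
      // "file:/" followed by one or more word characters.
      val regex = "file:/[\\w_]+".r
      val treeString = "FileScan parquet [id#0L] Location: InMemoryFileIndex[file:/tmp_dir_123]"
      val redacted = regex.replaceAllIn(treeString, "*********(redacted)")
      println(redacted) // ... Location: InMemoryFileIndex[*********(redacted)]
    }

The match stops at the closing bracket because ] is not a word character, so only the path itself is replaced.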
