Skip to content

Commit a0322e6

Browse files
committed
Use SparkSession(SparkContext(...)) to prevent the Spark conf from affecting other tests
1 parent 90b10b4 commit a0322e6

File tree

1 file changed

+3
-5
lines changed

1 file changed

+3
-5
lines changed

python/pyspark/sql/tests/test_arrow.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@
2222
import unittest
2323
import warnings
2424

25+
from pyspark import SparkContext, SparkConf
2526
from pyspark.sql import Row, SparkSession
2627
from pyspark.sql.functions import udf
2728
from pyspark.sql.types import *
@@ -430,11 +431,8 @@ class MaxResultArrowTests(unittest.TestCase):
430431

431432
@classmethod
432433
def setUpClass(cls):
433-
cls.spark = SparkSession.builder \
434-
.master("local[4]") \
435-
.appName(cls.__name__) \
436-
.config("spark.driver.maxResultSize", "10k") \
437-
.getOrCreate()
434+
cls.spark = SparkSession(SparkContext(
435+
'local[4]', cls.__name__, conf=SparkConf().set("spark.driver.maxResultSize", "10k")))
438436

439437
# Explicitly enable Arrow and disable fallback.
440438
cls.spark.conf.set("spark.sql.execution.arrow.pyspark.enabled", "true")

0 commit comments

Comments (0)