Skip to content

Commit 825c0ad

Browse files
committed
Rename a conf name: `HIVE_CREATETABLE_DEFAULTDB_USEWAREHOUSE_PATH` → `TEST_HIVE_CREATETABLE_DEFAULTDB_USEWAREHOUSE_PATH` (key `spark.hive.createTable.defaultDB.location.useWarehousePath` → `spark.hive.test.createTable.defaultDB.location.useWarehousePath`) to mark the conf as test-only
1 parent aebdfc6 commit 825c0ad

File tree

3 files changed

+5
-7
lines changed

3 files changed

+5
-7
lines changed

sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -673,8 +673,8 @@ object SQLConf {
673673
.createWithDefault(TimeZone.getDefault().getID())
674674

675675
// for test
676-
val HIVE_CREATETABLE_DEFAULTDB_USEWAREHOUSE_PATH =
677-
buildConf("spark.hive.createTable.defaultDB.location.useWarehousePath")
676+
val TEST_HIVE_CREATETABLE_DEFAULTDB_USEWAREHOUSE_PATH =
677+
buildConf("spark.hive.test.createTable.defaultDB.location.useWarehousePath")
678678
.doc("Enables test case to use warehouse path instead of db location when " +
679679
"create table in default database.")
680680
.booleanConf

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -410,7 +410,7 @@ private[spark] class HiveExternalCatalog(conf: SparkConf, hadoopConf: Configurat
410410
// if the database is default, the value of WAREHOUSE_PATH in conf returned
411411
private def defaultTablePath(tableIdent: TableIdentifier): String = {
412412
val dbLocation = if (tableIdent.database.get == SessionCatalog.DEFAULT_DATABASE
413-
|| conf.get(SQLConf.HIVE_CREATETABLE_DEFAULTDB_USEWAREHOUSE_PATH)) {
413+
|| conf.get(SQLConf.TEST_HIVE_CREATETABLE_DEFAULTDB_USEWAREHOUSE_PATH)) {
414414
conf.get(WAREHOUSE_PATH)
415415
} else {
416416
getDatabase(tableIdent.database.get).locationUri

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1592,7 +1592,7 @@ class HiveDDLSuite
15921592
withTable("t") {
15931593
withTempDir { dir =>
15941594
spark.sparkContext.conf
1595-
.set(SQLConf.HIVE_CREATETABLE_DEFAULTDB_USEWAREHOUSE_PATH.key, "true")
1595+
.set(SQLConf.TEST_HIVE_CREATETABLE_DEFAULTDB_USEWAREHOUSE_PATH.key, "true")
15961596

15971597
spark.sql(s"CREATE DATABASE default_test LOCATION '$dir'" )
15981598
val db = spark.sessionState.catalog.getDatabaseMetadata("default_test")
@@ -1606,7 +1606,7 @@ class HiveDDLSuite
16061606

16071607
// clear
16081608
spark.sparkContext.conf
1609-
.remove(SQLConf.HIVE_CREATETABLE_DEFAULTDB_USEWAREHOUSE_PATH.key)
1609+
.remove(SQLConf.TEST_HIVE_CREATETABLE_DEFAULTDB_USEWAREHOUSE_PATH.key)
16101610
spark.sql("DROP TABLE t")
16111611
spark.sql("DROP DATABASE default_test")
16121612
spark.sql("USE DEFAULT")
@@ -1624,8 +1624,6 @@ class HiveDDLSuite
16241624
assert(table.location.stripSuffix("/") == s"${db.locationUri.stripSuffix("/")}/t" )
16251625

16261626
// clear
1627-
spark.sparkContext.conf
1628-
.remove(SQLConf.HIVE_CREATETABLE_DEFAULTDB_USEWAREHOUSE_PATH.key)
16291627
spark.sql("DROP TABLE t")
16301628
spark.sql("DROP DATABASE test_not_default")
16311629
spark.sql("USE DEFAULT")

0 commit comments

Comments (0)