@@ -40,7 +40,8 @@ import org.apache.spark.util.{MutableURLClassLoader, Utils}
 /**
  * A class that holds all state shared across sessions in a given [[SQLContext]].
  */
-private[sql] class SharedState(val sparkContext: SparkContext) extends Logging {
+private[sql] class SharedState(val sparkContext: SparkContext, initConfig: Map[String, String])
+  extends Logging {
 
   // Load hive-site.xml into hadoopConf and determine the warehouse path we want to use, based on
   // the config from both hive and Spark SQL. Finally set the warehouse config value to sparkConf.
@@ -101,7 +102,7 @@ private[sql] class SharedState(val sparkContext: SparkContext) extends Logging {
    */
   lazy val externalCatalog: ExternalCatalogWithListener = {
     val externalCatalog = SharedState.reflect[ExternalCatalog, SparkConf, Configuration](
-      SharedState.externalCatalogClassName(sparkContext.conf),
+      SharedState.externalCatalogClassName(sparkContext.conf, initConfig),
       sparkContext.conf,
       sparkContext.hadoopConfiguration)
 
@@ -165,8 +166,11 @@ object SharedState extends Logging {
 
   private val HIVE_EXTERNAL_CATALOG_CLASS_NAME = "org.apache.spark.sql.hive.HiveExternalCatalog"
 
-  private def externalCatalogClassName(conf: SparkConf): String = {
-    conf.get(CATALOG_IMPLEMENTATION) match {
+  private def externalCatalogClassName(
+      conf: SparkConf,
+      initSessionConfig: Map[String, String]): String = {
+    initSessionConfig
+      .getOrElse(CATALOG_IMPLEMENTATION.key, conf.get(CATALOG_IMPLEMENTATION)) match {
       case "hive" => HIVE_EXTERNAL_CATALOG_CLASS_NAME
       case "in-memory" => classOf[InMemoryCatalog].getCanonicalName
     }
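
For context, the lookup order this diff introduces is: a spark.sql.catalogImplementation value supplied in the initial session config wins, and the global SparkConf is only the fallback. Below is a minimal standalone sketch of that precedence in plain Scala; the key string and the class-name constants are stand-ins for the real Spark internals (CATALOG_IMPLEMENTATION, HIVE_EXTERNAL_CATALOG_CLASS_NAME), not the actual objects.

// Standalone sketch (not the real Spark code): the initial session config is
// consulted first; SparkConf acts only as the fallback.
object CatalogResolutionSketch {
  // Stand-ins for CATALOG_IMPLEMENTATION.key and the catalog class-name constants.
  private val CATALOG_IMPLEMENTATION_KEY = "spark.sql.catalogImplementation"
  private val HIVE_CATALOG = "org.apache.spark.sql.hive.HiveExternalCatalog"
  private val IN_MEMORY_CATALOG = "org.apache.spark.sql.catalyst.catalog.InMemoryCatalog"

  def externalCatalogClassName(
      sparkConfValue: String,
      initSessionConfig: Map[String, String]): String = {
    // Same shape as the change above: session-level value first, SparkConf second.
    initSessionConfig.getOrElse(CATALOG_IMPLEMENTATION_KEY, sparkConfValue) match {
      case "hive" => HIVE_CATALOG
      case "in-memory" => IN_MEMORY_CATALOG
    }
  }

  def main(args: Array[String]): Unit = {
    // SparkConf says "hive", but the first session asked for "in-memory":
    // the session-level value wins.
    println(externalCatalogClassName("hive", Map(CATALOG_IMPLEMENTATION_KEY -> "in-memory")))
    // No session override: fall back to the SparkConf value.
    println(externalCatalogClassName("hive", Map.empty))
  }
}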