@@ -26,7 +26,6 @@ import scala.collection.mutable.ArrayBuffer
 import org.apache.commons.logging.LogFactory
 import org.apache.hadoop.hive.conf.HiveConf
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars
-import org.apache.hadoop.hive.ql.session.SessionState
 import org.apache.hive.service.cli.thrift.{ThriftBinaryCLIService, ThriftHttpCLIService}
 import org.apache.hive.service.server.{HiveServer2, HiveServerServerOptionsProcessor}
 
@@ -35,7 +34,7 @@ import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.internal.Logging
 import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd, SparkListenerJobStart}
 import org.apache.spark.sql.SQLContext
-import org.apache.spark.sql.hive.HiveSessionState
+import org.apache.spark.sql.hive.{HiveSharedState, HiveUtils}
 import org.apache.spark.sql.hive.thriftserver.ReflectionUtils._
 import org.apache.spark.sql.hive.thriftserver.ui.ThriftServerTab
 import org.apache.spark.sql.internal.SQLConf
@@ -56,9 +55,13 @@ object HiveThriftServer2 extends Logging {
    */
   @DeveloperApi
   def startWithContext(sqlContext: SQLContext): Unit = {
+    val hadoopConf = sqlContext.sessionState.newHadoopConf()
+    HiveUtils.newTemporaryConfiguration(useInMemoryDerby = true).foreach { case (k, v) =>
+      hadoopConf.set(k, v)
+    }
+
     val server = new HiveThriftServer2(sqlContext)
-    val hiveConf = new HiveConf(sqlContext.sessionState.newHadoopConf(), classOf[SessionState])
-    server.init(hiveConf)
+    server.init(SparkSQLEnv.sqlContext.sharedState.asInstanceOf[HiveSharedState].executionHive.conf)
     server.start()
     listener = new HiveThriftServer2Listener(server, sqlContext.conf)
     sqlContext.sparkContext.addSparkListener(listener)
@@ -86,8 +89,8 @@ object HiveThriftServer2 extends Logging {
 
     try {
       val server = new HiveThriftServer2(SparkSQLEnv.sqlContext)
-      server.init(new HiveConf(
-        SparkSQLEnv.sqlContext.sessionState.newHadoopConf(), classOf[SessionState]))
+      server.init(
+        SparkSQLEnv.sqlContext.sharedState.asInstanceOf[HiveSharedState].executionHive.conf)
       server.start()
       logInfo("HiveThriftServer2 started")
       listener = new HiveThriftServer2Listener(server, SparkSQLEnv.sqlContext.conf)
0 commit comments