
Commit dad7095

Pass execution hive directly
1 parent 8dfac81 commit dad7095

File tree: 1 file changed (+9, -6 lines)


sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2.scala

Lines changed: 9 additions & 6 deletions
@@ -26,7 +26,6 @@ import scala.collection.mutable.ArrayBuffer
 import org.apache.commons.logging.LogFactory
 import org.apache.hadoop.hive.conf.HiveConf
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars
-import org.apache.hadoop.hive.ql.session.SessionState
 import org.apache.hive.service.cli.thrift.{ThriftBinaryCLIService, ThriftHttpCLIService}
 import org.apache.hive.service.server.{HiveServer2, HiveServerServerOptionsProcessor}

@@ -35,7 +34,7 @@ import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.internal.Logging
 import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd, SparkListenerJobStart}
 import org.apache.spark.sql.SQLContext
-import org.apache.spark.sql.hive.HiveSessionState
+import org.apache.spark.sql.hive.{HiveSharedState, HiveUtils}
 import org.apache.spark.sql.hive.thriftserver.ReflectionUtils._
 import org.apache.spark.sql.hive.thriftserver.ui.ThriftServerTab
 import org.apache.spark.sql.internal.SQLConf

@@ -56,9 +55,13 @@ object HiveThriftServer2 extends Logging {
    */
   @DeveloperApi
   def startWithContext(sqlContext: SQLContext): Unit = {
+    val hadoopConf = sqlContext.sessionState.newHadoopConf()
+    HiveUtils.newTemporaryConfiguration(useInMemoryDerby = true).foreach { case (k, v) =>
+      hadoopConf.set(k, v)
+    }
+
     val server = new HiveThriftServer2(sqlContext)
-    val hiveConf = new HiveConf(sqlContext.sessionState.newHadoopConf(), classOf[SessionState])
-    server.init(hiveConf)
+    server.init(SparkSQLEnv.sqlContext.sharedState.asInstanceOf[HiveSharedState].executionHive.conf)
     server.start()
     listener = new HiveThriftServer2Listener(server, sqlContext.conf)
     sqlContext.sparkContext.addSparkListener(listener)

@@ -86,8 +89,8 @@ object HiveThriftServer2 extends Logging {

     try {
       val server = new HiveThriftServer2(SparkSQLEnv.sqlContext)
-      server.init(new HiveConf(
-        SparkSQLEnv.sqlContext.sessionState.newHadoopConf(), classOf[SessionState]))
+      server.init(
+        SparkSQLEnv.sqlContext.sharedState.asInstanceOf[HiveSharedState].executionHive.conf)
       server.start()
       logInfo("HiveThriftServer2 started")
       listener = new HiveThriftServer2Listener(server, SparkSQLEnv.sqlContext.conf)
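For context, a minimal caller sketch (not part of this commit, and the object name is hypothetical) showing how the changed startWithContext path is typically exercised: after this change, the embedded HiveServer2 is initialized with the execution Hive client's HiveConf rather than a HiveConf freshly constructed from the session's Hadoop configuration. It assumes a Spark build of this era where SparkSession with Hive support is available.

// Hypothetical example object, not part of the diff above.
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.hive.thriftserver.HiveThriftServer2

object ThriftServerExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("thrift-server-example")
      .enableHiveSupport()   // shared state becomes a HiveSharedState
      .getOrCreate()

    // startWithContext now derives the server's HiveConf from the execution
    // Hive client instead of building a new HiveConf per invocation.
    HiveThriftServer2.startWithContext(spark.sqlContext)
  }
}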
