
Commit 6071926

rm unused hive_execution_version

1 parent d010442 commit 6071926

5 files changed: +20 -27 lines


sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala

Lines changed: 0 additions & 1 deletion
@@ -55,7 +55,6 @@ private[hive] object SparkSQLEnv extends Logging {
       metadataHive.setOut(new PrintStream(System.out, true, "UTF-8"))
       metadataHive.setInfo(new PrintStream(System.err, true, "UTF-8"))
       metadataHive.setError(new PrintStream(System.err, true, "UTF-8"))
-      sparkSession.conf.set("spark.sql.hive.version", HiveUtils.hiveExecutionVersion)
     }
   }

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLSessionManager.scala

Lines changed: 0 additions & 1 deletion
@@ -77,7 +77,6 @@ private[hive] class SparkSQLSessionManager(hiveServer: HiveServer2, sqlContext:
     } else {
       sqlContext.newSession()
     }
-    ctx.setConf("spark.sql.hive.version", HiveUtils.hiveExecutionVersion)
     if (sessionConf != null && sessionConf.containsKey("use:database")) {
       ctx.sql(s"use ${sessionConf.get("use:database")}")
     }
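
With these two deletions the Thrift server stops copying a spark.sql.hive.version entry into every session's conf; the version key that remains queryable is spark.sql.hive.metastore.version, as the updated tests below show. A minimal illustration of the replacement check (hypothetical, not part of this commit, and assuming an already-built Hive-enabled SparkSession named spark):

// Hypothetical snippet, not part of this patch.
// SET <key> returns a single row with columns (key, value).
val row = spark.sql("SET spark.sql.hive.metastore.version").head()
assert(row.getString(0) == "spark.sql.hive.metastore.version")
println(s"Hive metastore version: ${row.getString(1)}")   // e.g. 1.2.1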

sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala

Lines changed: 8 additions & 8 deletions
@@ -155,10 +155,10 @@ class HiveThriftBinaryServerSuite extends HiveThriftJdbcTest {

   test("Checks Hive version") {
     withJdbcStatement() { statement =>
-      val resultSet = statement.executeQuery("SET spark.sql.hive.version")
+      val resultSet = statement.executeQuery("SET spark.sql.hive.metastore.version")
       resultSet.next()
-      assert(resultSet.getString(1) === "spark.sql.hive.version")
-      assert(resultSet.getString(2) === HiveUtils.hiveExecutionVersion)
+      assert(resultSet.getString(1) === "spark.sql.hive.metastore.version")
+      assert(resultSet.getString(2) === HiveUtils.builtinHiveVersion)
     }
   }

@@ -521,7 +521,7 @@ class HiveThriftBinaryServerSuite extends HiveThriftJdbcTest {
         conf += resultSet.getString(1) -> resultSet.getString(2)
       }

-      assert(conf.get("spark.sql.hive.version") === Some("1.2.1"))
+      assert(conf.get("spark.sql.hive.metastore.version") === Some("1.2.1"))
     }
   }

@@ -534,7 +534,7 @@ class HiveThriftBinaryServerSuite extends HiveThriftJdbcTest {
        conf += resultSet.getString(1) -> resultSet.getString(2)
      }

-      assert(conf.get("spark.sql.hive.version") === Some("1.2.1"))
+      assert(conf.get("spark.sql.hive.metastore.version") === Some("1.2.1"))
     }
   }

@@ -721,10 +721,10 @@ class HiveThriftHttpServerSuite extends HiveThriftJdbcTest {

   test("Checks Hive version") {
     withJdbcStatement() { statement =>
-      val resultSet = statement.executeQuery("SET spark.sql.hive.version")
+      val resultSet = statement.executeQuery("SET spark.sql.hive.metastore.version")
       resultSet.next()
-      assert(resultSet.getString(1) === "spark.sql.hive.version")
-      assert(resultSet.getString(2) === HiveUtils.hiveExecutionVersion)
+      assert(resultSet.getString(1) === "spark.sql.hive.metastore.version")
+      assert(resultSet.getString(2) === HiveUtils.builtinHiveVersion)
     }
   }
 }
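
For readers unfamiliar with the harness: withJdbcStatement opens a plain JDBC connection to the Thrift server the suite starts, so the checks above amount to running SET over JDBC and reading the key and value columns. A standalone sketch of the same check (hypothetical host, port and credentials, and it assumes the Hive JDBC driver is on the classpath; the suite derives the real connection details from the server it launches):

import java.sql.DriverManager

// Hypothetical connection details, for illustration only.
val conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default", "user", "")
try {
  val rs = conn.createStatement().executeQuery("SET spark.sql.hive.metastore.version")
  rs.next()
  println(s"${rs.getString(1)} = ${rs.getString(2)}")   // key, then value (e.g. 1.2.1)
} finally {
  conn.close()
}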

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala

Lines changed: 10 additions & 15 deletions
@@ -58,28 +58,23 @@ private[spark] object HiveUtils extends Logging {
   }

   /** The version of hive used internally by Spark SQL. */
-  val hiveExecutionVersion: String = "1.2.1"
+  val builtinHiveVersion: String = "1.2.1"

   val HIVE_METASTORE_VERSION = buildConf("spark.sql.hive.metastore.version")
     .doc("Version of the Hive metastore. Available options are " +
-      s"<code>0.12.0</code> through <code>$hiveExecutionVersion</code>.")
+      s"<code>0.12.0</code> through <code>$builtinHiveVersion</code>.")
     .stringConf
-    .createWithDefault(hiveExecutionVersion)
-
-  val HIVE_EXECUTION_VERSION = buildConf("spark.sql.hive.version")
-    .doc("Version of Hive used internally by Spark SQL.")
-    .stringConf
-    .createWithDefault(hiveExecutionVersion)
+    .createWithDefault(builtinHiveVersion)

   val HIVE_METASTORE_JARS = buildConf("spark.sql.hive.metastore.jars")
     .doc(s"""
       | Location of the jars that should be used to instantiate the HiveMetastoreClient.
       | This property can be one of three options: "
       | 1. "builtin"
-      |   Use Hive ${hiveExecutionVersion}, which is bundled with the Spark assembly when
+      |   Use Hive ${builtinHiveVersion}, which is bundled with the Spark assembly when
       |   <code>-Phive</code> is enabled. When this option is chosen,
       |   <code>spark.sql.hive.metastore.version</code> must be either
-      |   <code>${hiveExecutionVersion}</code> or not defined.
+      |   <code>${builtinHiveVersion}</code> or not defined.
       | 2. "maven"
       |   Use Hive jars of specified version downloaded from Maven repositories.
       | 3. A classpath in the standard format for both Hive and Hadoop.

@@ -259,9 +254,9 @@ private[spark] object HiveUtils extends Logging {
   protected[hive] def newClientForExecution(
       conf: SparkConf,
       hadoopConf: Configuration): HiveClientImpl = {
-    logInfo(s"Initializing execution hive, version $hiveExecutionVersion")
+    logInfo(s"Initializing execution hive, version $builtinHiveVersion")
     val loader = new IsolatedClientLoader(
-      version = IsolatedClientLoader.hiveVersion(hiveExecutionVersion),
+      version = IsolatedClientLoader.hiveVersion(builtinHiveVersion),
       sparkConf = conf,
       execJars = Seq.empty,
       hadoopConf = hadoopConf,

@@ -297,12 +292,12 @@ private[spark] object HiveUtils extends Logging {
     val metaVersion = IsolatedClientLoader.hiveVersion(hiveMetastoreVersion)

     val isolatedLoader = if (hiveMetastoreJars == "builtin") {
-      if (hiveExecutionVersion != hiveMetastoreVersion) {
+      if (builtinHiveVersion != hiveMetastoreVersion) {
        throw new IllegalArgumentException(
          "Builtin jars can only be used when hive execution version == hive metastore version. " +
-            s"Execution: $hiveExecutionVersion != Metastore: $hiveMetastoreVersion. " +
+            s"Execution: $builtinHiveVersion != Metastore: $hiveMetastoreVersion. " +
            "Specify a vaild path to the correct hive jars using $HIVE_METASTORE_JARS " +
-            s"or change ${HIVE_METASTORE_VERSION.key} to $hiveExecutionVersion.")
+            s"or change ${HIVE_METASTORE_VERSION.key} to $builtinHiveVersion.")
      }

      // We recursively find all jars in the class loader chain,
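
The deleted HIVE_EXECUTION_VERSION entry used the same builder chain that HIVE_METASTORE_VERSION keeps using, so after this change spark.sql.hive.version is no longer a registered config key. A sketch of that chain with a purely hypothetical key, only to spell out the pattern (buildConf, stringConf and createWithDefault as they appear in the hunk above):

// Hypothetical entry for illustration only; it mirrors the surviving HIVE_METASTORE_VERSION.
val EXAMPLE_VERSION = buildConf("spark.sql.hive.example.version")
  .doc("Illustrative string config defaulting to the bundled Hive version.")
  .stringConf
  .createWithDefault(builtinHiveVersion)

// Callers then read the value by key, e.g. sparkSession.conf.get(EXAMPLE_VERSION.key).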

sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala

Lines changed: 2 additions & 2 deletions
@@ -73,15 +73,15 @@ class VersionsSuite extends SparkFunSuite with Logging {
   }

   test("success sanity check") {
-    val badClient = buildClient(HiveUtils.hiveExecutionVersion, new Configuration())
+    val badClient = buildClient(HiveUtils.builtinHiveVersion, new Configuration())
     val db = new CatalogDatabase("default", "desc", new URI("loc"), Map())
     badClient.createDatabase(db, ignoreIfExists = true)
   }

   test("hadoop configuration preserved") {
     val hadoopConf = new Configuration()
     hadoopConf.set("test", "success")
-    val client = buildClient(HiveUtils.hiveExecutionVersion, hadoopConf)
+    val client = buildClient(HiveUtils.builtinHiveVersion, hadoopConf)
     assert("success" === client.getConf("test", null))
   }