Skip to content

Commit bf306da

Browse files
committed
review comment
1 parent 0aa8539 commit bf306da

File tree

5 files changed

+3
-4
lines changed

5 files changed

+3
-4
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -17,7 +17,6 @@
1717

1818
package org.apache.spark.sql.catalyst.catalog
1919

20-
import org.apache.spark.sql.AnalysisException
2120
import org.apache.spark.sql.catalyst.analysis.{FunctionAlreadyExistsException, NoSuchDatabaseException, NoSuchFunctionException, NoSuchTableException}
2221
import org.apache.spark.sql.catalyst.expressions.Expression
2322
import org.apache.spark.sql.types.StructType

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -309,7 +309,6 @@ class InMemoryCatalog(
309309
schema: StructType): Unit = synchronized {
310310
requireTableExists(db, table)
311311
val origTable = catalog(db).tables(table).table
312-
313312
catalog(db).tables(table).table = origTable.copy(schema = schema)
314313
}
315314

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -344,6 +344,7 @@ class SessionCatalog(
344344
|not present in the new schema. We don't support dropping columns yet.
345345
""".stripMargin)
346346
}
347+
347348
// assuming the newSchema has all partition columns at the end as required
348349
externalCatalog.alterTableSchema(db, table, newSchema)
349350
}

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala

Lines changed: 0 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -632,11 +632,9 @@ private[spark] class HiveExternalCatalog(conf: SparkConf, hadoopConf: Configurat
632632
val rawTable = getRawTable(db, table)
633633
val withNewSchema = rawTable.copy(schema = schema)
634634
verifyColumnNames(withNewSchema)
635-
636635
// Add table metadata such as table schema, partition columns, etc. to table properties.
637636
val updatedTable = withNewSchema.copy(
638637
properties = withNewSchema.properties ++ tableMetaToTableProps(withNewSchema))
639-
640638
try {
641639
client.alterTable(updatedTable)
642640
} catch {

sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala

Lines changed: 2 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -425,6 +425,8 @@ private[hive] class HiveClientImpl(
425425
},
426426
schema = schema,
427427
partitionColumnNames = partCols.map(_.name),
428+
// For data source tables, we will always overwrite the bucket spec in
429+
// HiveExternalCatalog with the bucketing information in table properties.
428430
bucketSpec = bucketSpec,
429431
owner = h.getOwner,
430432
createTime = h.getTTable.getCreateTime.toLong * 1000,

0 commit comments

Comments (0)