Skip to content

Commit 4e0cf80

Browse files
committed
remove redundant UnresolvedFieldName checks and the `operation` method from AlterTableCommand, passing the operation name explicitly to checkColumnNotExists
1 parent eb5c294 commit 4e0cf80

File tree

2 files changed

+6
-22
lines changed

2 files changed

+6
-22
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala

Lines changed: 5 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -939,23 +939,17 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog {
939939
* Validates the options used for alter table commands after table and columns are resolved.
940940
*/
941941
private def checkAlterTableCommand(alter: AlterTableCommand): Unit = {
942-
def checkColumnNotExists(fieldNames: Seq[String], struct: StructType): Unit = {
942+
def checkColumnNotExists(op: String, fieldNames: Seq[String], struct: StructType): Unit = {
943943
if (struct.findNestedField(fieldNames, includeCollections = true).isDefined) {
944-
alter.failAnalysis(s"Cannot ${alter.operation} column, because ${fieldNames.quoted} " +
944+
alter.failAnalysis(s"Cannot $op column, because ${fieldNames.quoted} " +
945945
s"already exists in ${struct.treeString}")
946946
}
947947
}
948948

949949
alter match {
950950
case AlterTableAddColumns(table: ResolvedTable, colsToAdd) =>
951951
colsToAdd.foreach { colToAdd =>
952-
colToAdd.fieldName match {
953-
case UnresolvedFieldName(name) =>
954-
alter.failAnalysis(s"Cannot ${alter.operation} missing field ${name.quoted} in " +
955-
s"${table.name} schema: ${table.schema.treeString}")
956-
case _ =>
957-
}
958-
checkColumnNotExists(colToAdd.name, table.schema)
952+
checkColumnNotExists("add", colToAdd.name, table.schema)
959953
}
960954

961955
case AlterTableReplaceColumns(table: ResolvedTable, colsToAdd) =>
@@ -968,12 +962,12 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog {
968962
// if column already exists.
969963
colsToAdd.foreach { colToAdd =>
970964
if (!colsToDelete.contains(colToAdd.name)) {
971-
checkColumnNotExists(colToAdd.name, table.schema)
965+
checkColumnNotExists("add", colToAdd.name, table.schema)
972966
}
973967
}
974968

975969
case AlterTableRenameColumn(table: ResolvedTable, col: ResolvedFieldName, newName) =>
976-
checkColumnNotExists(col.path :+ newName, table.schema)
970+
checkColumnNotExists("rename", col.path :+ newName, table.schema)
977971

978972
case a @ AlterTableAlterColumn(table: ResolvedTable, col: ResolvedFieldName, _, _, _, _) =>
979973
val fieldName = col.name.quoted

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala

Lines changed: 1 addition & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1068,7 +1068,6 @@ case class UnsetTableProperties(
10681068

10691069
trait AlterTableCommand extends UnaryCommand {
10701070
def table: LogicalPlan
1071-
def operation: String
10721071
def changes: Seq[TableChange]
10731072
override def child: LogicalPlan = table
10741073
}
@@ -1085,11 +1084,10 @@ case class AlterTableAddColumns(
10851084
TypeUtils.failWithIntervalType(c.dataType)
10861085
}
10871086

1088-
override def operation: String = "add"
1089-
10901087
override def changes: Seq[TableChange] = {
10911088
columnsToAdd.map { col =>
10921089
require(col.fieldName.resolved)
1090+
require(col.position.isEmpty || col.position.get.resolved)
10931091
TableChange.addColumn(
10941092
col.name.toArray,
10951093
col.dataType,
@@ -1115,8 +1113,6 @@ case class AlterTableReplaceColumns(
11151113
TypeUtils.failWithIntervalType(c.dataType)
11161114
}
11171115

1118-
override def operation: String = "replace"
1119-
11201116
override def changes: Seq[TableChange] = {
11211117
// REPLACE COLUMNS deletes all the existing columns and adds new columns specified.
11221118
require(table.resolved)
@@ -1146,8 +1142,6 @@ case class AlterTableReplaceColumns(
11461142
case class AlterTableDropColumns(
11471143
table: LogicalPlan,
11481144
columnsToDrop: Seq[FieldName]) extends AlterTableCommand {
1149-
override def operation: String = "delete"
1150-
11511145
override def changes: Seq[TableChange] = {
11521146
columnsToDrop.map { col =>
11531147
require(col.resolved, "FieldName should be resolved before it's converted to TableChange.")
@@ -1166,8 +1160,6 @@ case class AlterTableRenameColumn(
11661160
table: LogicalPlan,
11671161
column: FieldName,
11681162
newName: String) extends AlterTableCommand {
1169-
override def operation: String = "rename"
1170-
11711163
override def changes: Seq[TableChange] = {
11721164
require(column.resolved, "FieldName should be resolved before it's converted to TableChange.")
11731165
Seq(TableChange.renameColumn(column.name.toArray, newName))
@@ -1190,8 +1182,6 @@ case class AlterTableAlterColumn(
11901182
import org.apache.spark.sql.connector.catalog.CatalogV2Util._
11911183
dataType.foreach(failNullType)
11921184

1193-
override def operation: String = "update"
1194-
11951185
override def changes: Seq[TableChange] = {
11961186
require(column.resolved, "FieldName should be resolved before it's converted to TableChange.")
11971187
val colName = column.name.toArray

0 commit comments

Comments (0)