Commit f391721

update object name and package include

1 parent 032c916 commit f391721

File tree

2 files changed: +28 -26 lines changed

2 files changed

+28
-26
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/CatalystErrors.scala renamed to sql/catalyst/src/main/scala/org/apache/spark/sql/QueryCompilationErrors.scala

Lines changed: 1 addition & 0 deletions

@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.errors
 
+import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.expressions.{Expression, GroupingID}
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.util.toPrettySQL
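The renamed object now lives in the new org.apache.spark.sql.errors package, which is why the hunk above adds an explicit import of AnalysisException. For orientation, here is a minimal sketch of the object's shape as implied by this diff: the method name and parameter types come from the Analyzer call sites below, while the message text is only a placeholder, since the renamed file's body is not displayed in this commit.

package org.apache.spark.sql.errors

import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.expressions.{Expression, GroupingID}

object QueryCompilationErrors {

  // Sketch only: the signature mirrors the call site
  // `QueryCompilationErrors.groupingIDMismatchError(e, groupByExprs)` in
  // Analyzer.scala; the message wording is a placeholder, not verbatim Spark text.
  def groupingIDMismatchError(
      groupingID: GroupingID,
      groupByExprs: Seq[Expression]): Throwable = {
    new AnalysisException(
      s"Columns of grouping_id (${groupingID.groupByExprs.mkString(",")}) " +
        s"does not match grouping columns (${groupByExprs.mkString(",")})")
  }
}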

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala

Lines changed: 27 additions & 26 deletions
@@ -25,7 +25,7 @@ import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
 import scala.util.Random
 
-import org.apache.spark.sql.{AnalysisException, CatalystErrors}
+import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst._
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.encoders.OuterScopes
@@ -42,6 +42,7 @@ import org.apache.spark.sql.connector.catalog._
 import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
 import org.apache.spark.sql.connector.catalog.TableChange.{AddColumn, After, ColumnChange, ColumnPosition, DeleteColumn, RenameColumn, UpdateColumnComment, UpdateColumnNullability, UpdateColumnPosition, UpdateColumnType}
 import org.apache.spark.sql.connector.expressions.{FieldReference, IdentityTransform, Transform}
+import org.apache.spark.sql.errors.QueryCompilationErrors
 import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.SQLConf.{PartitionOverwriteMode, StoreAssignmentPolicy}
@@ -443,15 +444,15 @@ class Analyzer(
             e.groupByExprs.map(_.canonicalized) == groupByExprs.map(_.canonicalized)) {
           Alias(gid, toPrettySQL(e))()
         } else {
-          throw CatalystErrors.groupingIDMismatchError(e, groupByExprs)
+          throw QueryCompilationErrors.groupingIDMismatchError(e, groupByExprs)
         }
       case e @ Grouping(col: Expression) =>
         val idx = groupByExprs.indexWhere(_.semanticEquals(col))
         if (idx >= 0) {
          Alias(Cast(BitwiseAnd(ShiftRight(gid, Literal(groupByExprs.length - 1 - idx)),
            Literal(1L)), ByteType), toPrettySQL(e))()
         } else {
-          throw CatalystErrors.groupingColInvalidError(col, groupByExprs)
+          throw QueryCompilationErrors.groupingColInvalidError(col, groupByExprs)
         }
     }
   }
@@ -567,7 +568,7 @@ class Analyzer(
     val finalGroupByExpressions = getFinalGroupByExpressions(selectedGroupByExprs, groupByExprs)
 
     if (finalGroupByExpressions.size > GroupingID.dataType.defaultSize * 8) {
-      throw CatalystErrors.groupingSizeTooLargeError(GroupingID.dataType.defaultSize * 8)
+      throw QueryCompilationErrors.groupingSizeTooLargeError(GroupingID.dataType.defaultSize * 8)
     }
 
     // Expand works by setting grouping expressions to null as determined by the
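The two hunks above touch the grouping-analytics bit layout: each group-by expression owns one bit of the grouping ID, grouping(col) extracts that bit via the ShiftRight/BitwiseAnd pair, and the expression count is capped at the ID's width in bits. A self-contained plain-Scala sketch of the same arithmetic (names here are illustrative, not Spark APIs):

object GroupingBitsSketch extends App {
  // With n group-by expressions, bit (n - 1 - idx) of the grouping ID is 1
  // when the expression at position idx is aggregated away in the current
  // grouping set, and 0 when it is grouped.
  def grouping(gid: Long, n: Int, idx: Int): Byte =
    ((gid >> (n - 1 - idx)) & 1L).toByte

  // Example with n = 3 and gid = 0b011 = 3: column 0 is grouped,
  // columns 1 and 2 are aggregated.
  assert(grouping(3L, n = 3, idx = 0) == 0)
  assert(grouping(3L, n = 3, idx = 1) == 1)
  assert(grouping(3L, n = 3, idx = 2) == 1)

  // The size guard in the second hunk follows from the same layout: an ID of
  // GroupingID.dataType.defaultSize bytes holds defaultSize * 8 bits, so at
  // most that many group-by expressions (64 for an 8-byte long).
}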
@@ -703,7 +704,7 @@ class Analyzer(
           || !p.pivotColumn.resolved || !p.pivotValues.forall(_.resolved) => p
       case Pivot(groupByExprsOpt, pivotColumn, pivotValues, aggregates, child) =>
         if (!RowOrdering.isOrderable(pivotColumn.dataType)) {
-          throw CatalystErrors.unorderablePivotColError(pivotColumn)
+          throw QueryCompilationErrors.unorderablePivotColError(pivotColumn)
         }
         // Check all aggregate expressions.
         aggregates.foreach(checkValidAggregateExpression)
@@ -714,10 +715,10 @@ class Analyzer(
             case _ => value.foldable
           }
           if (!foldable) {
-            throw CatalystErrors.nonliteralPivotValError(value)
+            throw QueryCompilationErrors.nonliteralPivotValError(value)
           }
           if (!Cast.canCast(value.dataType, pivotColumn.dataType)) {
-            throw CatalystErrors.pivotValDataTypeMismatchError(value, pivotColumn)
+            throw QueryCompilationErrors.pivotValDataTypeMismatchError(value, pivotColumn)
           }
           Cast(value, pivotColumn.dataType, Some(conf.sessionLocalTimeZone)).eval(EmptyRow)
         }
@@ -1040,7 +1041,7 @@ class Analyzer(
     case i @ InsertIntoStatement(r: DataSourceV2Relation, _, _, _, _) if i.query.resolved =>
       // ifPartitionNotExists is append with validation, but validation is not supported
       if (i.ifPartitionNotExists) {
-        throw CatalystErrors.unsupportedIfNotExistsError(r.table.name)
+        throw QueryCompilationErrors.unsupportedIfNotExistsError(r.table.name)
       }
 
       val partCols = partitionColumnNames(r.table)
@@ -1077,7 +1078,7 @@ class Analyzer(
       partitionColumnNames.find(name => conf.resolver(name, partitionName)) match {
         case Some(_) =>
         case None =>
-          throw CatalystErrors.nonPartitionColError(partitionName)
+          throw QueryCompilationErrors.nonPartitionColError(partitionName)
       }
     }
   }
@@ -1099,7 +1100,7 @@ class Analyzer(
       case Some(attr) =>
         attr.name -> staticName
       case _ =>
-        throw CatalystErrors.addStaticValToUnknownColError(staticName)
+        throw QueryCompilationErrors.addStaticValToUnknownColError(staticName)
     }).toMap
 
     val queryColumns = query.output.iterator
@@ -1141,7 +1142,7 @@ class Analyzer(
         // an UnresolvedAttribute.
         EqualTo(UnresolvedAttribute(attr.name), Cast(Literal(value), attr.dataType))
       case None =>
-        throw CatalystErrors.unknownStaticPartitionColError(name)
+        throw QueryCompilationErrors.unknownStaticPartitionColError(name)
     }
   }.reduce(And)
 }
@@ -2354,19 +2355,19 @@ class Analyzer(
     def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
       case Project(projectList, _) if projectList.exists(hasNestedGenerator) =>
         val nestedGenerator = projectList.find(hasNestedGenerator).get
-        throw CatalystErrors.nestedGeneratorError(trimAlias(nestedGenerator))
+        throw QueryCompilationErrors.nestedGeneratorError(trimAlias(nestedGenerator))
 
       case Project(projectList, _) if projectList.count(hasGenerator) > 1 =>
         val generators = projectList.filter(hasGenerator).map(trimAlias)
-        throw CatalystErrors.moreThanOneGeneratorError(generators, "select")
+        throw QueryCompilationErrors.moreThanOneGeneratorError(generators, "select")
 
       case Aggregate(_, aggList, _) if aggList.exists(hasNestedGenerator) =>
         val nestedGenerator = aggList.find(hasNestedGenerator).get
-        throw CatalystErrors.nestedGeneratorError(trimAlias(nestedGenerator))
+        throw QueryCompilationErrors.nestedGeneratorError(trimAlias(nestedGenerator))
 
       case Aggregate(_, aggList, _) if aggList.count(hasGenerator) > 1 =>
         val generators = aggList.filter(hasGenerator).map(trimAlias)
-        throw CatalystErrors.moreThanOneGeneratorError(generators, "aggregate")
+        throw QueryCompilationErrors.moreThanOneGeneratorError(generators, "aggregate")
 
       case agg @ Aggregate(groupList, aggList, child) if aggList.forall {
         case AliasedGenerator(_, _, _) => true
@@ -2449,7 +2450,7 @@ class Analyzer(
       case g: Generate => g
 
       case p if p.expressions.exists(hasGenerator) =>
-        throw CatalystErrors.generatorOutsideSelectError(p)
+        throw QueryCompilationErrors.generatorOutsideSelectError(p)
     }
   }
 
@@ -3010,7 +3011,7 @@ class Analyzer(
   private def validateStoreAssignmentPolicy(): Unit = {
     // SPARK-28730: LEGACY store assignment policy is disallowed in data source v2.
     if (conf.storeAssignmentPolicy == StoreAssignmentPolicy.LEGACY) {
-      throw CatalystErrors.legacyStoreAssignmentPolicyError()
+      throw QueryCompilationErrors.legacyStoreAssignmentPolicyError()
     }
   }
 
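For context on the check just above: the policy is controlled by the spark.sql.storeAssignmentPolicy setting, and under LEGACY a DataSource v2 write now fails at analysis time with legacyStoreAssignmentPolicyError. A minimal, illustrative sketch of opting into a permitted policy (the session setup is an assumption for the example, not part of this commit):

import org.apache.spark.sql.SparkSession

object StoreAssignmentPolicySketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("store-assignment-policy-sketch")
      .master("local[*]")
      .getOrCreate()
    // LEGACY store assignment is rejected for v2 writes by the validation
    // above, so choose one of the permitted policies instead.
    spark.conf.set("spark.sql.storeAssignmentPolicy", "ANSI") // or "STRICT"
    spark.stop()
  }
}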
@@ -3023,12 +3024,12 @@ class Analyzer(
       hint: JoinHint) = {
     val leftKeys = joinNames.map { keyName =>
       left.output.find(attr => resolver(attr.name, keyName)).getOrElse {
-        throw CatalystErrors.unresolvedUsingColForJoinError(keyName, left, "left")
+        throw QueryCompilationErrors.unresolvedUsingColForJoinError(keyName, left, "left")
       }
     }
     val rightKeys = joinNames.map { keyName =>
       right.output.find(attr => resolver(attr.name, keyName)).getOrElse {
-        throw CatalystErrors.unresolvedUsingColForJoinError(keyName, right, "right")
+        throw QueryCompilationErrors.unresolvedUsingColForJoinError(keyName, right, "right")
       }
     }
     val joinPairs = leftKeys.zip(rightKeys)
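A simplified, Spark-free sketch of the resolution in this hunk: every name in a USING clause must resolve against the output of both join sides, and a name missing on either side is a compilation error. The column names and the case-insensitive resolver below are illustrative assumptions.

object UsingJoinResolutionSketch extends App {
  val resolver: (String, String) => Boolean = _.equalsIgnoreCase(_)

  def resolveKeys(joinNames: Seq[String], output: Seq[String], side: String): Seq[String] =
    joinNames.map { keyName =>
      output.find(attr => resolver(attr, keyName)).getOrElse {
        // Stands in for QueryCompilationErrors.unresolvedUsingColForJoinError.
        throw new IllegalArgumentException(s"USING column $keyName not found on the $side side")
      }
    }

  // Example: SELECT ... FROM l JOIN r USING (id)
  val leftKeys = resolveKeys(Seq("id"), Seq("id", "a"), "left")   // Seq("id")
  val rightKeys = resolveKeys(Seq("id"), Seq("ID", "b"), "right") // Seq("ID"), case-insensitive
  println(leftKeys.zip(rightKeys))                                // cf. joinPairs above
}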
@@ -3091,7 +3092,7 @@ class Analyzer(
             ExtractValue(child, fieldName, resolver)
           }
         case other =>
-          throw CatalystErrors.dataTypeMismatchForDeserializerError(other,
+          throw QueryCompilationErrors.dataTypeMismatchForDeserializerError(other,
             "array")
       }
     case u: UnresolvedCatalystToExternalMap if u.child.resolved =>
@@ -3102,7 +3103,7 @@ class Analyzer(
             ExtractValue(child, fieldName, resolver)
           }
         case other =>
-          throw CatalystErrors.dataTypeMismatchForDeserializerError(other, "map")
+          throw QueryCompilationErrors.dataTypeMismatchForDeserializerError(other, "map")
       }
   }
   validateNestedTupleFields(result)
@@ -3111,7 +3112,7 @@ class Analyzer(
   }
 
   private def fail(schema: StructType, maxOrdinal: Int): Unit = {
-    throw CatalystErrors.fieldNumberMismatchForDeserializerError(schema, maxOrdinal)
+    throw QueryCompilationErrors.fieldNumberMismatchForDeserializerError(schema, maxOrdinal)
   }
 
 /**
@@ -3170,7 +3171,7 @@ class Analyzer(
     case n: NewInstance if n.childrenResolved && !n.resolved =>
       val outer = OuterScopes.getOuterScope(n.cls)
       if (outer == null) {
-        throw CatalystErrors.outerScopeFailureForNewInstanceError(n.cls.getName)
+        throw QueryCompilationErrors.outerScopeFailureForNewInstanceError(n.cls.getName)
       }
       n.copy(outerPointer = Some(outer))
   }
@@ -3186,7 +3187,7 @@ class Analyzer(
       case l: LambdaVariable => "array element"
       case e => e.sql
     }
-    throw CatalystErrors.upCastFailureError(fromStr, from, to, walkedTypePath)
+    throw QueryCompilationErrors.upCastFailureError(fromStr, from, to, walkedTypePath)
   }
 
   def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
@@ -3197,7 +3198,7 @@ class Analyzer(
     case u @ UpCast(child, _, _) if !child.resolved => u
 
     case UpCast(_, target, _) if target != DecimalType && !target.isInstanceOf[DataType] =>
-      throw CatalystErrors.unsupportedAbstractDataTypeForUpCastError(target)
+      throw QueryCompilationErrors.unsupportedAbstractDataTypeForUpCastError(target)
 
     case UpCast(child, target, walkedTypePath) if target == DecimalType
       && child.dataType.isInstanceOf[DecimalType] =>
@@ -3376,7 +3377,7 @@ class Analyzer(
         case Some(colName) =>
          ColumnPosition.after(colName)
         case None =>
-          throw CatalystErrors.referenceColNotFoundForAlterTableChangesError(after,
+          throw QueryCompilationErrors.referenceColNotFoundForAlterTableChangesError(after,
            parentName)
       }
     case other => other
