
Commit 5e56097

Remove TreeNodeException that didn't work

1 parent 59cbaca commit 5e56097

File tree

18 files changed: +44 −95 lines


sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala

Lines changed: 2 additions & 2 deletions
@@ -17,8 +17,8 @@
 
 package org.apache.spark.sql.catalyst.analysis
 
+import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.{FunctionIdentifier, InternalRow, TableIdentifier}
-import org.apache.spark.sql.catalyst.errors.TreeNodeException
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode}
 import org.apache.spark.sql.catalyst.parser.ParserUtils
@@ -35,7 +35,7 @@ import org.apache.spark.sql.util.CaseInsensitiveStringMap
  * resolved.
  */
 class UnresolvedException[TreeType <: TreeNode[_]](tree: TreeType, function: String)
-  extends TreeNodeException(tree, s"Invalid call to $function on unresolved object", null)
+  extends AnalysisException(s"Invalid call to $function on unresolved object")
 
 /**
  * Holds the name of a relation that has yet to be looked up in a catalog.
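
For context, a minimal self-contained sketch of what this reparenting means for callers; the classes below are illustrative stand-ins, not the Spark sources. Errors raised on unresolved objects can now be caught by the same handler as every other analysis error, which the old TreeNodeException-based hierarchy did not allow.

// Sketch only: AnalysisException and UnresolvedColumn are stand-ins, not Spark classes.
class AnalysisException(message: String) extends Exception(message)

class UnresolvedException(function: String)
  extends AnalysisException(s"Invalid call to $function on unresolved object")

final case class UnresolvedColumn(name: String) {
  // Asking an unresolved node for its type is invalid, as in Spark's unresolved nodes.
  def dataType: Nothing = throw new UnresolvedException("dataType")
}

object UnresolvedDemo extends App {
  try UnresolvedColumn("a").dataType
  catch {
    // One handler now covers unresolved-object errors and all other analysis errors.
    case e: AnalysisException => println(s"caught: ${e.getMessage}")
  }
}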

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors/package.scala

Lines changed: 0 additions & 34 deletions
@@ -17,43 +17,9 @@
 
 package org.apache.spark.sql.catalyst
 
-import scala.util.control.NonFatal
-
-import org.apache.spark.SparkException
-import org.apache.spark.sql.catalyst.trees.TreeNode
-
 /**
  * Functions for attaching and retrieving trees that are associated with errors.
  */
 package object errors {
 
-  class TreeNodeException[TreeType <: TreeNode[_]](
-      @transient val tree: TreeType,
-      msg: String,
-      cause: Throwable)
-    extends Exception(msg, cause) {
-
-    val treeString = tree.toString
-
-    // Yes, this is the same as a default parameter, but... those don't seem to work with SBT
-    // external project dependencies for some reason.
-    def this(tree: TreeType, msg: String) = this(tree, msg, null)
-
-    override def getMessage: String = {
-      s"${super.getMessage}, tree:${if (treeString contains "\n") "\n" else " "}$tree"
-    }
-  }
-
-  /**
-   * Wraps any exceptions that are thrown while executing `f` in a
-   * [[catalyst.errors.TreeNodeException TreeNodeException]], attaching the provided `tree`.
-   */
-  def attachTree[TreeType <: TreeNode[_], A](tree: TreeType, msg: String = "")(f: => A): A = {
-    try f catch {
-      // SPARK-16748: We do not want SparkExceptions from job failures in the planning phase
-      // to create TreeNodeException. Hence, wrap exception only if it is not SparkException.
-      case NonFatal(e) if !e.isInstanceOf[SparkException] =>
-        throw new TreeNodeException(tree, msg, e)
-    }
-  }
 }
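
Since the helper disappears entirely, here is a compact self-contained model of what attachTree did, using stand-in types rather than the deleted Spark code: it wrapped non-fatal failures so the error message carried the tree being processed. After this commit, failures at former call sites propagate as-is.

// Self-contained model of the deleted helper; tree-as-Any and PlanningException
// are stand-ins, not the removed Spark classes.
import scala.util.control.NonFatal

object AttachTreeModel {
  class PlanningException(val tree: Any, msg: String, cause: Throwable)
    extends Exception(s"$msg, tree: $tree", cause)

  def attachTree[A](tree: Any, msg: String = "")(f: => A): A = {
    try f catch {
      // Wrap non-fatal failures so the message names the tree under processing.
      case NonFatal(e) => throw new PlanningException(tree, msg, e)
    }
  }

  def main(args: Array[String]): Unit = {
    try attachTree("Project(a + b)", "Binding attribute") { sys.error("boom") }
    catch { case e: PlanningException => println(e.getMessage) }
  }
}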

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala

Lines changed: 7 additions & 10 deletions
@@ -19,7 +19,6 @@ package org.apache.spark.sql.catalyst.expressions
 
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.errors.attachTree
 import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, CodeGenerator, ExprCode, FalseLiteral, JavaCode}
 import org.apache.spark.sql.catalyst.expressions.codegen.Block._
 import org.apache.spark.sql.types._
@@ -72,17 +71,15 @@ object BindReferences extends Logging {
       input: AttributeSeq,
       allowFailures: Boolean = false): A = {
     expression.transform { case a: AttributeReference =>
-      attachTree(a, "Binding attribute") {
-        val ordinal = input.indexOf(a.exprId)
-        if (ordinal == -1) {
-          if (allowFailures) {
-            a
-          } else {
-            sys.error(s"Couldn't find $a in ${input.attrs.mkString("[", ",", "]")}")
-          }
+      val ordinal = input.indexOf(a.exprId)
+      if (ordinal == -1) {
+        if (allowFailures) {
+          a
         } else {
-          BoundReference(ordinal, a.dataType, input(ordinal).nullable)
+          sys.error(s"Couldn't find $a in ${input.attrs.mkString("[", ",", "]")}")
         }
+      } else {
+        BoundReference(ordinal, a.dataType, input(ordinal).nullable)
       }
     }.asInstanceOf[A] // Kind of a hack, but safe. TODO: Tighten return type when possible.
   }
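
The de-indented logic above is easier to follow in isolation; a rough stand-alone sketch of the binding step follows, with illustrative types rather than Spark's. Each attribute reference is replaced by a position-based reference into the input schema, failing fast when the attribute is missing unless allowFailures is set.

// Illustrative stand-ins for AttributeReference/BoundReference; not Spark code.
final case class Attr(exprId: Long, name: String)
final case class BoundRef(ordinal: Int, name: String)

object BindSketch {
  // Mirrors the shape of the binding logic after this commit: no attachTree
  // wrapper, just the ordinal lookup and its three outcomes.
  def bind(a: Attr, input: Seq[Attr], allowFailures: Boolean = false): Either[Attr, BoundRef] = {
    val ordinal = input.indexWhere(_.exprId == a.exprId)
    if (ordinal == -1) {
      if (allowFailures) Left(a) // leave the reference unbound
      else sys.error(s"Couldn't find $a in ${input.mkString("[", ",", "]")}")
    } else {
      Right(BoundRef(ordinal, a.name)) // bind by position in the input schema
    }
  }
}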

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala

Lines changed: 5 additions & 5 deletions
@@ -18,8 +18,8 @@
 package org.apache.spark.sql.catalyst.rules
 
 import org.apache.spark.internal.Logging
+import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.QueryPlanningTracker
-import org.apache.spark.sql.catalyst.errors.TreeNodeException
 import org.apache.spark.sql.catalyst.trees.TreeNode
 import org.apache.spark.sql.catalyst.util.DateTimeConstants.NANOS_PER_SECOND
 import org.apache.spark.sql.catalyst.util.sideBySide
@@ -169,7 +169,7 @@ abstract class RuleExecutor[TreeType <: TreeNode[_]] extends Logging {
           |Once strategy's idempotence is broken for batch ${batch.name}
           |${sideBySide(plan.treeString, reOptimized.treeString).mkString("\n")}
         """.stripMargin
-      throw new TreeNodeException(reOptimized, message, null)
+      throw new AnalysisException(message)
     }
   }
@@ -199,7 +199,7 @@ abstract class RuleExecutor[TreeType <: TreeNode[_]] extends Logging {
     if (!isPlanIntegral(plan)) {
       val message = "The structural integrity of the input plan is broken in " +
         s"${this.getClass.getName.stripSuffix("$")}."
-      throw new TreeNodeException(plan, message, null)
+      throw new AnalysisException(message)
     }
 
     batches.foreach { batch =>
@@ -232,7 +232,7 @@ abstract class RuleExecutor[TreeType <: TreeNode[_]] extends Logging {
         if (effective && !isPlanIntegral(result)) {
           val message = s"After applying rule ${rule.ruleName} in batch ${batch.name}, " +
             "the structural integrity of the plan is broken."
-          throw new TreeNodeException(result, message, null)
+          throw new AnalysisException(message)
         }
 
         result
@@ -249,7 +249,7 @@ abstract class RuleExecutor[TreeType <: TreeNode[_]] extends Logging {
         val message = s"Max iterations (${iteration - 1}) reached for batch ${batch.name}" +
           s"$endingMsg"
         if (Utils.isTesting || batch.strategy.errorOnExceed) {
-          throw new TreeNodeException(curPlan, message, null)
+          throw new AnalysisException(message)
        } else {
          logWarning(message)
        }
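
All four throw sites above sit in the executor's driver loop. A simplified stand-in for the fixed-point part (not Spark's RuleExecutor, which also tracks metrics, idempotence, and integrity checks) shows where the non-convergence error now surfaces as an AnalysisException:

// Simplified stand-in for RuleExecutor's fixed-point batch loop.
class AnalysisException(message: String) extends Exception(message)

object FixedPointSketch {
  @annotation.tailrec
  def execute[T](plan: T, rules: Seq[T => T], maxIterations: Int,
      batchName: String, iteration: Int = 1): T = {
    val next = rules.foldLeft(plan)((p, rule) => rule(p))
    if (next == plan) {
      plan // reached a fixed point: no rule changed the plan
    } else if (iteration >= maxIterations) {
      // Previously: throw new TreeNodeException(curPlan, message, null)
      throw new AnalysisException(s"Max iterations ($iteration) reached for batch $batchName")
    } else {
      execute(next, rules, maxIterations, batchName, iteration + 1)
    }
  }
}

// e.g. FixedPointSketch.execute(100, Seq((i: Int) => math.max(0, i - 1)), 10, "decrement")
// throws AnalysisException, since 100 decrements exceed the 10-iteration budget.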

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala

Lines changed: 3 additions & 4 deletions
@@ -27,10 +27,10 @@ import org.json4s.JsonAST._
 import org.json4s.JsonDSL._
 import org.json4s.jackson.JsonMethods._
 
+import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.{AliasIdentifier, IdentifierWithDatabase}
 import org.apache.spark.sql.catalyst.ScalaReflection._
 import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogStorageFormat, CatalogTable, CatalogTableType, FunctionResource}
-import org.apache.spark.sql.catalyst.errors._
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans.JoinType
 import org.apache.spark.sql.catalyst.plans.physical.{BroadcastMode, Partitioning}
@@ -465,7 +465,7 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] extends Product {
    */
   private def makeCopy(
       newArgs: Array[AnyRef],
-      allowEmptyArgs: Boolean): BaseType = attachTree(this, "makeCopy") {
+      allowEmptyArgs: Boolean): BaseType = {
     val allCtors = getClass.getConstructors
     if (newArgs.isEmpty && allCtors.isEmpty) {
       // This is a singleton object which doesn't have any constructor. Just return `this` as we
@@ -504,8 +504,7 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] extends Product {
       }
     } catch {
       case e: java.lang.IllegalArgumentException =>
-        throw new TreeNodeException(
-          this,
+        throw new AnalysisException(
          s"""
             |Failed to copy node.
             |Is otherCopyArgs specified correctly for $nodeName.
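
A rough model of that makeCopy failure path, as stand-in code rather than the TreeNode source: copy a node by invoking its constructor reflectively, and surface argument mismatches as an AnalysisException instead of a tree-carrying exception.

// Rough model of the reflective-copy failure path; not the TreeNode source.
class AnalysisException(message: String) extends Exception(message)

object MakeCopySketch {
  def makeCopy[T <: AnyRef](node: T, newArgs: Array[AnyRef]): T = {
    val ctor = node.getClass.getConstructors.head
    try {
      ctor.newInstance(newArgs: _*).asInstanceOf[T]
    } catch {
      case e: IllegalArgumentException =>
        // Previously: TreeNodeException(this, "Failed to copy node...", null).
        throw new AnalysisException(
          s"Failed to copy node ${node.getClass.getSimpleName}: ${e.getMessage}")
    }
  }
}

// e.g. for a case class Name(s: String), makeCopy(Name("x"), Array("y")) yields
// Name("y"), while a wrong argument arity or type raises AnalysisException.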

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala

Lines changed: 4 additions & 5 deletions
@@ -33,7 +33,6 @@ import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable
 import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.dsl.plans._
 import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
-import org.apache.spark.sql.catalyst.errors.TreeNodeException
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.expressions.aggregate.{AggregateExpression, Count, Sum}
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
@@ -799,7 +798,7 @@ class AnalysisSuite extends AnalysisTest with Matchers {
       $"a" / $"d" as "div4",
       $"e" / $"e" as "div5")
 
-    val message = intercept[TreeNodeException[LogicalPlan]] {
+    val message = intercept[AnalysisException] {
       testAnalyzer.execute(plan)
     }.getMessage
     assert(message.startsWith(s"Max iterations ($maxIterations) reached for batch Resolution, " +
@@ -961,7 +960,7 @@ class AnalysisSuite extends AnalysisTest with Matchers {
       $"a" / $"d" as "div4",
       $"e" / $"e" as "div5")
 
-    val message1 = intercept[TreeNodeException[LogicalPlan]] {
+    val message1 = intercept[AnalysisException] {
       testAnalyzer.execute(plan)
     }.getMessage
     assert(message1.startsWith(s"Max iterations ($maxIterations) reached for batch Resolution, " +
@@ -971,13 +970,13 @@ class AnalysisSuite extends AnalysisTest with Matchers {
       try {
         testAnalyzer.execute(plan)
       } catch {
-        case ex: TreeNodeException[_]
+        case ex: AnalysisException
           if ex.getMessage.contains(SQLConf.ANALYZER_MAX_ITERATIONS.key) =>
           fail("analyzer.execute should not reach max iterations.")
       }
     }
 
-    val message2 = intercept[TreeNodeException[LogicalPlan]] {
+    val message2 = intercept[AnalysisException] {
       testAnalyzer.execute(plan)
     }.getMessage
     assert(message2.startsWith(s"Max iterations ($maxIterations) reached for batch Resolution, " +
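
The same mechanical migration repeats in this suite and in the three optimizer/rule-executor suites below: because AnalysisException is not generic in the plan type, intercept no longer needs the [LogicalPlan] type argument. A minimal ScalaTest sketch of the pattern, where the suite and the execute stub are illustrative stand-ins:

import org.scalatest.funsuite.AnyFunSuite

// Illustrative stub standing in for an analyzer whose batch fails to converge.
class AnalysisException(message: String) extends Exception(message)

class MaxIterationsSketchSuite extends AnyFunSuite {
  private def execute(): Unit =
    throw new AnalysisException("Max iterations (100) reached for batch Resolution")

  test("non-convergence surfaces as AnalysisException") {
    // Before this commit: intercept[TreeNodeException[LogicalPlan]] { ... }
    val message = intercept[AnalysisException] { execute() }.getMessage
    assert(message.startsWith("Max iterations"))
  }
}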

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerStructuralIntegrityCheckerSuite.scala

Lines changed: 4 additions & 4 deletions
@@ -17,11 +17,11 @@
 
 package org.apache.spark.sql.catalyst.optimizer
 
+import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.analysis.{EmptyFunctionRegistry, FakeV2SessionCatalog, UnresolvedAttribute}
 import org.apache.spark.sql.catalyst.catalog.{InMemoryCatalog, SessionCatalog}
 import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.dsl.plans._
-import org.apache.spark.sql.catalyst.errors.TreeNodeException
 import org.apache.spark.sql.catalyst.expressions.{Alias, Literal, NamedExpression}
 import org.apache.spark.sql.catalyst.plans.PlanTest
 import org.apache.spark.sql.catalyst.plans.logical.{Aggregate, LocalRelation, LogicalPlan, OneRowRelation, Project}
@@ -53,7 +53,7 @@ class OptimizerStructuralIntegrityCheckerSuite extends PlanTest {
   test("check for invalid plan after execution of rule - unresolved attribute") {
     val analyzed = Project(Alias(Literal(10), "attr")() :: Nil, OneRowRelation()).analyze
     assert(analyzed.resolved)
-    val message = intercept[TreeNodeException[LogicalPlan]] {
+    val message = intercept[AnalysisException] {
       Optimize.execute(analyzed)
     }.getMessage
     val ruleName = OptimizeRuleBreakSI.ruleName
@@ -68,7 +68,7 @@ class OptimizerStructuralIntegrityCheckerSuite extends PlanTest {
     assert(analyzed.resolved)
 
     // Should fail verification with the OptimizeRuleBreakSI rule
-    val message = intercept[TreeNodeException[LogicalPlan]] {
+    val message = intercept[AnalysisException] {
       Optimize.execute(analyzed)
     }.getMessage
     val ruleName = OptimizeRuleBreakSI.ruleName
@@ -86,7 +86,7 @@ class OptimizerStructuralIntegrityCheckerSuite extends PlanTest {
     val invalidPlan = OptimizeRuleBreakSI.apply(analyzed)
 
     // Should fail verification right at the beginning
-    val message = intercept[TreeNodeException[LogicalPlan]] {
+    val message = intercept[AnalysisException] {
       Optimize.execute(invalidPlan)
     }.getMessage
     assert(message.contains("The structural integrity of the input plan is broken"))

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerSuite.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,8 @@
1717

1818
package org.apache.spark.sql.catalyst.optimizer
1919

20+
import org.apache.spark.sql.AnalysisException
2021
import org.apache.spark.sql.catalyst.dsl.plans._
21-
import org.apache.spark.sql.catalyst.errors.TreeNodeException
2222
import org.apache.spark.sql.catalyst.expressions.{Alias, IntegerLiteral, Literal}
2323
import org.apache.spark.sql.catalyst.plans.PlanTest
2424
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, OneRowRelation, Project}
@@ -48,7 +48,7 @@ class OptimizerSuite extends PlanTest {
4848
DecrementLiterals) :: Nil
4949
}
5050

51-
val message1 = intercept[TreeNodeException[LogicalPlan]] {
51+
val message1 = intercept[AnalysisException] {
5252
optimizer.execute(analyzed)
5353
}.getMessage
5454
assert(message1.startsWith(s"Max iterations ($maxIterationsNotEnough) reached for batch " +
@@ -58,13 +58,13 @@ class OptimizerSuite extends PlanTest {
5858
try {
5959
optimizer.execute(analyzed)
6060
} catch {
61-
case ex: TreeNodeException[_]
61+
case ex: AnalysisException
6262
if ex.getMessage.contains(SQLConf.OPTIMIZER_MAX_ITERATIONS.key) =>
6363
fail("optimizer.execute should not reach max iterations.")
6464
}
6565
}
6666

67-
val message2 = intercept[TreeNodeException[LogicalPlan]] {
67+
val message2 = intercept[AnalysisException] {
6868
optimizer.execute(analyzed)
6969
}.getMessage
7070
assert(message2.startsWith(s"Max iterations ($maxIterationsNotEnough) reached for batch " +

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala

Lines changed: 4 additions & 5 deletions
@@ -18,9 +18,8 @@
 package org.apache.spark.sql.catalyst.trees
 
 import org.apache.spark.SparkFunSuite
-import org.apache.spark.sql.catalyst.errors.TreeNodeException
+import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.expressions.{Expression, IntegerLiteral, Literal}
-import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.rules.{Rule, RuleExecutor}
 
 class RuleExecutorSuite extends SparkFunSuite {
@@ -67,7 +66,7 @@ class RuleExecutorSuite extends SparkFunSuite {
       val batches = Batch("fixedPoint", FixedPoint(10), DecrementLiterals) :: Nil
     }
 
-    val message = intercept[TreeNodeException[LogicalPlan]] {
+    val message = intercept[AnalysisException] {
       ToFixedPoint.execute(Literal(100))
     }.getMessage
     assert(message.contains("Max iterations (10) reached for batch fixedPoint"))
@@ -84,7 +83,7 @@ class RuleExecutorSuite extends SparkFunSuite {
 
     assert(WithSIChecker.execute(Literal(10)) === Literal(9))
 
-    val message = intercept[TreeNodeException[LogicalPlan]] {
+    val message = intercept[AnalysisException] {
       // The input is already invalid as determined by WithSIChecker.isPlanIntegral
       WithSIChecker.execute(Literal(10.1))
     }.getMessage
@@ -102,7 +101,7 @@ class RuleExecutorSuite extends SparkFunSuite {
 
     assert(WithSICheckerForPositiveLiteral.execute(Literal(2)) === Literal(1))
 
-    val message = intercept[TreeNodeException[LogicalPlan]] {
+    val message = intercept[AnalysisException] {
       WithSICheckerForPositiveLiteral.execute(Literal(1))
     }.getMessage
     assert(message.contains("the structural integrity of the plan is broken"))

sql/core/src/main/scala/org/apache/spark/sql/execution/ExpandExec.scala

Lines changed: 1 addition & 2 deletions
@@ -19,7 +19,6 @@ package org.apache.spark.sql.execution
 
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.errors._
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.expressions.codegen.Block._
@@ -54,7 +53,7 @@ case class ExpandExec(
   private[this] val projection =
     (exprs: Seq[Expression]) => UnsafeProjection.create(exprs, child.output)
 
-  protected override def doExecute(): RDD[InternalRow] = attachTree(this, "execute") {
+  protected override def doExecute(): RDD[InternalRow] = {
     val numOutputRows = longMetric("numOutputRows")
 
     child.execute().mapPartitions { iter =>
