Commit ab5120f

Fixed bug
1 parent 3fd39b8 commit ab5120f

2 files changed: +16 −2 lines changed

2 files changed

+16
-2
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors/package.scala

Lines changed: 7 additions & 1 deletion
@@ -17,6 +17,9 @@
 
 package org.apache.spark.sql.catalyst
 
+import scala.util.control.NonFatal
+
+import org.apache.spark.SparkException
 import org.apache.spark.sql.catalyst.trees.TreeNode
 
 /**
@@ -47,7 +50,10 @@ package object errors {
    */
   def attachTree[TreeType <: TreeNode[_], A](tree: TreeType, msg: String = "")(f: => A): A = {
     try f catch {
-      case e: Exception => throw new TreeNodeException(tree, msg, e)
+      // SPARK-16748: We do not want SparkExceptions from job failures in the planning phase
+      // to create TreeNodeException. Hence, wrap exception only if it is not SparkException.
+      case NonFatal(e) if !e.isInstanceOf[SparkException] =>
+        throw new TreeNodeException(tree, msg, e)
     }
   }
 }
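For reference, here is a minimal, self-contained sketch of the guard pattern the fix uses. The names PassThroughException, ContextException, and wrapWithContext are hypothetical stand-ins for SparkException, TreeNodeException, and attachTree: a guarded pattern match lets one exception type escape to the caller unchanged, while every other non-fatal throwable is rethrown with added context.

import scala.util.control.NonFatal

// Hypothetical stand-ins for SparkException and TreeNodeException.
class PassThroughException(msg: String) extends Exception(msg)
class ContextException(msg: String, cause: Throwable) extends Exception(msg, cause)

def wrapWithContext[A](msg: String)(f: => A): A = {
  try f catch {
    // Guarded match: a PassThroughException escapes to the caller unchanged,
    // while any other non-fatal throwable is rethrown wrapped with context.
    case NonFatal(e) if !e.isInstanceOf[PassThroughException] =>
      throw new ContextException(msg, e)
  }
}

Note a second, subtler effect of the change: replacing case e: Exception with NonFatal(e) also stops wrapping exceptions Scala treats as fatal to the thread, such as InterruptedException, since NonFatal does not match them.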

sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala

Lines changed: 9 additions & 1 deletion
@@ -20,7 +20,7 @@ package org.apache.spark.sql
 import java.math.MathContext
 import java.sql.Timestamp
 
-import org.apache.spark.AccumulatorSuite
+import org.apache.spark.{SparkException, AccumulatorSuite}
 import org.apache.spark.sql.catalyst.analysis.UnresolvedException
 import org.apache.spark.sql.catalyst.expressions.SortOrder
 import org.apache.spark.sql.catalyst.plans.logical.Aggregate
@@ -1339,6 +1339,14 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
     checkAggregation("SELECT key + 1 + 1, COUNT(*) FROM testData GROUP BY key + 1", false)
   }
 
+  testQuietly(
+    "SPARK-16748: SparkExceptions during planning should not wrapped in TreeNodeException") {
+    intercept[SparkException] {
+      val df = spark.range(0, 5).map(x => (1 / x).toString).toDF("a").orderBy("a")
+      df.queryExecution.toRdd // force physical planning, but not execution of the plan
+    }
+  }
+
   test("Test to check we can use Long.MinValue") {
     checkAnswer(
       sql(s"SELECT ${Long.MinValue} FROM testData ORDER BY key LIMIT 1"), Row(Long.MinValue)
