2 files changed: +16 −2

catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors
core/src/test/scala/org/apache/spark/sql

catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors

@@ -17,6 +17,9 @@

 package org.apache.spark.sql.catalyst

+import scala.util.control.NonFatal
+
+import org.apache.spark.SparkException
 import org.apache.spark.sql.catalyst.trees.TreeNode

 /**
@@ -47,7 +50,10 @@ package object errors {
    */
   def attachTree[TreeType <: TreeNode[_], A](tree: TreeType, msg: String = "")(f: => A): A = {
     try f catch {
-      case e: Exception => throw new TreeNodeException(tree, msg, e)
+      // SPARK-16748: We do not want SparkExceptions from job failures in the planning phase
+      // to create TreeNodeException. Hence, wrap exception only if it is not SparkException.
+      case NonFatal(e) if !e.isInstanceOf[SparkException] =>
+        throw new TreeNodeException(tree, msg, e)
     }
   }
 }
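To see the new guard in isolation, here is a minimal, self-contained sketch that can be pasted into a Scala REPL. The SparkException and TreeNodeException classes below are simplified hypothetical stand-ins, not the real Spark types; the point is the catch pattern: a SparkException now escapes attachTree unchanged, while any other non-fatal exception is still wrapped.

import scala.util.control.NonFatal

// Hypothetical stand-ins for the real Spark classes, defined only to model the guard.
class SparkException(msg: String) extends Exception(msg)
class TreeNodeException(msg: String, cause: Throwable) extends Exception(msg, cause)

def attachTreeModel[A](msg: String)(f: => A): A = {
  try f catch {
    // Wrap only non-fatal exceptions that are not SparkException,
    // mirroring the SPARK-16748 guard above.
    case NonFatal(e) if !e.isInstanceOf[SparkException] =>
      throw new TreeNodeException(msg, e)
  }
}

// attachTreeModel("plan") { throw new SparkException("job failed") }
//   ==> the SparkException propagates unchanged
// attachTreeModel("plan") { throw new RuntimeException("boom") }
//   ==> rethrown as TreeNodeException("plan", cause = the RuntimeException)

Note that NonFatal already excludes fatal errors such as OutOfMemoryError, so the new condition only narrows the non-fatal case that was previously caught by `case e: Exception`.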
core/src/test/scala/org/apache/spark/sql

@@ -20,7 +20,7 @@ package org.apache.spark.sql
 import java.math.MathContext
 import java.sql.Timestamp

-import org.apache.spark.AccumulatorSuite
+import org.apache.spark.{SparkException, AccumulatorSuite}
 import org.apache.spark.sql.catalyst.analysis.UnresolvedException
 import org.apache.spark.sql.catalyst.expressions.SortOrder
 import org.apache.spark.sql.catalyst.plans.logical.Aggregate
@@ -1339,6 +1339,14 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
     checkAggregation("SELECT key + 1 + 1, COUNT(*) FROM testData GROUP BY key + 1", false)
   }

+  testQuietly(
+    "SPARK-16748: SparkExceptions during planning should not be wrapped in TreeNodeException") {
+    intercept[SparkException] {
+      val df = spark.range(0, 5).map(x => (1 / x).toString).toDF("a").orderBy("a")
+      df.queryExecution.toRdd // force physical planning, but not execution of the plan
+    }
+  }
+
   test("Test to check we can use Long.MinValue") {
     checkAnswer(
       sql(s"SELECT ${Long.MinValue} FROM testData ORDER BY key LIMIT 1"), Row(Long.MinValue)
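Why the test expects a SparkException from queryExecution.toRdd rather than from an action: orderBy requires range partitioning, and in Spark 2.x materializing the physical plan's RDD constructs a RangePartitioner, which runs a sampling job over the input. That job evaluates 1 / x at x = 0, so the failure surfaces while the plan is still being prepared. The following sketch shows the same pattern, assuming an active Spark 2.x SparkSession named `spark` (e.g. in spark-shell):

import org.apache.spark.SparkException

// Assumes `spark` is an active Spark 2.x SparkSession (e.g. in spark-shell).
val df = spark.range(0, 5).map(x => (1 / x).toString).toDF("a").orderBy("a")

// Building the shuffled RDD for orderBy("a") runs a sampling job, which hits
// the division by zero; the resulting SparkException is raised during planning,
// before any result is requested.
try df.queryExecution.toRdd
catch { case e: SparkException => println(s"planning failed: ${e.getMessage}") }

Before this patch, attachTree would have wrapped that failure in a TreeNodeException, so intercept[SparkException] in the test verifies the original exception now propagates.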