From d454b06d55d21bd1659cada984360855df76255c Mon Sep 17 00:00:00 2001 From: Peter Toth Date: Fri, 9 Jul 2021 13:57:23 +0200 Subject: [PATCH 1/4] [SPARK-36073][SQL] SubExpr elimination should include common child exprs of conditional expressions --- .../expressions/EquivalentExpressions.scala | 94 +++++++++++++------ .../SubexpressionEliminationSuite.scala | 15 +++ 2 files changed, 81 insertions(+), 28 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala index ef04e8825811..5c6a4556d8dc 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala @@ -38,19 +38,43 @@ class EquivalentExpressions { * Returns true if there was already a matching expression. */ def addExpr(expr: Expression): Boolean = { - addExprToMap(expr, equivalenceMap) + updateExprInMap(expr, equivalenceMap) } - private def addExprToMap( - expr: Expression, map: mutable.HashMap[ExpressionEquals, ExpressionStats]): Boolean = { + /** + * Adds or removes an expression to/from the map and updates `useCount`. + * Returns true + * - if there was a matching expression in the map before add or + * - if there remained a matching expression in the map after remove (`useCount` remained > 0) + * to indicate there is no need to recurse in `updateExprTree`. + */ + private def updateExprInMap( + expr: Expression, + map: mutable.HashMap[ExpressionEquals, ExpressionStats], + useCount: Int = 1): Boolean = { if (expr.deterministic) { val wrapper = ExpressionEquals(expr) map.get(wrapper) match { case Some(stats) => - stats.useCount += 1 - true + stats.useCount += useCount + if (stats.useCount > 0) { + true + } else if (stats.useCount == 0) { + map -= wrapper + false + } else { + // Should not happen + throw new IllegalArgumentException( + s"Cannot update expression: $expr in map: $map with use count: $useCount") + } case _ => - map.put(wrapper, ExpressionStats(expr)()) + if (useCount > 0) { + map.put(wrapper, ExpressionStats(expr)(useCount)) + } else { + // Should not happen + throw new IllegalArgumentException( + s"Cannot update expression: $expr in map: $map with use count: $useCount") + } false } } else { @@ -70,34 +94,39 @@ class EquivalentExpressions { * For example, if `((a + b) + c)` and `(a + b)` are common expressions, we only add * `((a + b) + c)`. 
*/ - private def addCommonExprs( + private def updateCommonExprs( exprs: Seq[Expression], - map: mutable.HashMap[ExpressionEquals, ExpressionStats]): Unit = { + map: mutable.HashMap[ExpressionEquals, ExpressionStats], + useCount: Int): Unit = { assert(exprs.length > 1) var localEquivalenceMap = mutable.HashMap.empty[ExpressionEquals, ExpressionStats] - addExprTree(exprs.head, localEquivalenceMap) + updateExprTree(exprs.head, localEquivalenceMap) exprs.tail.foreach { expr => val otherLocalEquivalenceMap = mutable.HashMap.empty[ExpressionEquals, ExpressionStats] - addExprTree(expr, otherLocalEquivalenceMap) + updateExprTree(expr, otherLocalEquivalenceMap) localEquivalenceMap = localEquivalenceMap.filter { case (key, _) => otherLocalEquivalenceMap.contains(key) } } - localEquivalenceMap.foreach { case (commonExpr, state) => - val possibleParents = localEquivalenceMap.filter { case (_, v) => v.height > state.height } - val notChild = possibleParents.forall { case (k, _) => - k == commonExpr || k.e.find(_.semanticEquals(commonExpr.e)).isEmpty - } - if (notChild) { - // If the `commonExpr` already appears in the equivalence map, calling `addExprTree` will - // increase the `useCount` and mark it as a common subexpression. Otherwise, `addExprTree` - // will recursively add `commonExpr` and its descendant to the equivalence map, in case - // they also appear in other places. For example, `If(a + b > 1, a + b + c, a + b + c)`, - // `a + b` also appears in the condition and should be treated as common subexpression. - addExprTree(commonExpr.e, map) - } + // Start with the highest common expression, update `map` with the expression and remove it (and + // its children recursively if required) from `localEquivalenceMap`. The remaining highest + // expression in `localEquivalenceMap` is also common expression. + var statsOption = Some(localEquivalenceMap).filter(_.nonEmpty).map(_.values.maxBy(_.height)) + while (statsOption.nonEmpty) { + val stats = statsOption.get + updateExprTree(stats.expr, localEquivalenceMap, -stats.useCount) + + // If `add` is true and the `commonExpr` already appears in the equivalence map, calling + // `updateExprTree` will increase the `useCount` and mark it as a common subexpression. + // Otherwise, `addExprTree` will recursively add `commonExpr` and its descendant to the + // equivalence map, in case they also appear in other places. For example, + // `If(a + b > 1, a + b + c, a + b + c)`, `a + b` also appears in the condition and should be + // treated as common subexpression. If `add` is false then the other way around. + updateExprTree(stats.expr, map, useCount) + + statsOption = Some(localEquivalenceMap).filter(_.nonEmpty).map(_.values.maxBy(_.height)) } } @@ -159,7 +188,15 @@ class EquivalentExpressions { def addExprTree( expr: Expression, map: mutable.HashMap[ExpressionEquals, ExpressionStats] = equivalenceMap): Unit = { - val skip = expr.isInstanceOf[LeafExpression] || + updateExprTree(expr, map) + } + + private def updateExprTree( + expr: Expression, + map: mutable.HashMap[ExpressionEquals, ExpressionStats] = equivalenceMap, + useCount: Int = 1): Unit = { + val skip = useCount == 0 || + expr.isInstanceOf[LeafExpression] || // `LambdaVariable` is usually used as a loop variable, which can't be evaluated ahead of the // loop. So we can't evaluate sub-expressions containing `LambdaVariable` at the beginning. expr.find(_.isInstanceOf[LambdaVariable]).isDefined || @@ -167,9 +204,10 @@ class EquivalentExpressions { // can cause error like NPE. 
(expr.isInstanceOf[PlanExpression[_]] && TaskContext.get != null) - if (!skip && !addExprToMap(expr, map)) { - childrenToRecurse(expr).foreach(addExprTree(_, map)) - commonChildrenToRecurse(expr).filter(_.nonEmpty).foreach(addCommonExprs(_, map)) + if (!skip && !updateExprInMap(expr, map, useCount)) { + val uc = useCount.signum + childrenToRecurse(expr).foreach(updateExprTree(_, map, uc)) + commonChildrenToRecurse(expr).filter(_.nonEmpty).foreach(updateCommonExprs(_, map, uc)) } } @@ -226,7 +264,7 @@ case class ExpressionEquals(e: Expression) { * Instead of appending to a mutable list/buffer of Expressions, just update the "flattened" * useCount in this wrapper in-place. */ -case class ExpressionStats(expr: Expression)(var useCount: Int = 1) { +case class ExpressionStats(expr: Expression)(var useCount: Int) { // This is used to do a fast pre-check for child-parent relationship. For example, expr1 can // only be a parent of expr2 if expr1.height is larger than expr2.height. lazy val height = getHeight(expr) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/SubexpressionEliminationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/SubexpressionEliminationSuite.scala index 6fc9d04843a1..40b74158a80b 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/SubexpressionEliminationSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/SubexpressionEliminationSuite.scala @@ -323,6 +323,21 @@ class SubexpressionEliminationSuite extends SparkFunSuite with ExpressionEvalHel assert(commonExprs.head.expr eq add3) } + test("SPARK-36073: SubExpr elimination should include common child exprs of conditional " + + "expressions") { + val add = Add(Literal(1), Literal(2)) + val ifExpr1 = If(Literal(true), add, Literal(3)) + val ifExpr3 = If(GreaterThan(add, Literal(4)), Add(ifExpr1, add), Multiply(ifExpr1, add)) + + val equivalence = new EquivalentExpressions + equivalence.addExprTree(ifExpr3) + + val commonExprs = equivalence.getAllExprStates(1) + assert(commonExprs.size == 1) + assert(commonExprs.head.useCount == 2) + assert(commonExprs.head.expr eq add) + } + test("SPARK-35439: Children subexpr should come first than parent subexpr") { val add = Add(Literal(1), Literal(2)) From dc9dc31c57d3dab27528b68eb421be058e08e28f Mon Sep 17 00:00:00 2001 From: Peter Toth Date: Fri, 9 Jul 2021 18:13:58 +0200 Subject: [PATCH 2/4] fix transparently canonicalized expressions --- .../expressions/EquivalentExpressions.scala | 30 ++++++++++--------- .../SubexpressionEliminationSuite.scala | 13 ++++++++ 2 files changed, 29 insertions(+), 14 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala index 5c6a4556d8dc..518cb9dca277 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala @@ -17,6 +17,8 @@ package org.apache.spark.sql.catalyst.expressions +import java.util.Objects + import scala.collection.mutable import org.apache.spark.TaskContext @@ -113,7 +115,7 @@ class EquivalentExpressions { // Start with the highest common expression, update `map` with the expression and remove it (and // its children recursively if required) from `localEquivalenceMap`. 
The remaining highest // expression in `localEquivalenceMap` is also common expression. - var statsOption = Some(localEquivalenceMap).filter(_.nonEmpty).map(_.values.maxBy(_.height)) + var statsOption = Some(localEquivalenceMap).filter(_.nonEmpty).map(_.maxBy(_._1.height)._2) while (statsOption.nonEmpty) { val stats = statsOption.get updateExprTree(stats.expr, localEquivalenceMap, -stats.useCount) @@ -126,7 +128,7 @@ class EquivalentExpressions { // treated as common subexpression. If `add` is false then the other way around. updateExprTree(stats.expr, map, useCount) - statsOption = Some(localEquivalenceMap).filter(_.nonEmpty).map(_.values.maxBy(_.height)) + statsOption = Some(localEquivalenceMap).filter(_.nonEmpty).map(_.maxBy(_._1.height)._2) } } @@ -221,7 +223,7 @@ class EquivalentExpressions { // Exposed for testing. private[sql] def getAllExprStates(count: Int = 0): Seq[ExpressionStats] = { - equivalenceMap.values.filter(_.useCount > count).toSeq.sortBy(_.height) + equivalenceMap.filter(_._2.useCount > count).toSeq.sortBy(_._1.height).map(_._2) } /** @@ -249,12 +251,20 @@ class EquivalentExpressions { * Wrapper around an Expression that provides semantic equality. */ case class ExpressionEquals(e: Expression) { + private def getHeight(tree: Expression): Int = { + tree.children.map(getHeight).reduceOption(_ max _).getOrElse(0) + 1 + } + + // This is used to do a fast pre-check for child-parent relationship. For example, expr1 can + // only be a parent of expr2 if expr1.height is larger than expr2.height. + lazy val height = getHeight(e) + override def equals(o: Any): Boolean = o match { - case other: ExpressionEquals => e.semanticEquals(other.e) + case other: ExpressionEquals => e.semanticEquals(other.e) && height == other.height case _ => false } - override def hashCode: Int = e.semanticHash() + override def hashCode: Int = Objects.hash(e.semanticHash(): Integer, height: Integer) } /** @@ -264,12 +274,4 @@ case class ExpressionEquals(e: Expression) { * Instead of appending to a mutable list/buffer of Expressions, just update the "flattened" * useCount in this wrapper in-place. */ -case class ExpressionStats(expr: Expression)(var useCount: Int) { - // This is used to do a fast pre-check for child-parent relationship. For example, expr1 can - // only be a parent of expr2 if expr1.height is larger than expr2.height. 
- lazy val height = getHeight(expr) - - private def getHeight(tree: Expression): Int = { - tree.children.map(getHeight).reduceOption(_ max _).getOrElse(0) + 1 - } -} +case class ExpressionStats(expr: Expression)(var useCount: Int) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/SubexpressionEliminationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/SubexpressionEliminationSuite.scala index 40b74158a80b..fa3003b27578 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/SubexpressionEliminationSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/SubexpressionEliminationSuite.scala @@ -338,6 +338,19 @@ class SubexpressionEliminationSuite extends SparkFunSuite with ExpressionEvalHel assert(commonExprs.head.expr eq add) } + test("SPARK-36073: Transparently canonicalized expressions are not necessary subexpressions") { + val add = Add(Literal(1), Literal(2)) + val transparent = PromotePrecision(add) + + val equivalence = new EquivalentExpressions + equivalence.addExprTree(transparent) + + val commonExprs = equivalence.getAllExprStates() + assert(commonExprs.size == 2) + assert(commonExprs.map(_.useCount) === Seq(1, 1)) + assert(commonExprs.map(_.expr) === Seq(add, transparent)) + } + test("SPARK-35439: Children subexpr should come first than parent subexpr") { val add = Add(Literal(1), Literal(2)) From 227cad1af875d9c432fae9a7428d23d42cb4392e Mon Sep 17 00:00:00 2001 From: Peter Toth Date: Tue, 9 Nov 2021 15:57:19 +0100 Subject: [PATCH 3/4] fix comments and review findings --- .../expressions/EquivalentExpressions.scala | 38 ++++++++----------- .../sql/errors/QueryExecutionErrors.scala | 13 ++++++- 2 files changed, 27 insertions(+), 24 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala index 518cb9dca277..f1d22ac19a44 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala @@ -24,6 +24,7 @@ import scala.collection.mutable import org.apache.spark.TaskContext import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback import org.apache.spark.sql.catalyst.expressions.objects.LambdaVariable +import org.apache.spark.sql.errors.QueryExecutionErrors /** * This class is used to compute equality of (sub)expression trees. Expressions can be added @@ -66,16 +67,14 @@ class EquivalentExpressions { false } else { // Should not happen - throw new IllegalArgumentException( - s"Cannot update expression: $expr in map: $map with use count: $useCount") + throw QueryExecutionErrors.updateEquivalentExpressionsError(expr, map, useCount) } case _ => if (useCount > 0) { map.put(wrapper, ExpressionStats(expr)(useCount)) } else { // Should not happen - throw new IllegalArgumentException( - s"Cannot update expression: $expr in map: $map with use count: $useCount") + throw QueryExecutionErrors.updateEquivalentExpressionsError(expr, map, useCount) } false } @@ -85,16 +84,16 @@ class EquivalentExpressions { } /** - * Adds only expressions which are common in each of given expressions, in a recursive way. 
- * For example, given two expressions `(a + (b + (c + 1)))` and `(d + (e + (c + 1)))`, - * the common expression `(c + 1)` will be added into `equivalenceMap`. + * Adds or removes only expressions which are common in each of given expressions, in a recursive + * way. + * For example, given two expressions `(a + (b + (c + 1)))` and `(d + (e + (c + 1)))`, the common + * expression `(c + 1)` will be added into `equivalenceMap`. * - * Note that as we don't know in advance if any child node of an expression will be common - * across all given expressions, we count all child nodes when looking through the given - * expressions. But when we call `addExprTree` to add common expressions into the map, we - * will add recursively the child nodes. So we need to filter the child expressions first. - * For example, if `((a + b) + c)` and `(a + b)` are common expressions, we only add - * `((a + b) + c)`. + * Note that as we don't know in advance if any child node of an expression will be common across + * all given expressions, we compute local equivalence maps for all given expressions and filter + * only the common nodes. + * Those common nodes are then removed from the local map and added to the final map of + * expressions. */ private def updateCommonExprs( exprs: Seq[Expression], @@ -112,20 +111,13 @@ class EquivalentExpressions { } } - // Start with the highest common expression, update `map` with the expression and remove it (and - // its children recursively if required) from `localEquivalenceMap`. The remaining highest - // expression in `localEquivalenceMap` is also common expression. + // Start with the highest expression, remove it from `localEquivalenceMap` and add it to `map`. + // The remaining highest expression in `localEquivalenceMap` is also common expression so loop + // until `localEquivalenceMap` is not empty. var statsOption = Some(localEquivalenceMap).filter(_.nonEmpty).map(_.maxBy(_._1.height)._2) while (statsOption.nonEmpty) { val stats = statsOption.get updateExprTree(stats.expr, localEquivalenceMap, -stats.useCount) - - // If `add` is true and the `commonExpr` already appears in the equivalence map, calling - // `updateExprTree` will increase the `useCount` and mark it as a common subexpression. - // Otherwise, `addExprTree` will recursively add `commonExpr` and its descendant to the - // equivalence map, in case they also appear in other places. For example, - // `If(a + b > 1, a + b + c, a + b + c)`, `a + b` also appears in the condition and should be - // treated as common subexpression. If `add` is false then the other way around. 
updateExprTree(stats.expr, map, useCount) statsOption = Some(localEquivalenceMap).filter(_.nonEmpty).map(_.maxBy(_._1.height)._2) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala index 9696c3a0b6e1..a564ed9269f0 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala @@ -28,6 +28,8 @@ import java.time.temporal.ChronoField import java.util.ConcurrentModificationException import java.util.concurrent.TimeoutException +import scala.collection.mutable + import com.fasterxml.jackson.core.{JsonParser, JsonToken} import org.apache.hadoop.fs.{FileAlreadyExistsException, FileStatus, Path} import org.apache.hadoop.fs.permission.FsPermission @@ -42,7 +44,7 @@ import org.apache.spark.sql.catalyst.ScalaReflection.Schema import org.apache.spark.sql.catalyst.WalkedTypePath import org.apache.spark.sql.catalyst.analysis.UnresolvedGenerator import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, CatalogTable} -import org.apache.spark.sql.catalyst.expressions.{AttributeReference, Expression, UnevaluableAggregate} +import org.apache.spark.sql.catalyst.expressions.{AttributeReference, Expression, ExpressionEquals, ExpressionStats, UnevaluableAggregate} import org.apache.spark.sql.catalyst.parser.ParseException import org.apache.spark.sql.catalyst.plans.JoinType import org.apache.spark.sql.catalyst.plans.logical.{DomainJoin, LogicalPlan} @@ -1889,4 +1891,13 @@ object QueryExecutionErrors { def hiveTableWithAnsiIntervalsError(tableName: String): Throwable = { new UnsupportedOperationException(s"Hive table $tableName with ANSI intervals is not supported") } + + def updateEquivalentExpressionsError( + expr: Expression, + map: mutable.HashMap[ExpressionEquals, ExpressionStats], + useCount: Int): Throwable = { + throw new IllegalArgumentException( + s"Cannot update expression: $expr in map: $map with use count: $useCount") + } } + From c7c7016147abbf4c88c0b22b626ea6fd6b927413 Mon Sep 17 00:00:00 2001 From: Peter Toth Date: Wed, 10 Nov 2021 16:27:13 +0100 Subject: [PATCH 4/4] fix exception --- .../catalyst/expressions/EquivalentExpressions.scala | 7 ++++--- .../spark/sql/errors/QueryExecutionErrors.scala | 12 +----------- 2 files changed, 5 insertions(+), 14 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala index f1d22ac19a44..269ab31698b5 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/EquivalentExpressions.scala @@ -24,7 +24,6 @@ import scala.collection.mutable import org.apache.spark.TaskContext import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback import org.apache.spark.sql.catalyst.expressions.objects.LambdaVariable -import org.apache.spark.sql.errors.QueryExecutionErrors /** * This class is used to compute equality of (sub)expression trees. 
Expressions can be added @@ -67,14 +66,16 @@ class EquivalentExpressions { false } else { // Should not happen - throw QueryExecutionErrors.updateEquivalentExpressionsError(expr, map, useCount) + throw new IllegalStateException( + s"Cannot update expression: $expr in map: $map with use count: $useCount") } case _ => if (useCount > 0) { map.put(wrapper, ExpressionStats(expr)(useCount)) } else { // Should not happen - throw QueryExecutionErrors.updateEquivalentExpressionsError(expr, map, useCount) + throw new IllegalStateException( + s"Cannot update expression: $expr in map: $map with use count: $useCount") } false } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala index a564ed9269f0..d7cd8e13f0c5 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala @@ -28,8 +28,6 @@ import java.time.temporal.ChronoField import java.util.ConcurrentModificationException import java.util.concurrent.TimeoutException -import scala.collection.mutable - import com.fasterxml.jackson.core.{JsonParser, JsonToken} import org.apache.hadoop.fs.{FileAlreadyExistsException, FileStatus, Path} import org.apache.hadoop.fs.permission.FsPermission @@ -44,7 +42,7 @@ import org.apache.spark.sql.catalyst.ScalaReflection.Schema import org.apache.spark.sql.catalyst.WalkedTypePath import org.apache.spark.sql.catalyst.analysis.UnresolvedGenerator import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, CatalogTable} -import org.apache.spark.sql.catalyst.expressions.{AttributeReference, Expression, ExpressionEquals, ExpressionStats, UnevaluableAggregate} +import org.apache.spark.sql.catalyst.expressions.{AttributeReference, Expression, UnevaluableAggregate} import org.apache.spark.sql.catalyst.parser.ParseException import org.apache.spark.sql.catalyst.plans.JoinType import org.apache.spark.sql.catalyst.plans.logical.{DomainJoin, LogicalPlan} @@ -1891,13 +1889,5 @@ object QueryExecutionErrors { def hiveTableWithAnsiIntervalsError(tableName: String): Throwable = { new UnsupportedOperationException(s"Hive table $tableName with ANSI intervals is not supported") } - - def updateEquivalentExpressionsError( - expr: Expression, - map: mutable.HashMap[ExpressionEquals, ExpressionStats], - useCount: Int): Throwable = { - throw new IllegalArgumentException( - s"Cannot update expression: $expr in map: $map with use count: $useCount") - } }
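
A minimal end-to-end sketch of the behaviour this series enables, adapted from the SPARK-36073 test added in PATCH 1/4. It assumes a Scala session (or test) with spark-catalyst on the classpath; all constructors used below come from org.apache.spark.sql.catalyst.expressions:

    import org.apache.spark.sql.catalyst.expressions._

    // `1 + 2` occurs in the If condition and, through both branches, in the value
    // expressions, so it can safely be evaluated once up front.
    val add = Add(Literal(1), Literal(2))
    val ifExpr1 = If(Literal(true), add, Literal(3))
    val ifExpr3 = If(GreaterThan(add, Literal(4)), Add(ifExpr1, add), Multiply(ifExpr1, add))

    val equivalence = new EquivalentExpressions
    equivalence.addExprTree(ifExpr3)

    // Expressions seen more than once. Before this change `add` was missed here,
    // because it only appeared inside conditional branches that are not always evaluated.
    val commonExprs = equivalence.getAllExprStates(1)
    assert(commonExprs.size == 1)
    assert(commonExprs.head.useCount == 2)
    assert(commonExprs.head.expr eq add)

The sketch also illustrates the design choice in updateCommonExprs: instead of filtering out children of other common expressions, the loop repeatedly takes the highest remaining expression from the local equivalence map, removes its whole subtree via a negative useCount, and adds only that top expression to the shared map, so nested common expressions like `(a + b)` under `((a + b) + c)` need no explicit parent check.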