
Commit 2cf5940

Fix minor exception messages of HashedRelation and HashedJoin

1 parent 4aee19e · commit 2cf5940

2 files changed: 8 additions and 16 deletions

sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala

Lines changed: 6 additions & 12 deletions

@@ -28,10 +28,10 @@ import org.codehaus.janino.InternalCompilerException
 
 import org.apache.spark.{Partition, SparkException, SparkUpgradeException}
 import org.apache.spark.executor.CommitDeniedException
+import org.apache.spark.memory.SparkOutOfMemoryError
 import org.apache.spark.sql.catalyst.analysis.UnresolvedGenerator
 import org.apache.spark.sql.catalyst.catalog.CatalogDatabase
 import org.apache.spark.sql.catalyst.expressions.{Expression, UnevaluableAggregate}
-import org.apache.spark.sql.catalyst.plans.JoinType
 import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
 import org.apache.spark.sql.connector.catalog.Identifier
 import org.apache.spark.sql.connector.expressions.Transform
@@ -712,16 +712,6 @@ object QueryExecutionErrors {
       "Dictionary encoding should not be used because of dictionary overflow.")
   }
 
-  def hashJoinCannotTakeJoinTypeWithBuildLeftError(joinType: JoinType): Throwable = {
-    new IllegalArgumentException(
-      s"HashJoin should not take $joinType as the JoinType with building left side")
-  }
-
-  def hashJoinCannotTakeJoinTypeWithBuildRightError(joinType: JoinType): Throwable = {
-    new IllegalArgumentException(
-      s"HashJoin should not take $joinType as the JoinType with building right side")
-  }
-
   def endOfIteratorError(): Throwable = {
     new NoSuchElementException("End of the iterator")
   }
@@ -730,11 +720,15 @@
     new IOException("Could not allocate memory to grow BytesToBytesMap")
   }
 
-  def cannotAcquireMemoryToBuildHashRelationError(size: Long, got: Long): Throwable = {
+  def cannotAcquireMemoryToBuildLongHashedRelationError(size: Long, got: Long): Throwable = {
     new SparkException(s"Can't acquire $size bytes memory to build hash relation, " +
       s"got $got bytes")
   }
 
+  def cannotAcquireMemoryToBuildUnsafeHashedRelationError(): Throwable = {
+    new SparkOutOfMemoryError("There is not enough memory to build hash map")
+  }
+
   def rowLargerThan256MUnsupportedError(): Throwable = {
     new UnsupportedOperationException("Does not support row that is larger than 256M")
   }
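
For context, a minimal self-contained sketch (not Spark source; all names below are hypothetical) of the pattern these hunks follow: a central factory object builds the Throwable with a consistent message, and the call site frees whatever it holds before throwing what the factory returns.

// Sketch only: hypothetical names, not Spark's.
object ErrorFactory {
  // Mirrors the shape of the renamed helper: the message reports both the
  // requested size and the amount actually acquired.
  def cannotAcquireMemoryError(size: Long, got: Long): Throwable =
    new RuntimeException(
      s"Can't acquire $size bytes memory to build hash relation, got $got bytes")
}

object EnsureAcquireDemo {
  // Pretend allocator that always comes up short, to exercise the error path.
  private def acquireMemory(size: Long): Long = size / 2
  private def freeMemory(size: Long): Unit = ()

  def ensureAcquireMemory(size: Long): Unit = {
    val got = acquireMemory(size)
    if (got < size) {
      freeMemory(got) // release the partial grab before throwing
      throw ErrorFactory.cannotAcquireMemoryError(size, got)
    }
  }

  def main(args: Array[String]): Unit = {
    try ensureAcquireMemory(1024)
    catch { case e: RuntimeException => println(e.getMessage) }
  }
}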

sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala

Lines changed: 2 additions & 4 deletions

@@ -476,9 +476,7 @@ private[joins] object UnsafeHashedRelation {
           row.getBaseObject, row.getBaseOffset, row.getSizeInBytes)
         if (!success) {
          binaryMap.free()
-          // scalastyle:off throwerror
-          throw new SparkOutOfMemoryError("There is not enough memory to build hash map")
-          // scalastyle:on throwerror
+          throw QueryExecutionErrors.cannotAcquireMemoryToBuildUnsafeHashedRelationError()
        }
      } else if (isNullAware) {
        return HashedRelationWithAllNullKeys
@@ -577,7 +575,7 @@ private[execution] final class LongToUnsafeRowMap(val mm: TaskMemoryManager, cap
    val got = acquireMemory(size)
    if (got < size) {
      freeMemory(got)
-      throw QueryExecutionErrors.cannotAcquireMemoryToBuildHashRelationError(size, got)
+      throw QueryExecutionErrors.cannotAcquireMemoryToBuildLongHashedRelationError(size, got)
    }
  }

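The UnsafeHashedRelation hunk also drops the scalastyle:off/on throwerror markers, presumably because the factory only constructs the SparkOutOfMemoryError and returns it, so no "throw new ...Error" expression remains at the call site. A hedged sketch of that split, using made-up stand-in names:

// Sketch only: FakeOutOfMemoryError and these objects are hypothetical stand-ins.
class FakeOutOfMemoryError(msg: String) extends Error(msg)

object Errors {
  // Constructs the error but never throws it, so call sites only ever
  // write `throw Errors.notEnoughMemoryForHashMap()`.
  def notEnoughMemoryForHashMap(): Throwable =
    new FakeOutOfMemoryError("There is not enough memory to build hash map")
}

object BuildSite {
  def build(success: Boolean): String = {
    if (!success) {
      // in the real code the map is freed here before throwing (binaryMap.free())
      throw Errors.notEnoughMemoryForHashMap()
    }
    "built"
  }

  def main(args: Array[String]): Unit = {
    try println(build(success = false))
    catch { case e: Error => println(s"caught: ${e.getMessage}") }
  }
}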