Skip to content

Commit f07ab09

Browse files
committed
Throw SparkOutOfMemoryError in HashAggregateExec, too.
1 parent a8e1c98 commit f07ab09

File tree

1 file changed

+4
-2
lines changed

1 file changed

+4
-2
lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/HashAggregateExec.scala

Lines changed: 4 additions & 2 deletions

@@ -18,7 +18,7 @@
 package org.apache.spark.sql.execution.aggregate

 import org.apache.spark.TaskContext
-import org.apache.spark.memory.TaskMemoryManager
+import org.apache.spark.memory.{SparkOutOfMemoryError, TaskMemoryManager}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.errors._
@@ -762,6 +762,8 @@ case class HashAggregateExec(
       ("true", "true", "", "")
     }

+    val oomeClassName = classOf[SparkOutOfMemoryError].getName
+
     val findOrInsertRegularHashMap: String =
       s"""
          |// generate grouping key
@@ -787,7 +789,7 @@ case class HashAggregateExec(
          |  $unsafeRowKeys, ${hashEval.value});
          | if ($unsafeRowBuffer == null) {
          |   // failed to allocate the first page
-         |   throw new OutOfMemoryError("No enough memory for aggregation");
+         |   throw new $oomeClassName("No enough memory for aggregation");
          | }
          |}
       """.stripMargin

0 commit comments

Comments (0)