Describe the bug
java.lang.RuntimeException: Error during calling Java code from native code: java.lang.UnsupportedOperationException: Not enough spark off-heap execution memory. Acquired: 8388608, granted: 0. Try tweaking config option spark.memory.offHeap.size to get larger space to run this application.
at io.glutenproject.memory.alloc.GlutenManagedReservationListener.reserveOrThrow(GlutenManagedReservationListener.java:53)
at io.glutenproject.vectorized.ArrowOutIterator.nativeHasNext(Native Method)
at io.glutenproject.vectorized.ArrowOutIterator.hasNextInternal(ArrowOutIterator.java:48)
at io.glutenproject.vectorized.GeneralOutIterator.hasNext(GeneralOutIterator.java:37)
at io.glutenproject.backendsapi.glutendata.GlutenIteratorApi$$anon$3.hasNext(GlutenIteratorApi.scala:300)
at io.glutenproject.vectorized.CloseableColumnBatchIterator.hasNext(CloseableColumnBatchIterator.scala:41)
at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:460)
at org.apache.spark.shuffle.GlutenColumnarShuffleWriter.internalWrite(GlutenColumnarShuffleWriter.scala:96)
at org.apache.spark.shuffle.GlutenColumnarShuffleWriter.write(GlutenColumnarShuffleWriter.scala:196)
at org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:52)
at org.apache.spark.scheduler.Task.run(Task.scala:136)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:552)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1533)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:555)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:750)
To Reproduce
spark version: 3.3.1
spark.executor.memory: 8g
spark.executor.cores: 8
spark.executor.memoryOverhead: 1g
spark.memory.offHeap.size: 24g
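
For reference, a minimal sketch of how these settings might be applied (the report does not say how the job was submitted; the app name, the `spark.memory.offHeap.enabled` flag, and the Gluten plugin line are assumptions added so the snippet is self-contained):

```scala
// Sketch only: reproduces the configuration listed above via a SparkSession builder.
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder()
  .appName("gluten-offheap-repro")                           // hypothetical app name
  .config("spark.executor.memory", "8g")
  .config("spark.executor.cores", "8")
  .config("spark.executor.memoryOverhead", "1g")
  .config("spark.memory.offHeap.enabled", "true")            // assumed; off-heap size has no effect without it
  .config("spark.memory.offHeap.size", "24g")
  .config("spark.plugins", "io.glutenproject.GlutenPlugin")  // Gluten plugin class for the io.glutenproject-era releases
  .getOrCreate()
```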