You are viewing a plain text version of this content. The canonical link for it is here.
Posted to user@spark.apache.org by Victor Tso-Guillen <vt...@paxata.com> on 2014/09/24 20:10:40 UTC
java.lang.IllegalArgumentException: Size exceeds Integer.MAX_VALUE
Really? What should we make of this?
24 Sep 2014 10:03:36,772 ERROR [Executor task launch worker-52] Executor -
Exception in task ID 40599
java.lang.IllegalArgumentException: Size exceeds Integer.MAX_VALUE
at sun.nio.ch.FileChannelImpl.map(FileChannelImpl.java:789)
at org.apache.spark.storage.DiskStore.getBytes(DiskStore.scala:108)
at
org.apache.spark.storage.BlockManager.doGetLocal(BlockManager.scala:415)
at
org.apache.spark.storage.BlockManager.getLocal(BlockManager.scala:341)
at org.apache.spark.storage.BlockManager.get(BlockManager.scala:508)
at org.apache.spark.CacheManager.getOrCompute(CacheManager.scala:96)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:227)
at
org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:262)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:229)
at
org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:111)
at org.apache.spark.scheduler.Task.run(Task.scala:51)
at
org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:183)
at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:724)
24 Sep 2014 10:03:40,936 ERROR [pool-1-thread-7] BlockManagerWorker -
Exception handling buffer message
java.lang.IllegalArgumentException: Size exceeds Integer.MAX_VALUE
at sun.nio.ch.FileChannelImpl.map(FileChannelImpl.java:789)
at org.apache.spark.storage.DiskStore.getBytes(DiskStore.scala:108)
at
org.apache.spark.storage.BlockManager.doGetLocal(BlockManager.scala:415)
at
org.apache.spark.storage.BlockManager.getLocalBytes(BlockManager.scala:359)
at
org.apache.spark.storage.BlockManagerWorker.getBlock(BlockManagerWorker.scala:90)
at
org.apache.spark.storage.BlockManagerWorker.processBlockMessage(BlockManagerWorker.scala:69)
at
org.apache.spark.storage.BlockManagerWorker$$anonfun$2.apply(BlockManagerWorker.scala:44)
at
org.apache.spark.storage.BlockManagerWorker$$anonfun$2.apply(BlockManagerWorker.scala:44)
at
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at scala.collection.Iterator$class.foreach(Iterator.scala:727)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
at
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
at
org.apache.spark.storage.BlockMessageArray.foreach(BlockMessageArray.scala:28)
at
scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
at
org.apache.spark.storage.BlockMessageArray.map(BlockMessageArray.scala:28)
at
org.apache.spark.storage.BlockManagerWorker.onBlockMessageReceive(BlockManagerWorker.scala:44)
at
org.apache.spark.storage.BlockManagerWorker$$anonfun$1.apply(BlockManagerWorker.scala:34)
at
org.apache.spark.storage.BlockManagerWorker$$anonfun$1.apply(BlockManagerWorker.scala:34)
at org.apache.spark.network.ConnectionManager.org
$apache$spark$network$ConnectionManager$$handleMessage(ConnectionManager.scala:662)
at
org.apache.spark.network.ConnectionManager$$anon$9.run(ConnectionManager.scala:504)
at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:724)
Re: java.lang.IllegalArgumentException: Size exceeds Integer.MAX_VALUE
Posted by Victor Tso-Guillen <vt...@paxata.com>.
Never mind — this is the known 2GB block-size limitation (blocks are memory-mapped via FileChannel.map, whose MappedByteBuffer cannot exceed Integer.MAX_VALUE bytes), tracked in https://issues.apache.org/jira/browse/SPARK-1476
On Wed, Sep 24, 2014 at 11:10 AM, Victor Tso-Guillen <vt...@paxata.com>
wrote:
> Really? What should we make of this?
>
> 24 Sep 2014 10:03:36,772 ERROR [Executor task launch worker-52] Executor -
> Exception in task ID 40599
>
> java.lang.IllegalArgumentException: Size exceeds Integer.MAX_VALUE
>
> at sun.nio.ch.FileChannelImpl.map(FileChannelImpl.java:789)
>
> at org.apache.spark.storage.DiskStore.getBytes(DiskStore.scala:108)
>
> at
> org.apache.spark.storage.BlockManager.doGetLocal(BlockManager.scala:415)
>
> at
> org.apache.spark.storage.BlockManager.getLocal(BlockManager.scala:341)
>
> at
> org.apache.spark.storage.BlockManager.get(BlockManager.scala:508)
>
> at
> org.apache.spark.CacheManager.getOrCompute(CacheManager.scala:96)
>
> at org.apache.spark.rdd.RDD.iterator(RDD.scala:227)
>
> at
> org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
>
> at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:262)
>
> at org.apache.spark.rdd.RDD.iterator(RDD.scala:229)
>
> at
> org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:111)
>
> at org.apache.spark.scheduler.Task.run(Task.scala:51)
>
> at
> org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:183)
>
> at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>
> at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>
> at java.lang.Thread.run(Thread.java:724)
>
> 24 Sep 2014 10:03:40,936 ERROR [pool-1-thread-7] BlockManagerWorker -
> Exception handling buffer message
>
> java.lang.IllegalArgumentException: Size exceeds Integer.MAX_VALUE
>
> at sun.nio.ch.FileChannelImpl.map(FileChannelImpl.java:789)
>
> at org.apache.spark.storage.DiskStore.getBytes(DiskStore.scala:108)
>
> at
> org.apache.spark.storage.BlockManager.doGetLocal(BlockManager.scala:415)
>
> at
> org.apache.spark.storage.BlockManager.getLocalBytes(BlockManager.scala:359)
>
> at
> org.apache.spark.storage.BlockManagerWorker.getBlock(BlockManagerWorker.scala:90)
>
> at
> org.apache.spark.storage.BlockManagerWorker.processBlockMessage(BlockManagerWorker.scala:69)
>
> at
> org.apache.spark.storage.BlockManagerWorker$$anonfun$2.apply(BlockManagerWorker.scala:44)
>
> at
> org.apache.spark.storage.BlockManagerWorker$$anonfun$2.apply(BlockManagerWorker.scala:44)
>
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
>
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
>
> at scala.collection.Iterator$class.foreach(Iterator.scala:727)
>
> at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
>
> at
> scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
>
> at
> org.apache.spark.storage.BlockMessageArray.foreach(BlockMessageArray.scala:28)
>
> at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
>
> at
> org.apache.spark.storage.BlockMessageArray.map(BlockMessageArray.scala:28)
>
> at
> org.apache.spark.storage.BlockManagerWorker.onBlockMessageReceive(BlockManagerWorker.scala:44)
>
> at
> org.apache.spark.storage.BlockManagerWorker$$anonfun$1.apply(BlockManagerWorker.scala:34)
>
> at
> org.apache.spark.storage.BlockManagerWorker$$anonfun$1.apply(BlockManagerWorker.scala:34)
>
> at org.apache.spark.network.ConnectionManager.org
> $apache$spark$network$ConnectionManager$$handleMessage(ConnectionManager.scala:662)
>
> at
> org.apache.spark.network.ConnectionManager$$anon$9.run(ConnectionManager.scala:504)
>
> at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>
> at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>
> at java.lang.Thread.run(Thread.java:724)
>