# IndexOutOfBoundsException in GibbsLDAAliasSampler aborts Spark job

## Description

A Spark LDA job fails after 4 task retries with `java.lang.IndexOutOfBoundsException: 10 not in [-10,10)`, thrown from `breeze.linalg.DenseVector.apply` inside `GibbsLDAAliasSampler` (GibbsLDASampler.scala:299) — i.e. the sampler indexes a length-10 topic vector with index 10 (one past the end). Full executor log and stack trace:
16/05/09 12:07:59 ERROR TaskSetManager: Task 0 in stage 39.0 failed 4 times; aborting job
Exception in thread "main" org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 39.0 failed 4 times, most recent failure: Lost task 0.3 in stage 39.0 (TID 248): java.lang.IndexOutOfBoundsException: 10 not in [-10,10)
at breeze.linalg.DenseVector.apply$mcI$sp(DenseVector.scala:71)
at breeze.linalg.DenseVector.apply(DenseVector.scala:70)
at breeze.linalg.DenseVector.apply(DenseVector.scala:50)
at breeze.linalg.TensorLike$class.apply$mcII$sp(Tensor.scala:94)
at breeze.linalg.DenseVector.apply$mcII$sp(DenseVector.scala:50)
at org.apache.spark.mllib.topicModeling.GibbsLDAAliasSampler$$anonfun$2$$anonfun$apply$4$$anonfun$apply$1.apply$mcVI$sp(GibbsLDASampler.scala:299)
at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
at org.apache.spark.mllib.topicModeling.GibbsLDAAliasSampler$$anonfun$2$$anonfun$apply$4.apply(GibbsLDASampler.scala:282)
at org.apache.spark.mllib.topicModeling.GibbsLDAAliasSampler$$anonfun$2$$anonfun$apply$4.apply(GibbsLDASampler.scala:275)
at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
at org.apache.spark.graphx.impl.EdgePartition.map(EdgePartition.scala:185)
at org.apache.spark.graphx.impl.GraphImpl$$anonfun$7.apply(GraphImpl.scala:156)
at org.apache.spark.graphx.impl.GraphImpl$$anonfun$7.apply(GraphImpl.scala:155)
at org.apache.spark.graphx.impl.EdgeRDDImpl$$anonfun$mapEdgePartitions$1.apply(EdgeRDDImpl.scala:121)
at org.apache.spark.graphx.impl.EdgeRDDImpl$$anonfun$mapEdgePartitions$1.apply(EdgeRDDImpl.scala:118)
at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$20.apply(RDD.scala:710)
at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$20.apply(RDD.scala:710)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:306)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:270)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:306)
at org.apache.spark.CacheManager.getOrCompute(CacheManager.scala:69)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:268)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:306)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:270)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66)
at org.apache.spark.scheduler.Task.run(Task.scala:89)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)