You signed in with another tab or window. Reload to refresh your session. You signed out in another tab or window. Reload to refresh your session. You switched accounts on another tab or window. Reload to refresh your session. Dismiss alert
20/11/03 11:38:22 ERROR Executor: Exception in task 0.0 in stage 1.0 (TID 1)
java.lang.NullPointerException
at org.biodatageeks.sequila.pileup.model.ExtendedReads.calculateAlts(Read.scala:104)
at org.biodatageeks.sequila.pileup.model.ExtendedReads.analyzeRead(Read.scala:33)
at org.biodatageeks.sequila.pileup.model.AlignmentsRDD$$anonfun$assembleContigAggregates$1$$anonfun$apply$3$$anonfun$apply$2.apply$mcV$sp(AlignmentsRDD.scala:62)
at org.biodatageeks.sequila.pileup.model.AlignmentsRDD$$anonfun$assembleContigAggregates$1$$anonfun$apply$3$$anonfun$apply$2.apply(AlignmentsRDD.scala:62)
at org.biodatageeks.sequila.pileup.model.AlignmentsRDD$$anonfun$assembleContigAggregates$1$$anonfun$apply$3$$anonfun$apply$2.apply(AlignmentsRDD.scala:62)
at scala.Option.fold(Option.scala:158)
at org.apache.spark.rdd.Timer.time(Timer.scala:48)
at org.biodatageeks.sequila.pileup.model.AlignmentsRDD$$anonfun$assembleContigAggregates$1$$anonfun$apply$3.apply(AlignmentsRDD.scala:62)
at org.biodatageeks.sequila.pileup.model.AlignmentsRDD$$anonfun$assembleContigAggregates$1$$anonfun$apply$3.apply(AlignmentsRDD.scala:44)
at scala.Option.fold(Option.scala:158)
at org.apache.spark.rdd.Timer.time(Timer.scala:48)
at org.biodatageeks.sequila.pileup.model.AlignmentsRDD$$anonfun$assembleContigAggregates$1.apply(AlignmentsRDD.scala:44)
at org.biodatageeks.sequila.pileup.model.AlignmentsRDD$$anonfun$assembleContigAggregates$1.apply(AlignmentsRDD.scala:37)
at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$23.apply(RDD.scala:801)
at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$23.apply(RDD.scala:801)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
at org.apache.spark.rdd.RDD$$anonfun$7.apply(RDD.scala:337)
at org.apache.spark.rdd.RDD$$anonfun$7.apply(RDD.scala:335)
at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1165)
at org.apache.spark.rdd.Timer.time(Timer.scala:48)
at org.biodatageeks.sequila.pileup.model.AlignmentsRDD$$anonfun$assembleContigAggregates$1.apply(AlignmentsRDD.scala:44)
at org.biodatageeks.sequila.pileup.model.AlignmentsRDD$$anonfun$assembleContigAggregates$1.apply(AlignmentsRDD.scala:37)
at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$23.apply(RDD.scala:801)
at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$23.apply(RDD.scala:801)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
at org.apache.spark.rdd.RDD$$anonfun$7.apply(RDD.scala:337)
at org.apache.spark.rdd.RDD$$anonfun$7.apply(RDD.scala:335)
at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1165)
at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1156)
at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:1091)
at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1156)
at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:882)
at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:335)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:286)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
at org.apache.spark.scheduler.Task.run(Task.scala:121)
at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
The text was updated successfully, but these errors were encountered:
20/11/03 11:38:22 ERROR Executor: Exception in task 0.0 in stage 1.0 (TID 1)
java.lang.NullPointerException
at org.biodatageeks.sequila.pileup.model.ExtendedReads.calculateAlts(Read.scala:104)
at org.biodatageeks.sequila.pileup.model.ExtendedReads.analyzeRead(Read.scala:33)
at org.biodatageeks.sequila.pileup.model.AlignmentsRDD$$anonfun$assembleContigAggregates$1$$anonfun$apply$3$$anonfun$apply$2.apply$mcV$sp(AlignmentsRDD.scala:62)
at org.biodatageeks.sequila.pileup.model.AlignmentsRDD$$anonfun$assembleContigAggregates$1$$anonfun$apply$3$$anonfun$apply$2.apply(AlignmentsRDD.scala:62)
at org.biodatageeks.sequila.pileup.model.AlignmentsRDD$$anonfun$assembleContigAggregates$1$$anonfun$apply$3$$anonfun$apply$2.apply(AlignmentsRDD.scala:62)
at scala.Option.fold(Option.scala:158)
at org.apache.spark.rdd.Timer.time(Timer.scala:48)
at org.biodatageeks.sequila.pileup.model.AlignmentsRDD$$anonfun$assembleContigAggregates$1$$anonfun$apply$3.apply(AlignmentsRDD.scala:62)
at org.biodatageeks.sequila.pileup.model.AlignmentsRDD$$anonfun$assembleContigAggregates$1$$anonfun$apply$3.apply(AlignmentsRDD.scala:44)
at scala.Option.fold(Option.scala:158)
at org.apache.spark.rdd.Timer.time(Timer.scala:48)
at org.biodatageeks.sequila.pileup.model.AlignmentsRDD$$anonfun$assembleContigAggregates$1.apply(AlignmentsRDD.scala:44)
at org.biodatageeks.sequila.pileup.model.AlignmentsRDD$$anonfun$assembleContigAggregates$1.apply(AlignmentsRDD.scala:37)
at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$23.apply(RDD.scala:801)
at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$23.apply(RDD.scala:801)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
at org.apache.spark.rdd.RDD$$anonfun$7.apply(RDD.scala:337)
at org.apache.spark.rdd.RDD$$anonfun$7.apply(RDD.scala:335)
at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1165)
at org.apache.spark.rdd.Timer.time(Timer.scala:48)
at org.biodatageeks.sequila.pileup.model.AlignmentsRDD$$anonfun$assembleContigAggregates$1.apply(AlignmentsRDD.scala:44)
at org.biodatageeks.sequila.pileup.model.AlignmentsRDD$$anonfun$assembleContigAggregates$1.apply(AlignmentsRDD.scala:37)
at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$23.apply(RDD.scala:801)
at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$23.apply(RDD.scala:801)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
at org.apache.spark.rdd.RDD$$anonfun$7.apply(RDD.scala:337)
at org.apache.spark.rdd.RDD$$anonfun$7.apply(RDD.scala:335)
at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1165)
at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1156)
at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:1091)
at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1156)
at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:882)
at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:335)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:286)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
at org.apache.spark.scheduler.Task.run(Task.scala:121)
at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
The text was updated successfully, but these errors were encountered: