Regression

org.bdgenomics.adam.rdd.ADAMContextSuite.load slices from data frame

Failing for the past 1 build (since failed build #3023)
Took 0.17 sec.

Error Message

Job aborted due to stage failure: Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0, localhost, executor driver): java.lang.IllegalArgumentException: Class is not registered: org.bdgenomics.adam.converters.FastaSliceConverter3$FastaDescriptionLine
Note: To register this class use: kryo.register(org.bdgenomics.adam.converters.FastaSliceConverter3$FastaDescriptionLine.class);
 at com.esotericsoftware.kryo.Kryo.getRegistration(Kryo.java:458)
 at com.esotericsoftware.kryo.util.DefaultClassResolver.writeClass(DefaultClassResolver.java:79)
 at com.esotericsoftware.kryo.Kryo.writeClass(Kryo.java:488)
 at com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:593)
 at org.apache.spark.serializer.KryoSerializationStream.writeObject(KryoSerializer.scala:241)
 at org.apache.spark.serializer.SerializationStream.writeValue(Serializer.scala:134)
 at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:241)
 at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:151)
 at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)
 at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55)
 at org.apache.spark.scheduler.Task.run(Task.scala:121)
 at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:411)
 at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
 at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414)
 at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
 at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
 at java.lang.Thread.run(Thread.java:748)

Driver stacktrace:

Stacktrace

      org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0, localhost, executor driver): java.lang.IllegalArgumentException: Class is not registered: org.bdgenomics.adam.converters.FastaSliceConverter3$FastaDescriptionLine
Note: To register this class use: kryo.register(org.bdgenomics.adam.converters.FastaSliceConverter3$FastaDescriptionLine.class);
	at com.esotericsoftware.kryo.Kryo.getRegistration(Kryo.java:458)
	at com.esotericsoftware.kryo.util.DefaultClassResolver.writeClass(DefaultClassResolver.java:79)
	at com.esotericsoftware.kryo.Kryo.writeClass(Kryo.java:488)
	at com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:593)
	at org.apache.spark.serializer.KryoSerializationStream.writeObject(KryoSerializer.scala:241)
	at org.apache.spark.serializer.SerializationStream.writeValue(Serializer.scala:134)
	at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:241)
	at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:151)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55)
	at org.apache.spark.scheduler.Task.run(Task.scala:121)
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:411)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

Driver stacktrace:
      at org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:1889)
      at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:1877)
      at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:1876)
      at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
      at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
      at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
      at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1876)
      at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:926)
      at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:926)
      at scala.Option.foreach(Option.scala:274)
      at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:926)
      at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2110)
      at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2059)
      at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2048)
      at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
      at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:737)
      at org.apache.spark.SparkContext.runJob(SparkContext.scala:2061)
      at org.apache.spark.SparkContext.runJob(SparkContext.scala:2082)
      at org.apache.spark.SparkContext.runJob(SparkContext.scala:2101)
      at org.apache.spark.SparkContext.runJob(SparkContext.scala:2126)
      at org.apache.spark.rdd.RDD.$anonfun$collect$1(RDD.scala:945)
      at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
      at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
      at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
      at org.apache.spark.rdd.RDD.collect(RDD.scala:944)
      at org.bdgenomics.adam.converters.FastaSliceConverter3$.apply(FastaSliceConverter3.scala:69)
      at org.bdgenomics.adam.rdd.ADAMContext.$anonfun$loadFastaDna$1(ADAMContext.scala:4044)
      at scala.Option.fold(Option.scala:175)
      at org.apache.spark.rdd.Timer.time(Timer.scala:48)
      at org.bdgenomics.adam.rdd.ADAMContext.loadFastaDna(ADAMContext.scala:4032)
      at org.bdgenomics.adam.rdd.ADAMContext.$anonfun$loadSlices$1(ADAMContext.scala:4081)
      at scala.Option.fold(Option.scala:175)
      at org.apache.spark.rdd.Timer.time(Timer.scala:48)
      at org.bdgenomics.adam.rdd.ADAMContext.loadSlices(ADAMContext.scala:4074)
      at org.bdgenomics.adam.rdd.ADAMContextSuite.$anonfun$new$124(ADAMContextSuite.scala:896)
      at org.bdgenomics.utils.misc.SparkFunSuite.$anonfun$sparkTest$1(SparkFunSuite.scala:111)
      at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
      at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
      at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
      at org.scalatest.Transformer.apply(Transformer.scala:22)
      at org.scalatest.Transformer.apply(Transformer.scala:20)
      at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
      at org.scalatest.TestSuite.withFixture(TestSuite.scala:196)
      at org.scalatest.TestSuite.withFixture$(TestSuite.scala:195)
      at org.scalatest.FunSuite.withFixture(FunSuite.scala:1560)
      at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
      at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
      at org.scalatest.SuperEngine.runTestImpl(Engine.scala:286)
      at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
      at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
      at org.bdgenomics.adam.util.ADAMFunSuite.org$scalatest$BeforeAndAfter$$super$runTest(ADAMFunSuite.scala:24)
      at org.scalatest.BeforeAndAfter.runTest(BeforeAndAfter.scala:203)
      at org.scalatest.BeforeAndAfter.runTest$(BeforeAndAfter.scala:192)
      at org.bdgenomics.adam.util.ADAMFunSuite.runTest(ADAMFunSuite.scala:24)
      at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
      at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:393)
      at scala.collection.immutable.List.foreach(List.scala:392)
      at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:381)
      at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:376)
      at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:458)
      at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
      at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
      at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
      at org.scalatest.Suite.run(Suite.scala:1124)
      at org.scalatest.Suite.run$(Suite.scala:1106)
      at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
      at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
      at org.scalatest.SuperEngine.runImpl(Engine.scala:518)
      at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
      at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
      at org.bdgenomics.adam.util.ADAMFunSuite.org$scalatest$BeforeAndAfter$$super$run(ADAMFunSuite.scala:24)
      at org.scalatest.BeforeAndAfter.run(BeforeAndAfter.scala:258)
      at org.scalatest.BeforeAndAfter.run$(BeforeAndAfter.scala:256)
      at org.bdgenomics.adam.util.ADAMFunSuite.run(ADAMFunSuite.scala:24)
      at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1187)
      at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1234)
      at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
      at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
      at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
      at org.scalatest.Suite.runNestedSuites(Suite.scala:1232)
      at org.scalatest.Suite.runNestedSuites$(Suite.scala:1166)
      at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:30)
      at org.scalatest.Suite.run(Suite.scala:1121)
      at org.scalatest.Suite.run$(Suite.scala:1106)
      at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:30)
      at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
      at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1349)
      at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1343)
      at scala.collection.immutable.List.foreach(List.scala:392)
      at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1343)
      at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:1033)
      at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:1011)
      at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1509)
      at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1011)
      at org.scalatest.tools.Runner$.main(Runner.scala:827)
      at org.scalatest.tools.Runner.main(Runner.scala)
      Cause: java.lang.IllegalArgumentException: Class is not registered: org.bdgenomics.adam.converters.FastaSliceConverter3$FastaDescriptionLine
Note: To register this class use: kryo.register(org.bdgenomics.adam.converters.FastaSliceConverter3$FastaDescriptionLine.class);
      at com.esotericsoftware.kryo.Kryo.getRegistration(Kryo.java:458)
      at com.esotericsoftware.kryo.util.DefaultClassResolver.writeClass(DefaultClassResolver.java:79)
      at com.esotericsoftware.kryo.Kryo.writeClass(Kryo.java:488)
      at com.esotericsoftware.kryo.Kryo.writeClassAndObject(Kryo.java:593)
      at org.apache.spark.serializer.KryoSerializationStream.writeObject(KryoSerializer.scala:241)
      at org.apache.spark.serializer.SerializationStream.writeValue(Serializer.scala:134)
      at org.apache.spark.storage.DiskBlockObjectWriter.write(DiskBlockObjectWriter.scala:241)
      at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:151)
      at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)
      at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55)
      at org.apache.spark.scheduler.Task.run(Task.scala:121)
      at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:411)
      at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
      at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414)
      at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
      at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
      at java.lang.Thread.run(Thread.java:748)