java.util.concurrent.ExecutionException, cause -> scala.MatchError: None (of class scala.None$)
org.apache.spark.storage.BlockManager.putIterator(BlockManager.scala:732)
org.apache.spark.storage.BlockManager.putSingle(BlockManager.scala:1281)
org.apache.spark.broadcast.TorrentBroadcast.writeBlocks(TorrentBroadcast.scala:122)
org.apache.spark.broadcast.TorrentBroadcast.<init>(TorrentBroadcast.scala:88)
org.apache.spark.broadcast.TorrentBroadcastFactory.newBroadcast(TorrentBroadcastFactory.scala:34)
org.apache.spark.broadcast.BroadcastManager.newBroadcast(BroadcastManager.scala:56)
org.apache.spark.SparkContext.broadcast(SparkContext.scala:1410)
org.apache.spark.rdd.NewHadoopRDD.<init>(NewHadoopRDD.scala:78)
org.apache.spark.SparkContext$$anonfun$newAPIHadoopRDD$1.apply(SparkContext.scala:1142)
org.apache.spark.SparkContext$$anonfun$newAPIHadoopRDD$1.apply(SparkContext.scala:1132)
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
org.apache.spark.SparkContext.withScope(SparkContext.scala:701)
org.apache.spark.SparkContext.newAPIHadoopRDD(SparkContext.scala:1132)
...
com.cloudera.livy.rsc.driver.BypassJob.call(BypassJob.java:40)
com.cloudera.livy.rsc.driver.BypassJob.call(BypassJob.java:27)
com.cloudera.livy.rsc.driver.JobWrapper.call(JobWrapper.java:57)
com.cloudera.livy.rsc.driver.BypassJobWrapper.call(BypassJobWrapper.java:42)
com.cloudera.livy.rsc.driver.BypassJobWrapper.call(BypassJobWrapper.java:27)
java.util.concurrent.FutureTask.run(FutureTask.java:266)
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
java.lang.Thread.run(Thread.java:745)
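
For context on where this call path comes from: the trace shows a job submitted through Livy's RSC driver (BypassJob) invoking SparkContext.newAPIHadoopRDD, which constructs a NewHadoopRDD and broadcasts its Hadoop Configuration via TorrentBroadcast; the MatchError surfaces inside BlockManager.putIterator during that broadcast. The sketch below is not from the original report, it is a minimal, hypothetical example of driver code that exercises the same newAPIHadoopRDD path; the input path, app name, and input format are assumptions chosen only for illustration.

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.{LongWritable, Text}
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
import org.apache.spark.{SparkConf, SparkContext}

object NewApiHadoopRddSketch {
  def main(args: Array[String]): Unit = {
    // Master normally comes from spark-submit / Livy; fall back to local for a standalone run.
    val sparkConf = new SparkConf()
      .setAppName("newAPIHadoopRDD-sketch")
      .setIfMissing("spark.master", "local[*]")
    val sc = new SparkContext(sparkConf)

    // newAPIHadoopRDD reads its input location from the Hadoop Configuration.
    // The path here is a placeholder, not one taken from the trace.
    val hadoopConf = new Configuration()
    hadoopConf.set("mapreduce.input.fileinputformat.inputdir", "hdfs:///tmp/input")

    // This corresponds to SparkContext.newAPIHadoopRDD(SparkContext.scala:1132) in the trace:
    // building the NewHadoopRDD broadcasts `hadoopConf` (TorrentBroadcast.writeBlocks),
    // which is the point where BlockManager.putIterator raised the MatchError above.
    val rdd = sc.newAPIHadoopRDD(
      hadoopConf,
      classOf[TextInputFormat],
      classOf[LongWritable],
      classOf[Text])

    println(rdd.count())
    sc.stop()
  }
}

Any other new-API input format (for example an HBase TableInputFormat) would follow the same constructor-time broadcast path, so the choice of TextInputFormat here is incidental to the failure shown in the trace.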