Success
Console Output

Skipping 11,872 KB..
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 163 (ParallelCollectionRDD[482] at parallelize at BlockWeightedLeastSquares.scala:305), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1248) called with curMem=100488, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_313 stored as values in memory (estimated size 1248.0 B, free 1919.9 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(914) called with curMem=101736, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_313_piece0 stored as bytes in memory (estimated size 914.0 B, free 1919.9 MB)
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_313_piece0 in memory on localhost:59157 (size: 914.0 B, free: 1919.9 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 313 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 1 missing tasks from ResultStage 163 (ParallelCollectionRDD[482] at parallelize at BlockWeightedLeastSquares.scala:305)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 163.0 with 1 tasks
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 163.0 (TID 425, localhost, PROCESS_LOCAL, 2085 bytes)
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 163.0 (TID 425)
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 163.0 (TID 425). 915 bytes result sent to driver
[task-result-getter-2] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 163.0 (TID 425) in 867 ms on localhost (1/1)
[task-result-getter-2] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 163.0, whose tasks have all completed, from pool 
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 163 (foreach at BlockWeightedLeastSquares.scala:306) finished in 0.867 s
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Job 163 finished: foreach at BlockWeightedLeastSquares.scala:306, took 0.873139 s
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.storage.BlockManagerInfo - Removed broadcast_304_piece0 on localhost:59157 in memory (size: 4.4 KB, free: 1919.9 MB)
[Spark Context Cleaner] INFO org.apache.spark.ContextCleaner - Cleaned accumulator 1741
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.storage.BlockManagerInfo - Removed broadcast_309_piece0 on localhost:59157 in memory (size: 4.9 KB, free: 1919.9 MB)
[Spark Context Cleaner] INFO org.apache.spark.ContextCleaner - Cleaned accumulator 1742
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.storage.BlockManagerInfo - Removed broadcast_311_piece0 on localhost:59157 in memory (size: 4.1 KB, free: 1919.9 MB)
[Spark Context Cleaner] INFO org.apache.spark.ContextCleaner - Cleaned accumulator 1743
[Spark Context Cleaner] INFO org.apache.spark.ContextCleaner - Cleaned accumulator 1744
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.storage.BlockManagerInfo - Removed broadcast_312_piece0 on localhost:59157 in memory (size: 4.4 KB, free: 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.storage.BlockManagerInfo - Removed broadcast_303_piece0 on localhost:59157 in memory (size: 914.0 B, free: 1920.0 MB)
[Spark Context Cleaner] INFO org.apache.spark.ContextCleaner - Cleaned accumulator 1740
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.storage.BlockManagerInfo - Removed broadcast_298_piece0 on localhost:59157 in memory (size: 327.0 B, free: 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.storage.BlockManagerInfo - Removed broadcast_297_piece0 on localhost:59157 in memory (size: 327.0 B, free: 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.storage.BlockManagerInfo - Removed broadcast_296_piece0 on localhost:59157 in memory (size: 201.0 B, free: 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.storage.BlockManagerInfo - Removed broadcast_295_piece0 on localhost:59157 in memory (size: 327.0 B, free: 1920.0 MB)
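Jobs with the shape of 163 above come from a shuffle-free action: `parallelize` builds a ParallelCollectionRDD with no parent RDDs, so `foreach` is submitted as a single ResultStage ("Parents of final stage: List()"). A minimal sketch of such an action; the data and partition count are illustrative, not the suite's actual code:

    import org.apache.spark.{SparkConf, SparkContext}

    object SingleStageJob {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setMaster("local[2]").setAppName("single-stage"))
        // parallelize yields a ParallelCollectionRDD with no parents, so foreach
        // runs as one ResultStage with no shuffle and no parent stages.
        sc.parallelize(Seq(1, 2, 3), numSlices = 1).foreach(x => println(x))
        sc.stop()
      }
    }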
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Starting job: count at BlockWeightedLeastSquaresSuite.scala:26
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Registering RDD 2 (mapPartitionsWithIndex at BlockWeightedLeastSquares.scala:348)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 164 (count at BlockWeightedLeastSquaresSuite.scala:26) with 3 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 165(count at BlockWeightedLeastSquaresSuite.scala:26)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List(ShuffleMapStage 164)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List(ShuffleMapStage 164)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ShuffleMapStage 164 (MapPartitionsRDD[2] at mapPartitionsWithIndex at BlockWeightedLeastSquares.scala:348), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(2376) called with curMem=24548, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_314 stored as values in memory (estimated size 2.3 KB, free 1919.9 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1511) called with curMem=26924, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_314_piece0 stored as bytes in memory (estimated size 1511.0 B, free 1919.9 MB)
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_314_piece0 in memory on localhost:59157 (size: 1511.0 B, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 314 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 3 missing tasks from ShuffleMapStage 164 (MapPartitionsRDD[2] at mapPartitionsWithIndex at BlockWeightedLeastSquares.scala:348)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 164.0 with 3 tasks
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 164.0 (TID 426, localhost, PROCESS_LOCAL, 2448 bytes)
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 164.0 (TID 426)
[Executor task launch worker-2] INFO org.apache.spark.storage.BlockManager - Found block rdd_1_0 locally
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 164.0 (TID 426). 2255 bytes result sent to driver
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 1.0 in stage 164.0 (TID 427, localhost, PROCESS_LOCAL, 2448 bytes)
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Running task 1.0 in stage 164.0 (TID 427)
[task-result-getter-1] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 164.0 (TID 426) in 6 ms on localhost (1/3)
[Executor task launch worker-2] INFO org.apache.spark.storage.BlockManager - Found block rdd_1_1 locally
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Finished task 1.0 in stage 164.0 (TID 427). 2255 bytes result sent to driver
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 2.0 in stage 164.0 (TID 428, localhost, PROCESS_LOCAL, 2448 bytes)
[task-result-getter-3] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 1.0 in stage 164.0 (TID 427) in 5 ms on localhost (2/3)
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Running task 2.0 in stage 164.0 (TID 428)
[Executor task launch worker-2] INFO org.apache.spark.storage.BlockManager - Found block rdd_1_2 locally
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Finished task 2.0 in stage 164.0 (TID 428). 2255 bytes result sent to driver
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 2.0 in stage 164.0 (TID 428) in 5 ms on localhost (3/3)
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 164.0, whose tasks have all completed, from pool 
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ShuffleMapStage 164 (mapPartitionsWithIndex at BlockWeightedLeastSquares.scala:348) finished in 0.015 s
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - looking for newly runnable stages
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - running: Set()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - waiting: Set(ResultStage 165)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - failed: Set()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents for ResultStage 165: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 165 (MapPartitionsRDD[5] at mapPartitions at BlockWeightedLeastSquares.scala:353), which is now runnable
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(2656) called with curMem=28435, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_315 stored as values in memory (estimated size 2.6 KB, free 1919.9 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1626) called with curMem=31091, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_315_piece0 stored as bytes in memory (estimated size 1626.0 B, free 1919.9 MB)
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_315_piece0 in memory on localhost:59157 (size: 1626.0 B, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 315 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 3 missing tasks from ResultStage 165 (MapPartitionsRDD[5] at mapPartitions at BlockWeightedLeastSquares.scala:353)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 165.0 with 3 tasks
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 165.0 (TID 429, localhost, PROCESS_LOCAL, 1901 bytes)
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 165.0 (TID 429)
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Getting 3 non-empty blocks out of 3 blocks
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 165.0 (TID 429). 1203 bytes result sent to driver
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 1.0 in stage 165.0 (TID 430, localhost, PROCESS_LOCAL, 1901 bytes)
[task-result-getter-2] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 165.0 (TID 429) in 7 ms on localhost (1/3)
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Running task 1.0 in stage 165.0 (TID 430)
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Getting 3 non-empty blocks out of 3 blocks
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Finished task 1.0 in stage 165.0 (TID 430). 1203 bytes result sent to driver
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 2.0 in stage 165.0 (TID 431, localhost, PROCESS_LOCAL, 1901 bytes)
[task-result-getter-1] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 1.0 in stage 165.0 (TID 430) in 5 ms on localhost (2/3)
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Running task 2.0 in stage 165.0 (TID 431)
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Getting 3 non-empty blocks out of 3 blocks
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Finished task 2.0 in stage 165.0 (TID 431). 1203 bytes result sent to driver
[task-result-getter-3] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 2.0 in stage 165.0 (TID 431) in 4 ms on localhost (3/3)
[task-result-getter-3] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 165.0, whose tasks have all completed, from pool 
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 165 (count at BlockWeightedLeastSquaresSuite.scala:26) finished in 0.016 s
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Job 164 finished: count at BlockWeightedLeastSquaresSuite.scala:26, took 0.040394 s
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(368) called with curMem=32717, maxMem=2013234462
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - Block broadcast_316 stored as values in memory (estimated size 368.0 B, free 1919.9 MB)
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(473) called with curMem=33085, maxMem=2013234462
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - Block broadcast_316_piece0 stored as bytes in memory (estimated size 473.0 B, free 1919.9 MB)
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_316_piece0 in memory on localhost:59157 (size: 473.0 B, free: 1920.0 MB)
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Created broadcast 316 from broadcast at BlockWeightedLeastSquaresSuite.scala:43
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(104) called with curMem=33558, maxMem=2013234462
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - Block broadcast_317 stored as values in memory (estimated size 104.0 B, free 1919.9 MB)
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(193) called with curMem=33662, maxMem=2013234462
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - Block broadcast_317_piece0 stored as bytes in memory (estimated size 193.0 B, free 1919.9 MB)
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_317_piece0 in memory on localhost:59157 (size: 193.0 B, free: 1919.9 MB)
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Created broadcast 317 from broadcast at BlockWeightedLeastSquaresSuite.scala:44
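Unlike the scheduler's internal task broadcasts (created at DAGScheduler.scala:861), broadcasts 316 and 317 are made explicitly by the suite at lines 43-44 of the test file: each `sc.broadcast` call stores the value in the MemoryStore and registers a serialized piece with the BlockManager, producing the paired log lines above. A sketch with illustrative payloads:

    import org.apache.spark.{SparkConf, SparkContext}

    object BroadcastDemo {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setMaster("local").setAppName("broadcast-demo"))
        // Explicit driver-side broadcasts; the payloads here are made up.
        // Each call logs "Created broadcast N from broadcast at ...".
        val weights = sc.broadcast(Array(0.5, 1.0, 2.0))
        val offset  = sc.broadcast(1.0)
        val result = sc.parallelize(0 until 3)
          .map(i => weights.value(i) + offset.value) // tasks read the broadcast values
          .collect()
        result.foreach(println)
        sc.stop()
      }
    }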
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Starting job: reduce at BlockWeightedLeastSquaresSuite.scala:56
[dag-scheduler-event-loop] INFO org.apache.spark.MapOutputTrackerMaster - Size of output statuses for shuffle 0 is 160 bytes
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Registering RDD 7 (mapPartitionsWithIndex at BlockWeightedLeastSquares.scala:358)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 165 (reduce at BlockWeightedLeastSquaresSuite.scala:56) with 3 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 168(reduce at BlockWeightedLeastSquaresSuite.scala:56)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List(ShuffleMapStage 166, ShuffleMapStage 167)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List(ShuffleMapStage 167)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ShuffleMapStage 167 (MapPartitionsRDD[7] at mapPartitionsWithIndex at BlockWeightedLeastSquares.scala:358), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(2992) called with curMem=33855, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_318 stored as values in memory (estimated size 2.9 KB, free 1919.9 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1779) called with curMem=36847, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_318_piece0 stored as bytes in memory (estimated size 1779.0 B, free 1919.9 MB)
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_318_piece0 in memory on localhost:59157 (size: 1779.0 B, free: 1919.9 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 318 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 3 missing tasks from ShuffleMapStage 167 (MapPartitionsRDD[7] at mapPartitionsWithIndex at BlockWeightedLeastSquares.scala:358)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 167.0 with 3 tasks
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 167.0 (TID 432, localhost, PROCESS_LOCAL, 3274 bytes)
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 167.0 (TID 432)
[Executor task launch worker-2] INFO org.apache.spark.storage.BlockManager - Found block rdd_0_0 locally
[Executor task launch worker-2] INFO org.apache.spark.storage.BlockManager - Found block rdd_1_0 locally
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 167.0 (TID 432). 2255 bytes result sent to driver
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 1.0 in stage 167.0 (TID 433, localhost, PROCESS_LOCAL, 3274 bytes)
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Running task 1.0 in stage 167.0 (TID 433)
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 167.0 (TID 432) in 5 ms on localhost (1/3)
[Executor task launch worker-2] INFO org.apache.spark.storage.BlockManager - Found block rdd_0_1 locally
[Executor task launch worker-2] INFO org.apache.spark.storage.BlockManager - Found block rdd_1_1 locally
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Finished task 1.0 in stage 167.0 (TID 433). 2255 bytes result sent to driver
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 2.0 in stage 167.0 (TID 434, localhost, PROCESS_LOCAL, 3274 bytes)
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Running task 2.0 in stage 167.0 (TID 434)
[task-result-getter-2] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 1.0 in stage 167.0 (TID 433) in 5 ms on localhost (2/3)
[Executor task launch worker-2] INFO org.apache.spark.storage.BlockManager - Found block rdd_0_2 locally
[Executor task launch worker-2] INFO org.apache.spark.storage.BlockManager - Found block rdd_1_2 locally
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Finished task 2.0 in stage 167.0 (TID 434). 2255 bytes result sent to driver
[task-result-getter-1] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 2.0 in stage 167.0 (TID 434) in 5 ms on localhost (3/3)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ShuffleMapStage 167 (mapPartitionsWithIndex at BlockWeightedLeastSquares.scala:358) finished in 0.015 s
[task-result-getter-1] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 167.0, whose tasks have all completed, from pool 
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - looking for newly runnable stages
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - running: Set()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - waiting: Set(ResultStage 168)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - failed: Set()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents for ResultStage 168: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 168 (MapPartitionsRDD[488] at map at BlockWeightedLeastSquaresSuite.scala:47), which is now runnable
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(4920) called with curMem=38626, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_319 stored as values in memory (estimated size 4.8 KB, free 1919.9 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(2478) called with curMem=43546, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_319_piece0 stored as bytes in memory (estimated size 2.4 KB, free 1919.9 MB)
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_319_piece0 in memory on localhost:59157 (size: 2.4 KB, free: 1919.9 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 319 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 3 missing tasks from ResultStage 168 (MapPartitionsRDD[488] at map at BlockWeightedLeastSquaresSuite.scala:47)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 168.0 with 3 tasks
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 168.0 (TID 435, localhost, PROCESS_LOCAL, 2116 bytes)
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 168.0 (TID 435)
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Getting 3 non-empty blocks out of 3 blocks
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Getting 3 non-empty blocks out of 3 blocks
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Getting 3 non-empty blocks out of 3 blocks
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 168.0 (TID 435). 1707 bytes result sent to driver
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 1.0 in stage 168.0 (TID 436, localhost, PROCESS_LOCAL, 2116 bytes)
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Running task 1.0 in stage 168.0 (TID 436)
[task-result-getter-3] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 168.0 (TID 435) in 7 ms on localhost (1/3)
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Getting 3 non-empty blocks out of 3 blocks
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Getting 3 non-empty blocks out of 3 blocks
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Getting 3 non-empty blocks out of 3 blocks
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Finished task 1.0 in stage 168.0 (TID 436). 1707 bytes result sent to driver
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 2.0 in stage 168.0 (TID 437, localhost, PROCESS_LOCAL, 2116 bytes)
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Running task 2.0 in stage 168.0 (TID 437)
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 1.0 in stage 168.0 (TID 436) in 7 ms on localhost (2/3)
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Getting 3 non-empty blocks out of 3 blocks
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Getting 3 non-empty blocks out of 3 blocks
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Getting 3 non-empty blocks out of 3 blocks
[Executor task launch worker-2] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Started 0 remote fetches in 1 ms
[Executor task launch worker-2] INFO org.apache.spark.executor.Executor - Finished task 2.0 in stage 168.0 (TID 437). 1707 bytes result sent to driver
[task-result-getter-2] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 2.0 in stage 168.0 (TID 437) in 7 ms on localhost (3/3)
[task-result-getter-2] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 168.0, whose tasks have all completed, from pool 
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 168 (reduce at BlockWeightedLeastSquaresSuite.scala:56) finished in 0.019 s
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Job 165 finished: reduce at BlockWeightedLeastSquaresSuite.scala:56, took 0.045673 s
norm of gradient is 0.008125665854027573
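The bare "norm of gradient is ..." line is a println from the zero-gradient test (see "BlockWeighted solver solution should have zero gradient" below): per-partition gradient contributions are combined by the `reduce` of job 165 and the norm of the result is checked against a tolerance. A sketch of that style of check, with hypothetical values and tolerance since the suite's code isn't shown; Breeze is assumed for the vector math:

    import breeze.linalg.{DenseVector, norm}

    object ZeroGradientCheck {
      def main(args: Array[String]): Unit = {
        // Stand-ins for the per-partition gradient contributions the suite
        // combines with reduce at BlockWeightedLeastSquaresSuite.scala:56.
        val parts = Seq(DenseVector(0.003, -0.005), DenseVector(0.002, 0.004))
        val gradient = parts.reduce(_ + _)
        val tol = 1e-2 // hypothetical tolerance
        println(s"norm of gradient is ${norm(gradient)}")
        assert(norm(gradient) < tol, "solution should have (near-)zero gradient")
      }
    }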
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/metrics/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/api,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/static,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs,null}
[pool-4-thread-5] INFO org.apache.spark.ui.SparkUI - Stopped Spark web UI at http://localhost:4040
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Stopping DAGScheduler
[sparkDriver-akka.actor.default-dispatcher-4] INFO org.apache.spark.MapOutputTrackerMasterEndpoint - MapOutputTrackerMasterEndpoint stopped!
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - MemoryStore cleared
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManager - BlockManager stopped
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - BlockManagerMaster stopped
[sparkDriver-akka.actor.default-dispatcher-4] INFO org.apache.spark.scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint - OutputCommitCoordinator stopped!
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Successfully stopped SparkContext
[info] BlockWeightedLeastSquaresSuite:
[info] - BlockWeighted solver solution should work with empty partitions
[info] - Per-class solver solution should match BlockWeighted solver
[info] - BlockWeighted solver solution should have zero gradient
[info] - BlockWeighted solver should work with 1 class only
[info] - BlockWeighted solver should work with nFeatures not divisible by blockSize
[info] - groupByClasses should work correctly
[sparkDriver-akka.actor.default-dispatcher-5] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Shutting down remote daemon.
[sparkDriver-akka.actor.default-dispatcher-5] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remote daemon shut down; proceeding with flushing remote transports.
[sparkDriver-akka.actor.default-dispatcher-16] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remoting shut down.
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Running Spark version 1.5.2
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - Changing view acls to: jenkins
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - Changing modify acls to: jenkins
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(jenkins); users with modify permissions: Set(jenkins)
[sparkDriver-akka.actor.default-dispatcher-4] INFO akka.event.slf4j.Slf4jLogger - Slf4jLogger started
[sparkDriver-akka.actor.default-dispatcher-2] INFO Remoting - Starting remoting
[sparkDriver-akka.actor.default-dispatcher-4] INFO Remoting - Remoting started; listening on addresses :[akka.tcp://sparkDriver@localhost:40632]
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 40632.
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster
[pool-4-thread-5] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-e2f552cd-444a-4c0b-9d14-cd62aa20ca67
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - MemoryStore started with capacity 1920.0 MB
[pool-4-thread-5] INFO org.apache.spark.HttpFileServer - HTTP File server directory is /tmp/spark-50119b13-e072-4470-af20-ca691faafeba/httpd-9a0fcc95-8426-47cc-9267-ddbdd94df477
[pool-4-thread-5] INFO org.apache.spark.HttpServer - Starting HTTP Server
[pool-4-thread-5] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-5] INFO org.spark-project.jetty.server.AbstractConnector - Started SocketConnector@0.0.0.0:58927
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'HTTP file server' on port 58927.
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator
[pool-4-thread-5] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-5] INFO org.spark-project.jetty.server.AbstractConnector - Started SelectChannelConnector@0.0.0.0:4040
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040.
[pool-4-thread-5] INFO org.apache.spark.ui.SparkUI - Started SparkUI at http://localhost:4040
[pool-4-thread-5] WARN org.apache.spark.metrics.MetricsSystem - Using default name DAGScheduler for source because spark.app.id is not set.
[pool-4-thread-5] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 43363.
[pool-4-thread-5] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 43363
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - Trying to register BlockManager
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager localhost:43363 with 1920.0 MB RAM, BlockManagerId(driver, localhost, 43363)
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager
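Each suite spins up a fresh local-mode context, which is what produces this recurring startup block (driver actor system, BlockManager registration, web UI on port 4040). A minimal sketch of an equivalent Spark 1.5.x setup; the master and app name are illustrative:

    import org.apache.spark.{SparkConf, SparkContext}

    object LocalContext {
      def main(args: Array[String]): Unit = {
        val conf = new SparkConf()
          .setMaster("local")       // single-JVM driver + executor, as in this log
          .setAppName("test-suite") // spark.app.id stays unset, hence the MetricsSystem WARN
        val sc = new SparkContext(conf) // starts the BlockManager, SparkUI on 4040, etc.
        // ... run jobs ...
        sc.stop() // emits "Successfully stopped SparkContext"
      }
    }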
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Starting job: collect at MeanAveragePrecisionEvaluator.scala:63
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Registering RDD 3 (flatMap at MeanAveragePrecisionEvaluator.scala:32)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 0 (collect at MeanAveragePrecisionEvaluator.scala:63) with 4 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 1(collect at MeanAveragePrecisionEvaluator.scala:63)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List(ShuffleMapStage 0)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List(ShuffleMapStage 0)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ShuffleMapStage 0 (MapPartitionsRDD[3] at flatMap at MeanAveragePrecisionEvaluator.scala:32), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(3888) called with curMem=0, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0 stored as values in memory (estimated size 3.8 KB, free 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(2165) called with curMem=3888, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0_piece0 stored as bytes in memory (estimated size 2.1 KB, free 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_0_piece0 in memory on localhost:43363 (size: 2.1 KB, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 0 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 1 missing tasks from ShuffleMapStage 0 (MapPartitionsRDD[3] at flatMap at MeanAveragePrecisionEvaluator.scala:32)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 0.0 with 1 tasks
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 0.0 (TID 0, localhost, PROCESS_LOCAL, 2725 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 0.0 (TID 0)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 0.0 (TID 0). 1161 bytes result sent to driver
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ShuffleMapStage 0 (flatMap at MeanAveragePrecisionEvaluator.scala:32) finished in 0.020 s
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - looking for newly runnable stages
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - running: Set()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - waiting: Set(ResultStage 1)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - failed: Set()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents for ResultStage 1: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 1 (MapPartitionsRDD[5] at map at MeanAveragePrecisionEvaluator.scala:42), which is now runnable
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(4632) called with curMem=6053, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_1 stored as values in memory (estimated size 4.5 KB, free 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(2453) called with curMem=10685, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_1_piece0 stored as bytes in memory (estimated size 2.4 KB, free 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_1_piece0 in memory on localhost:43363 (size: 2.4 KB, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 1 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 4 missing tasks from ResultStage 1 (MapPartitionsRDD[5] at map at MeanAveragePrecisionEvaluator.scala:42)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 1.0 with 4 tasks
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 0.0 (TID 0) in 20 ms on localhost (1/1)
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 0.0, whose tasks have all completed, from pool 
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 1.0 (TID 1, localhost, PROCESS_LOCAL, 1901 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 1.0 (TID 1)
[Executor task launch worker-0] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Getting 1 non-empty blocks out of 1 blocks
[Executor task launch worker-0] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 1.0 (TID 1). 1299 bytes result sent to driver
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 1.0 in stage 1.0 (TID 2, localhost, PROCESS_LOCAL, 1901 bytes)
[task-result-getter-1] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 1.0 (TID 1) in 30 ms on localhost (1/4)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 1.0 in stage 1.0 (TID 2)
[Executor task launch worker-0] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Getting 1 non-empty blocks out of 1 blocks
[Executor task launch worker-0] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 1.0 in stage 1.0 (TID 2). 1299 bytes result sent to driver
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 2.0 in stage 1.0 (TID 3, localhost, PROCESS_LOCAL, 1901 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 2.0 in stage 1.0 (TID 3)
[task-result-getter-2] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 1.0 in stage 1.0 (TID 2) in 8 ms on localhost (2/4)
[Executor task launch worker-0] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Getting 1 non-empty blocks out of 1 blocks
[Executor task launch worker-0] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 2.0 in stage 1.0 (TID 3). 1299 bytes result sent to driver
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 3.0 in stage 1.0 (TID 4, localhost, PROCESS_LOCAL, 1901 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 3.0 in stage 1.0 (TID 4)
[task-result-getter-3] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 2.0 in stage 1.0 (TID 3) in 15 ms on localhost (3/4)
[Executor task launch worker-0] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Getting 1 non-empty blocks out of 1 blocks
[Executor task launch worker-0] INFO org.apache.spark.storage.ShuffleBlockFetcherIterator - Started 0 remote fetches in 0 ms
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 3.0 in stage 1.0 (TID 4). 1299 bytes result sent to driver
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 3.0 in stage 1.0 (TID 4) in 4 ms on localhost (4/4)
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 1.0, whose tasks have all completed, from pool 
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 1 (collect at MeanAveragePrecisionEvaluator.scala:63) finished in 0.056 s
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Job 0 finished: collect at MeanAveragePrecisionEvaluator.scala:63, took 0.085199 s
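Job 0 of this suite again splits at the shuffle: the `flatMap` at MeanAveragePrecisionEvaluator.scala:32 feeds ShuffleMapStage 0, and the per-key `map` plus `collect` form ResultStage 1. A sketch of the pipeline shape only, with made-up records and a placeholder aggregate; the actual average-precision computation is not reproduced here:

    import org.apache.spark.{SparkConf, SparkContext}

    object EvaluatorShape {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setMaster("local").setAppName("map-shape"))
        // Hypothetical per-class scores; the real records come from the
        // flatMap at MeanAveragePrecisionEvaluator.scala:32.
        val scored = sc.parallelize(Seq(("cat", 0.9), ("dog", 0.4), ("cat", 0.7)))
        val perClass = scored
          .groupByKey()                                      // shuffle boundary (ShuffleMapStage 0)
          .map { case (cls, ss) => (cls, ss.sum / ss.size) } // placeholder aggregate (ResultStage 1)
          .collect()                                         // driver-side collect, as at line 63
        perClass.foreach(println)
        sc.stop()
      }
    }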
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/metrics/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/api,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/static,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs,null}
[pool-4-thread-5] INFO org.apache.spark.ui.SparkUI - Stopped Spark web UI at http://localhost:4040
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Stopping DAGScheduler
[sparkDriver-akka.actor.default-dispatcher-14] INFO org.apache.spark.MapOutputTrackerMasterEndpoint - MapOutputTrackerMasterEndpoint stopped!
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - MemoryStore cleared
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManager - BlockManager stopped
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - BlockManagerMaster stopped
[sparkDriver-akka.actor.default-dispatcher-14] INFO org.apache.spark.scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint - OutputCommitCoordinator stopped!
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Successfully stopped SparkContext
[info] MeanAveragePrecisionSuite:
[info] - random map test
[sparkDriver-akka.actor.default-dispatcher-14] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Shutting down remote daemon.
[sparkDriver-akka.actor.default-dispatcher-14] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remote daemon shut down; proceeding with flushing remote transports.
[sparkDriver-akka.actor.default-dispatcher-14] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remoting shut down.
[info] HogExtractorSuite:
[info] - Load an Image and compute Hog Features
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Running Spark version 1.5.2
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - Changing view acls to: jenkins
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - Changing modify acls to: jenkins
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(jenkins); users with modify permissions: Set(jenkins)
[sparkDriver-akka.actor.default-dispatcher-5] INFO akka.event.slf4j.Slf4jLogger - Slf4jLogger started
[sparkDriver-akka.actor.default-dispatcher-2] INFO Remoting - Starting remoting
[sparkDriver-akka.actor.default-dispatcher-2] INFO Remoting - Remoting started; listening on addresses :[akka.tcp://sparkDriver@localhost:38450]
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 38450.
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster
[pool-4-thread-5] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-c70541f8-8112-4dd5-a686-8dcab79b510e
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - MemoryStore started with capacity 1920.0 MB
[pool-4-thread-5] INFO org.apache.spark.HttpFileServer - HTTP File server directory is /tmp/spark-50119b13-e072-4470-af20-ca691faafeba/httpd-6f7f53f2-e62d-4744-aa13-9382224d7bb1
[pool-4-thread-5] INFO org.apache.spark.HttpServer - Starting HTTP Server
[pool-4-thread-5] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-5] INFO org.spark-project.jetty.server.AbstractConnector - Started SocketConnector@0.0.0.0:34448
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'HTTP file server' on port 34448.
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator
[pool-4-thread-5] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-5] INFO org.spark-project.jetty.server.AbstractConnector - Started SelectChannelConnector@0.0.0.0:4040
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040.
[pool-4-thread-5] INFO org.apache.spark.ui.SparkUI - Started SparkUI at http://localhost:4040
[pool-4-thread-5] WARN org.apache.spark.metrics.MetricsSystem - Using default name DAGScheduler for source because spark.app.id is not set.
[pool-4-thread-5] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 47534.
[pool-4-thread-5] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 47534
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - Trying to register BlockManager
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager localhost:47534 with 1920.0 MB RAM, BlockManagerId(driver, localhost, 47534)
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Starting job: collect at KMeansPlusPlus.scala:90
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 0 (collect at KMeansPlusPlus.scala:90) with 1 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 0(collect at KMeansPlusPlus.scala:90)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 0 (ParallelCollectionRDD[0] at parallelize at KMeansPlusPlusSuite.scala:16), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1328) called with curMem=0, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0 stored as values in memory (estimated size 1328.0 B, free 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(872) called with curMem=1328, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0_piece0 stored as bytes in memory (estimated size 872.0 B, free 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_0_piece0 in memory on localhost:47534 (size: 872.0 B, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 0 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 1 missing tasks from ResultStage 0 (ParallelCollectionRDD[0] at parallelize at KMeansPlusPlusSuite.scala:16)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 0.0 with 1 tasks
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 0.0 (TID 0, localhost, PROCESS_LOCAL, 2355 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 0.0 (TID 0)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 0.0 (TID 0). 1202 bytes result sent to driver
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 0.0 (TID 0) in 8 ms on localhost (1/1)
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 0.0, whose tasks have all completed, from pool 
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 0 (collect at KMeansPlusPlus.scala:90) finished in 0.009 s
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Job 0 finished: collect at KMeansPlusPlus.scala:90, took 0.012766 s
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Starting job: collect at KMeansPlusPlus.scala:90
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 1 (collect at KMeansPlusPlus.scala:90) with 1 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 1(collect at KMeansPlusPlus.scala:90)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 1 (ParallelCollectionRDD[0] at parallelize at KMeansPlusPlusSuite.scala:16), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1328) called with curMem=2200, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_1 stored as values in memory (estimated size 1328.0 B, free 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(872) called with curMem=3528, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_1_piece0 stored as bytes in memory (estimated size 872.0 B, free 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_1_piece0 in memory on localhost:47534 (size: 872.0 B, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 1 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 1 missing tasks from ResultStage 1 (ParallelCollectionRDD[0] at parallelize at KMeansPlusPlusSuite.scala:16)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 1.0 with 1 tasks
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 1.0 (TID 1, localhost, PROCESS_LOCAL, 2355 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 1.0 (TID 1)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 1.0 (TID 1). 1202 bytes result sent to driver
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 1 (collect at KMeansPlusPlus.scala:90) finished in 0.004 s
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Job 1 finished: collect at KMeansPlusPlus.scala:90, took 0.009095 s
[pool-4-thread-5] INFO nodes.learning.KMeansPlusPlusEstimator - Iteration: 1 current cost 4.333333333333333 imp true
[pool-4-thread-5] INFO nodes.learning.KMeansPlusPlusEstimator - Iteration: 2 current cost 4.333333333333333 imp false
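The two iteration lines above show the estimator's stopping rule: the cost is unchanged at iteration 2, so "imp" (improved) flips to false and the Lloyd's iterations end. A hedged sketch of such a loop, with costOf standing in as a hypothetical placeholder for recomputing centers and their within-cluster cost, looks like:

    // Hedged sketch of the rule behind "Iteration: N current cost C imp B":
    // iterate while the cost strictly improves, up to maxIterations.
    def lloydLoop(maxIterations: Int)(costOf: Int => Double): Double = {
      var prevCost = Double.PositiveInfinity
      var cost = prevCost
      var improved = true
      var iter = 1
      while (improved && iter <= maxIterations) {
        cost = costOf(iter)
        improved = cost < prevCost
        println(s"Iteration: $iter current cost $cost imp $improved")
        prevCost = cost
        iter += 1
      }
      cost
    }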
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Starting job: collect at KMeansPlusPlusSuite.scala:30
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 2 (collect at KMeansPlusPlusSuite.scala:30) with 1 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 2(collect at KMeansPlusPlusSuite.scala:30)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 2 (MapPartitionsRDD[1] at mapPartitions at KMeansPlusPlus.scala:63), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(2664) called with curMem=4400, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_2 stored as values in memory (estimated size 2.6 KB, free 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1665) called with curMem=7064, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_2_piece0 stored as bytes in memory (estimated size 1665.0 B, free 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_2_piece0 in memory on localhost:47534 (size: 1665.0 B, free: 1920.0 MB)
[task-result-getter-1] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 1.0 (TID 1) in 4 ms on localhost (1/1)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 2 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 1 missing tasks from ResultStage 2 (MapPartitionsRDD[1] at mapPartitions at KMeansPlusPlus.scala:63)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 2.0 with 1 tasks
[task-result-getter-1] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 1.0, whose tasks have all completed, from pool 
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 2.0 (TID 2, localhost, PROCESS_LOCAL, 2355 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 2.0 (TID 2)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 2.0 (TID 2). 1154 bytes result sent to driver
[task-result-getter-2] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 2.0 (TID 2) in 4 ms on localhost (1/1)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 2 (collect at KMeansPlusPlusSuite.scala:30) finished in 0.005 s
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Job 2 finished: collect at KMeansPlusPlusSuite.scala:30, took 0.008267 s
[task-result-getter-2] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 2.0, whose tasks have all completed, from pool 
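Job 2's lineage ("MapPartitionsRDD[1] at mapPartitions at KMeansPlusPlus.scala:63") is the fitted model being applied: assignments are computed a partition at a time and then collected by the suite. A hedged sketch of that step, assuming plain Array[Double] points and squared Euclidean distance rather than KeystoneML's actual types, is:

    import org.apache.spark.rdd.RDD

    // Hedged sketch of the "mapPartitions at KMeansPlusPlus.scala:63" step:
    // assign every point in a partition to its nearest center.
    def assignAll(points: RDD[Array[Double]],
                  centers: Array[Array[Double]]): RDD[Int] =
      points.mapPartitions { iter =>
        iter.map { p =>
          centers.indices.minBy { i =>
            var d = 0.0
            var j = 0
            while (j < p.length) { val t = p(j) - centers(i)(j); d += t * t; j += 1 }
            d
          }
        }
      }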
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/metrics/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/api,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/static,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs,null}
[pool-4-thread-5] INFO org.apache.spark.ui.SparkUI - Stopped Spark web UI at http://localhost:4040
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Stopping DAGScheduler
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.MapOutputTrackerMasterEndpoint - MapOutputTrackerMasterEndpoint stopped!
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - MemoryStore cleared
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManager - BlockManager stopped
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - BlockManagerMaster stopped
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint - OutputCommitCoordinator stopped!
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Successfully stopped SparkContext
[sparkDriver-akka.actor.default-dispatcher-16] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Shutting down remote daemon.
[sparkDriver-akka.actor.default-dispatcher-16] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remote daemon shut down; proceeding with flushing remote transports.
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Running Spark version 1.5.2
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - Changing view acls to: jenkins
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - Changing modify acls to: jenkins
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(jenkins); users with modify permissions: Set(jenkins)
[sparkDriver-akka.actor.default-dispatcher-16] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remoting shut down.
[sparkDriver-akka.actor.default-dispatcher-2] INFO akka.event.slf4j.Slf4jLogger - Slf4jLogger started
[sparkDriver-akka.actor.default-dispatcher-3] INFO Remoting - Starting remoting
[sparkDriver-akka.actor.default-dispatcher-2] INFO Remoting - Remoting started; listening on addresses :[akka.tcp://sparkDriver@localhost:57475]
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 57475.
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster
[pool-4-thread-5] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-9e2b1326-2391-4b57-b1d6-0aea16b46689
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - MemoryStore started with capacity 1920.0 MB
[pool-4-thread-5] INFO org.apache.spark.HttpFileServer - HTTP File server directory is /tmp/spark-50119b13-e072-4470-af20-ca691faafeba/httpd-89e54458-3085-4186-b12d-3799dbc516c5
[pool-4-thread-5] INFO org.apache.spark.HttpServer - Starting HTTP Server
[pool-4-thread-5] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-5] INFO org.spark-project.jetty.server.AbstractConnector - Started SocketConnector@0.0.0.0:57680
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'HTTP file server' on port 57680.
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator
[pool-4-thread-5] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-5] INFO org.spark-project.jetty.server.AbstractConnector - Started SelectChannelConnector@0.0.0.0:4040
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040.
[pool-4-thread-5] INFO org.apache.spark.ui.SparkUI - Started SparkUI at http://localhost:4040
[pool-4-thread-5] WARN org.apache.spark.metrics.MetricsSystem - Using default name DAGScheduler for source because spark.app.id is not set.
[pool-4-thread-5] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 57783.
[pool-4-thread-5] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 57783
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - Trying to register BlockManager
[sparkDriver-akka.actor.default-dispatcher-4] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager localhost:57783 with 1920.0 MB RAM, BlockManagerId(driver, localhost, 57783)
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager
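Between the two tests the harness tears one SparkContext down and starts a fresh one (new driver port, new block manager, new Jetty UI), which is why the startup and shutdown lines repeat throughout this log. A hedged ScalaTest sketch of that lifecycle (the suite name and per-test granularity are assumptions; the real suites may share a context) is:

    import org.apache.spark.{SparkConf, SparkContext}
    import org.scalatest.{BeforeAndAfterEach, FunSuite}

    // Hedged sketch: each test gets a fresh local SparkContext and stops it
    // afterwards, producing the repeated "Running Spark version" /
    // "Successfully stopped SparkContext" pairs seen above.
    class LocalSparkSketch extends FunSuite with BeforeAndAfterEach {
      @transient var sc: SparkContext = _

      override def beforeEach(): Unit = {
        sc = new SparkContext(new SparkConf().setMaster("local").setAppName("test"))
      }

      override def afterEach(): Unit = {
        if (sc != null) sc.stop()
      }

      test("context is alive") {
        assert(sc.parallelize(1 to 3, 1).count() === 3L)
      }
    }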
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Starting job: collect at KMeansPlusPlus.scala:90
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 0 (collect at KMeansPlusPlus.scala:90) with 1 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 0(collect at KMeansPlusPlus.scala:90)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 0 (ParallelCollectionRDD[0] at parallelize at KMeansPlusPlusSuite.scala:38), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1328) called with curMem=0, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0 stored as values in memory (estimated size 1328.0 B, free 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(872) called with curMem=1328, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0_piece0 stored as bytes in memory (estimated size 872.0 B, free 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-4] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_0_piece0 in memory on localhost:57783 (size: 872.0 B, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 0 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 1 missing tasks from ResultStage 0 (ParallelCollectionRDD[0] at parallelize at KMeansPlusPlusSuite.scala:38)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 0.0 with 1 tasks
[sparkDriver-akka.actor.default-dispatcher-4] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 0.0 (TID 0, localhost, PROCESS_LOCAL, 2407 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 0.0 (TID 0)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 0.0 (TID 0). 1254 bytes result sent to driver
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 0.0 (TID 0) in 14 ms on localhost (1/1)
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 0.0, whose tasks have all completed, from pool 
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 0 (collect at KMeansPlusPlus.scala:90) finished in 0.014 s
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Job 0 finished: collect at KMeansPlusPlus.scala:90, took 0.022304 s
[pool-4-thread-5] INFO nodes.learning.KMeansPlusPlusEstimator - Iteration: 1 current cost 0.5 imp true
[pool-4-thread-5] INFO nodes.learning.KMeansPlusPlusEstimator - Iteration: 2 current cost 0.5 imp false
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Starting job: collect at KMeansPlusPlus.scala:90
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 1 (collect at KMeansPlusPlus.scala:90) with 1 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 1(collect at KMeansPlusPlus.scala:90)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 1 (ParallelCollectionRDD[0] at parallelize at KMeansPlusPlusSuite.scala:38), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1328) called with curMem=2200, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_1 stored as values in memory (estimated size 1328.0 B, free 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(872) called with curMem=3528, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_1_piece0 stored as bytes in memory (estimated size 872.0 B, free 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-4] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_1_piece0 in memory on localhost:57783 (size: 872.0 B, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 1 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 1 missing tasks from ResultStage 1 (ParallelCollectionRDD[0] at parallelize at KMeansPlusPlusSuite.scala:38)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 1.0 with 1 tasks
[sparkDriver-akka.actor.default-dispatcher-4] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 1.0 (TID 1, localhost, PROCESS_LOCAL, 2407 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 1.0 (TID 1)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 1.0 (TID 1). 1254 bytes result sent to driver
[task-result-getter-1] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 1.0 (TID 1) in 6 ms on localhost (1/1)
[task-result-getter-1] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 1.0, whose tasks have all completed, from pool 
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 1 (collect at KMeansPlusPlus.scala:90) finished in 0.005 s
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Job 1 finished: collect at KMeansPlusPlus.scala:90, took 0.013389 s
[pool-4-thread-5] INFO nodes.learning.KMeansPlusPlusEstimator - Iteration: 1 current cost 0.5 imp true
[pool-4-thread-5] INFO nodes.learning.KMeansPlusPlusEstimator - Iteration: 2 current cost 0.5 imp false
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Starting job: collect at KMeansPlusPlusSuite.scala:58
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 2 (collect at KMeansPlusPlusSuite.scala:58) with 1 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 2(collect at KMeansPlusPlusSuite.scala:58)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 2 (MapPartitionsRDD[1] at mapPartitions at KMeansPlusPlus.scala:63), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(2688) called with curMem=4400, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_2 stored as values in memory (estimated size 2.6 KB, free 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1681) called with curMem=7088, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_2_piece0 stored as bytes in memory (estimated size 1681.0 B, free 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-4] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_2_piece0 in memory on localhost:57783 (size: 1681.0 B, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 2 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 1 missing tasks from ResultStage 2 (MapPartitionsRDD[1] at mapPartitions at KMeansPlusPlus.scala:63)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 2.0 with 1 tasks
[sparkDriver-akka.actor.default-dispatcher-4] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 2.0 (TID 2, localhost, PROCESS_LOCAL, 2407 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 2.0 (TID 2)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 2.0 (TID 2). 1222 bytes result sent to driver
[task-result-getter-2] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 2.0 (TID 2) in 12 ms on localhost (1/1)
[task-result-getter-2] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 2.0, whose tasks have all completed, from pool 
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 2 (collect at KMeansPlusPlusSuite.scala:58) finished in 0.011 s
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Job 2 finished: collect at KMeansPlusPlusSuite.scala:58, took 0.019439 s
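Every "Block broadcast_N stored as values" / "broadcast_N_piece0 stored as bytes" pair above is one broadcast: the deserialized object plus its serialized piece. The broadcasts in this log are task closures created by the DAGScheduler, but a user-level broadcast produces the same MemoryStore lines; a hedged sketch (payload is an illustrative assumption) is:

    import org.apache.spark.{SparkConf, SparkContext}

    // Hedged sketch: an explicit broadcast yields the same
    // "stored as values" / "piece0 stored as bytes" pair as the
    // scheduler's closure broadcasts above.
    object BroadcastSketch {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setMaster("local").setAppName("bc"))
        val centers = sc.broadcast(Array(0.5, 4.3))
        val nearest = sc.parallelize(Seq(0.4, 4.0), 1)
          .map(x => centers.value.minBy(c => math.abs(c - x)))
          .collect()
        println(nearest.mkString(", "))
        sc.stop()
      }
    }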
(... 25 ServletContextHandler "stopped" messages elided; identical handler list to the shutdown block above ...)
[pool-4-thread-5] INFO org.apache.spark.ui.SparkUI - Stopped Spark web UI at http://localhost:4040
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Stopping DAGScheduler
[sparkDriver-akka.actor.default-dispatcher-6] INFO org.apache.spark.MapOutputTrackerMasterEndpoint - MapOutputTrackerMasterEndpoint stopped!
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - MemoryStore cleared
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManager - BlockManager stopped
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - BlockManagerMaster stopped
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint - OutputCommitCoordinator stopped!
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Successfully stopped SparkContext
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Running Spark version 1.5.2
[sparkDriver-akka.actor.default-dispatcher-3] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Shutting down remote daemon.
[sparkDriver-akka.actor.default-dispatcher-3] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remote daemon shut down; proceeding with flushing remote transports.
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - Changing view acls to: jenkins
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - Changing modify acls to: jenkins
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(jenkins); users with modify permissions: Set(jenkins)
[sparkDriver-akka.actor.default-dispatcher-6] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remoting shut down.
[sparkDriver-akka.actor.default-dispatcher-2] INFO akka.event.slf4j.Slf4jLogger - Slf4jLogger started
[sparkDriver-akka.actor.default-dispatcher-3] INFO Remoting - Starting remoting
[sparkDriver-akka.actor.default-dispatcher-4] INFO Remoting - Remoting started; listening on addresses :[akka.tcp://sparkDriver@localhost:57512]
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 57512.
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster
[pool-4-thread-5] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-703084d4-2cbf-4b7c-b408-e5b01be3bdd0
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - MemoryStore started with capacity 1920.0 MB
[pool-4-thread-5] INFO org.apache.spark.HttpFileServer - HTTP File server directory is /tmp/spark-50119b13-e072-4470-af20-ca691faafeba/httpd-d5cafccc-0660-4a61-9f14-4705dcf39fe0
[pool-4-thread-5] INFO org.apache.spark.HttpServer - Starting HTTP Server
[pool-4-thread-5] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-5] INFO org.spark-project.jetty.server.AbstractConnector - Started SocketConnector@0.0.0.0:56582
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'HTTP file server' on port 56582.
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator
[pool-4-thread-5] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-5] INFO org.spark-project.jetty.server.AbstractConnector - Started SelectChannelConnector@0.0.0.0:4040
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040.
[pool-4-thread-5] INFO org.apache.spark.ui.SparkUI - Started SparkUI at http://localhost:4040
[pool-4-thread-5] WARN org.apache.spark.metrics.MetricsSystem - Using default name DAGScheduler for source because spark.app.id is not set.
[pool-4-thread-5] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 56807.
[pool-4-thread-5] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 56807
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - Trying to register BlockManager
[sparkDriver-akka.actor.default-dispatcher-4] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager localhost:56807 with 1920.0 MB RAM, BlockManagerId(driver, localhost, 56807)
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Starting job: collect at KMeansPlusPlusSuite.scala:92
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 0 (collect at KMeansPlusPlusSuite.scala:92) with 1 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 0(collect at KMeansPlusPlusSuite.scala:92)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 0 (MapPartitionsRDD[1] at mapPartitions at KMeansPlusPlus.scala:63), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(2616) called with curMem=0, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0 stored as values in memory (estimated size 2.6 KB, free 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1636) called with curMem=2616, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0_piece0 stored as bytes in memory (estimated size 1636.0 B, free 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-4] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_0_piece0 in memory on localhost:56807 (size: 1636.0 B, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 0 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 1 missing tasks from ResultStage 0 (MapPartitionsRDD[1] at mapPartitions at KMeansPlusPlus.scala:63)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 0.0 with 1 tasks
[sparkDriver-akka.actor.default-dispatcher-4] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 0.0 (TID 0, localhost, PROCESS_LOCAL, 2407 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 0.0 (TID 0)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 0.0 (TID 0). 1222 bytes result sent to driver
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 0.0 (TID 0) in 6 ms on localhost (1/1)
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 0.0, whose tasks have all completed, from pool 
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 0 (collect at KMeansPlusPlusSuite.scala:92) finished in 0.006 s
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Job 0 finished: collect at KMeansPlusPlusSuite.scala:92, took 0.011167 s
(... 25 ServletContextHandler "stopped" messages elided; identical handler list to the earlier shutdown blocks ...)
[pool-4-thread-5] INFO org.apache.spark.ui.SparkUI - Stopped Spark web UI at http://localhost:4040
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Stopping DAGScheduler
[sparkDriver-akka.actor.default-dispatcher-4] INFO org.apache.spark.MapOutputTrackerMasterEndpoint - MapOutputTrackerMasterEndpoint stopped!
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - MemoryStore cleared
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManager - BlockManager stopped
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - BlockManagerMaster stopped
[sparkDriver-akka.actor.default-dispatcher-4] INFO org.apache.spark.scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint - OutputCommitCoordinator stopped!
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Successfully stopped SparkContext
[info] KMeansPlusPlusSuite:
[info] - K-Means++ Single Center
[info] - K-Means++ Two Centers
[info] - K-Means Transformer
[sparkDriver-akka.actor.default-dispatcher-4] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Shutting down remote daemon.
[sparkDriver-akka.actor.default-dispatcher-4] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remote daemon shut down; proceeding with flushing remote transports.
[sparkDriver-akka.actor.default-dispatcher-4] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remoting shut down.
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Running Spark version 1.5.2
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - Changing view acls to: jenkins
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - Changing modify acls to: jenkins
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(jenkins); users with modify permissions: Set(jenkins)
[sparkDriver-akka.actor.default-dispatcher-3] INFO akka.event.slf4j.Slf4jLogger - Slf4jLogger started
[sparkDriver-akka.actor.default-dispatcher-3] INFO Remoting - Starting remoting
[sparkDriver-akka.actor.default-dispatcher-3] INFO Remoting - Remoting started; listening on addresses :[akka.tcp://sparkDriver@localhost:37214]
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 37214.
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster
[pool-4-thread-5] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-ed977e28-f6c1-4871-8db2-63011b4915ab
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - MemoryStore started with capacity 1920.0 MB
[pool-4-thread-5] INFO org.apache.spark.HttpFileServer - HTTP File server directory is /tmp/spark-50119b13-e072-4470-af20-ca691faafeba/httpd-92dd8cfd-f5f6-4e08-b892-79cc5acc7a1b
[pool-4-thread-5] INFO org.apache.spark.HttpServer - Starting HTTP Server
[pool-4-thread-5] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-5] INFO org.spark-project.jetty.server.AbstractConnector - Started SocketConnector@0.0.0.0:46638
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'HTTP file server' on port 46638.
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator
[pool-4-thread-5] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-5] INFO org.spark-project.jetty.server.AbstractConnector - Started SelectChannelConnector@0.0.0.0:4040
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040.
[pool-4-thread-5] INFO org.apache.spark.ui.SparkUI - Started SparkUI at http://localhost:4040
[pool-4-thread-5] WARN org.apache.spark.metrics.MetricsSystem - Using default name DAGScheduler for source because spark.app.id is not set.
[pool-4-thread-5] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 52622.
[pool-4-thread-5] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 52622
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - Trying to register BlockManager
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager localhost:52622 with 1920.0 MB RAM, BlockManagerId(driver, localhost, 52622)
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager
[pool-4-thread-5] INFO workflow.ConcretePipeline - Fitting '$anonfun$1$$anon$1' [1]
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Starting job: first at EstimatorSuite.scala:14
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 0 (first at EstimatorSuite.scala:14) with 1 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 0(first at EstimatorSuite.scala:14)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 0 (ParallelCollectionRDD[0] at parallelize at EstimatorSuite.scala:19), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1288) called with curMem=0, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0 stored as values in memory (estimated size 1288.0 B, free 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(869) called with curMem=1288, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0_piece0 stored as bytes in memory (estimated size 869.0 B, free 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_0_piece0 in memory on localhost:52622 (size: 869.0 B, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 0 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 1 missing tasks from ResultStage 0 (ParallelCollectionRDD[0] at parallelize at EstimatorSuite.scala:19)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 0.0 with 1 tasks
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 0.0 (TID 0, localhost, PROCESS_LOCAL, 2037 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 0.0 (TID 0)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 0.0 (TID 0). 902 bytes result sent to driver
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 0 (first at EstimatorSuite.scala:14) finished in 0.022 s
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Job 0 finished: first at EstimatorSuite.scala:14, took 0.027564 s
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 0.0 (TID 0) in 22 ms on localhost (1/1)
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 0.0, whose tasks have all completed, from pool 
[pool-4-thread-5] INFO workflow.ConcretePipeline - Finished fitting '$anonfun$1$$anon$1' [1]
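The "Fitting '$anonfun$1$$anon$1' [1]" / "Finished fitting" pair brackets one estimator fit, which here runs a single small job over the data ("first at EstimatorSuite.scala:14"). A hedged sketch of that pattern (the trait and names are hypothetical, not KeystoneML's actual API) is below; note that an anonymous subclass like this one is exactly what scalac names "$anonfun$1$$anon$1":

    import org.apache.spark.rdd.RDD

    // Hedged sketch: fit() inspects the data with one Spark job (a first(),
    // as in the log) and returns a per-element function to apply later.
    trait SimpleEstimator[A, B] {
      def fit(data: RDD[A]): A => B
    }

    object EstimatorSketch {
      val scaleToFirst = new SimpleEstimator[Double, Double] {
        def fit(data: RDD[Double]): Double => Double = {
          val base = data.first()   // triggers one job, like Job 0 above
          x => x / base
        }
      }
    }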
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Starting job: collect at EstimatorSuite.scala:23
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 1 (collect at EstimatorSuite.scala:23) with 1 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 1(collect at EstimatorSuite.scala:23)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 1 (MapPartitionsRDD[2] at map at Transformer.scala:56), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1960) called with curMem=2157, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_1 stored as values in memory (estimated size 1960.0 B, free 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1235) called with curMem=4117, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_1_piece0 stored as bytes in memory (estimated size 1235.0 B, free 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_1_piece0 in memory on localhost:52622 (size: 1235.0 B, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 1 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 1 missing tasks from ResultStage 1 (MapPartitionsRDD[2] at map at Transformer.scala:56)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 1.0 with 1 tasks
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 1.0 (TID 1, localhost, PROCESS_LOCAL, 2037 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 1.0 (TID 1)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 1.0 (TID 1). 910 bytes result sent to driver
[task-result-getter-1] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 1.0 (TID 1) in 3 ms on localhost (1/1)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 1 (collect at EstimatorSuite.scala:23) finished in 0.004 s
[task-result-getter-1] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 1.0, whose tasks have all completed, from pool 
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Job 1 finished: collect at EstimatorSuite.scala:23, took 0.007452 s
(... 25 ServletContextHandler "stopped" messages elided; identical handler list to the earlier shutdown blocks ...)
[pool-4-thread-5] INFO org.apache.spark.ui.SparkUI - Stopped Spark web UI at http://localhost:4040
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Stopping DAGScheduler
[sparkDriver-akka.actor.default-dispatcher-15] INFO org.apache.spark.MapOutputTrackerMasterEndpoint - MapOutputTrackerMasterEndpoint stopped!
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - MemoryStore cleared
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManager - BlockManager stopped
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - BlockManagerMaster stopped
[sparkDriver-akka.actor.default-dispatcher-4] INFO org.apache.spark.scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint - OutputCommitCoordinator stopped!
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Successfully stopped SparkContext
[info] EstimatorSuite:
[info] - estimator withData
[sparkDriver-akka.actor.default-dispatcher-2] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Shutting down remote daemon.
[sparkDriver-akka.actor.default-dispatcher-2] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remote daemon shut down; proceeding with flushing remote transports.
[sparkDriver-akka.actor.default-dispatcher-4] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remoting shut down.
[info] RandomPatcherSuite:
[info] - patch dimensions, number
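RandomPatcherSuite runs entirely on the driver (no Spark jobs appear for it). Judging only by the test name "patch dimensions, number", the property checked is that every sampled patch fits inside the image and that the requested count is produced; a hedged sketch under those assumptions (the real RandomPatcher operates on KeystoneML Image objects, not raw sizes) is:

    import scala.util.Random

    // Hedged sketch: draw numPatches top-left origins so a
    // patchSize x patchSize window always fits in a width x height image.
    def randomPatchOrigins(width: Int, height: Int, patchSize: Int,
                           numPatches: Int, rnd: Random = new Random()): Seq[(Int, Int)] = {
      require(patchSize <= width && patchSize <= height, "patch must fit in image")
      Seq.fill(numPatches)(
        (rnd.nextInt(width - patchSize + 1), rnd.nextInt(height - patchSize + 1)))
    }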
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Running Spark version 1.5.2
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - Changing view acls to: jenkins
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - Changing modify acls to: jenkins
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(jenkins); users with modify permissions: Set(jenkins)
[sparkDriver-akka.actor.default-dispatcher-2] INFO akka.event.slf4j.Slf4jLogger - Slf4jLogger started
[sparkDriver-akka.actor.default-dispatcher-4] INFO Remoting - Starting remoting
[sparkDriver-akka.actor.default-dispatcher-5] INFO Remoting - Remoting started; listening on addresses :[akka.tcp://sparkDriver@localhost:34049]
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 34049.
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster
[pool-4-thread-5] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-de1ee0bd-1cb9-49cb-9f51-c2f6a9196861
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - MemoryStore started with capacity 1920.0 MB
[pool-4-thread-5] INFO org.apache.spark.HttpFileServer - HTTP File server directory is /tmp/spark-50119b13-e072-4470-af20-ca691faafeba/httpd-ee7ee8db-b2c5-45bf-aed0-6f9f57da9487
[pool-4-thread-5] INFO org.apache.spark.HttpServer - Starting HTTP Server
[pool-4-thread-5] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-5] INFO org.spark-project.jetty.server.AbstractConnector - Started SocketConnector@0.0.0.0:52961
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'HTTP file server' on port 52961.
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator
[pool-4-thread-5] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-5] INFO org.spark-project.jetty.server.AbstractConnector - Started SelectChannelConnector@0.0.0.0:4040
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040.
[pool-4-thread-5] INFO org.apache.spark.ui.SparkUI - Started SparkUI at http://localhost:4040
[pool-4-thread-5] WARN org.apache.spark.metrics.MetricsSystem - Using default name DAGScheduler for source because spark.app.id is not set.
[pool-4-thread-5] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 42645.
[pool-4-thread-5] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 42645
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - Trying to register BlockManager
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager localhost:42645 with 1920.0 MB RAM, BlockManagerId(driver, localhost, 42645)
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Starting job: collect at StringUtilsSuite.scala:11
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 0 (collect at StringUtilsSuite.scala:11) with 1 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 0(collect at StringUtilsSuite.scala:11)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 0 (MapPartitionsRDD[1] at map at Transformer.scala:27), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(2248) called with curMem=0, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0 stored as values in memory (estimated size 2.2 KB, free 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1407) called with curMem=2248, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0_piece0 stored as bytes in memory (estimated size 1407.0 B, free 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_0_piece0 in memory on localhost:42645 (size: 1407.0 B, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 0 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 1 missing tasks from ResultStage 0 (MapPartitionsRDD[1] at map at Transformer.scala:27)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 0.0 with 1 tasks
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 0.0 (TID 0, localhost, PROCESS_LOCAL, 2153 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 0.0 (TID 0)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 0.0 (TID 0). 976 bytes result sent to driver
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 0.0 (TID 0) in 13 ms on localhost (1/1)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 0 (collect at StringUtilsSuite.scala:11) finished in 0.013 s
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 0.0, whose tasks have all completed, from pool 
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Job 0 finished: collect at StringUtilsSuite.scala:11, took 0.022457 s
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/metrics/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/api,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/static,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs,null}
[pool-4-thread-5] INFO org.apache.spark.ui.SparkUI - Stopped Spark web UI at http://localhost:4040
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Stopping DAGScheduler
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.MapOutputTrackerMasterEndpoint - MapOutputTrackerMasterEndpoint stopped!
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - MemoryStore cleared
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManager - BlockManager stopped
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - BlockManagerMaster stopped
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint - OutputCommitCoordinator stopped!
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Successfully stopped SparkContext
[sparkDriver-akka.actor.default-dispatcher-3] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Shutting down remote daemon.
[sparkDriver-akka.actor.default-dispatcher-3] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remote daemon shut down; proceeding with flushing remote transports.
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Running Spark version 1.5.2
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - Changing view acls to: jenkins
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - Changing modify acls to: jenkins
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(jenkins); users with modify permissions: Set(jenkins)
[sparkDriver-akka.actor.default-dispatcher-3] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remoting shut down.
[sparkDriver-akka.actor.default-dispatcher-2] INFO akka.event.slf4j.Slf4jLogger - Slf4jLogger started
[sparkDriver-akka.actor.default-dispatcher-5] INFO Remoting - Starting remoting
[sparkDriver-akka.actor.default-dispatcher-5] INFO Remoting - Remoting started; listening on addresses :[akka.tcp://sparkDriver@localhost:45177]
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 45177.
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster
[pool-4-thread-5] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-790ccac7-68a0-47c7-bb75-8a5f331e454b
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - MemoryStore started with capacity 1920.0 MB
[pool-4-thread-5] INFO org.apache.spark.HttpFileServer - HTTP File server directory is /tmp/spark-50119b13-e072-4470-af20-ca691faafeba/httpd-dc2f410b-924d-4f04-b4f0-577c6c0183c0
[pool-4-thread-5] INFO org.apache.spark.HttpServer - Starting HTTP Server
[pool-4-thread-5] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-5] INFO org.spark-project.jetty.server.AbstractConnector - Started SocketConnector@0.0.0.0:55136
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'HTTP file server' on port 55136.
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator
[pool-4-thread-5] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-5] INFO org.spark-project.jetty.server.AbstractConnector - Started SelectChannelConnector@0.0.0.0:4040
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040.
[pool-4-thread-5] INFO org.apache.spark.ui.SparkUI - Started SparkUI at http://localhost:4040
[pool-4-thread-5] WARN org.apache.spark.metrics.MetricsSystem - Using default name DAGScheduler for source because spark.app.id is not set.
[pool-4-thread-5] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 59060.
[pool-4-thread-5] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 59060
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - Trying to register BlockManager
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager localhost:59060 with 1920.0 MB RAM, BlockManagerId(driver, localhost, 59060)
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Starting job: collect at StringUtilsSuite.scala:17
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 0 (collect at StringUtilsSuite.scala:17) with 1 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 0(collect at StringUtilsSuite.scala:17)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 0 (MapPartitionsRDD[1] at map at Transformer.scala:27), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(2448) called with curMem=0, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0 stored as values in memory (estimated size 2.4 KB, free 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1560) called with curMem=2448, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0_piece0 stored as bytes in memory (estimated size 1560.0 B, free 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_0_piece0 in memory on localhost:59060 (size: 1560.0 B, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 0 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 1 missing tasks from ResultStage 0 (MapPartitionsRDD[1] at map at Transformer.scala:27)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 0.0 with 1 tasks
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 0.0 (TID 0, localhost, PROCESS_LOCAL, 2153 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 0.0 (TID 0)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 0.0 (TID 0). 981 bytes result sent to driver
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 0.0 (TID 0) in 6 ms on localhost (1/1)
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 0.0, whose tasks have all completed, from pool 
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 0 (collect at StringUtilsSuite.scala:17) finished in 0.008 s
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Job 0 finished: collect at StringUtilsSuite.scala:17, took 0.014518 s
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/metrics/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/api,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/static,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs,null}
[pool-4-thread-5] INFO org.apache.spark.ui.SparkUI - Stopped Spark web UI at http://localhost:4040
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Stopping DAGScheduler
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.MapOutputTrackerMasterEndpoint - MapOutputTrackerMasterEndpoint stopped!
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - MemoryStore cleared
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManager - BlockManager stopped
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - BlockManagerMaster stopped
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint - OutputCommitCoordinator stopped!
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Successfully stopped SparkContext
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Running Spark version 1.5.2
[sparkDriver-akka.actor.default-dispatcher-3] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Shutting down remote daemon.
[sparkDriver-akka.actor.default-dispatcher-3] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remote daemon shut down; proceeding with flushing remote transports.
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - Changing view acls to: jenkins
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - Changing modify acls to: jenkins
[pool-4-thread-5] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(jenkins); users with modify permissions: Set(jenkins)
[sparkDriver-akka.actor.default-dispatcher-3] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remoting shut down.
[sparkDriver-akka.actor.default-dispatcher-3] INFO akka.event.slf4j.Slf4jLogger - Slf4jLogger started
[sparkDriver-akka.actor.default-dispatcher-2] INFO Remoting - Starting remoting
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 58822.
[sparkDriver-akka.actor.default-dispatcher-2] INFO Remoting - Remoting started; listening on addresses :[akka.tcp://sparkDriver@localhost:58822]
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster
[pool-4-thread-5] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-8d5da505-6b3b-4d2e-883d-c9a0b9f2ed0c
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - MemoryStore started with capacity 1920.0 MB
[pool-4-thread-5] INFO org.apache.spark.HttpFileServer - HTTP File server directory is /tmp/spark-50119b13-e072-4470-af20-ca691faafeba/httpd-0cc47f68-910b-4c8b-acc7-bfed88fbc3a0
[pool-4-thread-5] INFO org.apache.spark.HttpServer - Starting HTTP Server
[pool-4-thread-5] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-5] INFO org.spark-project.jetty.server.AbstractConnector - Started SocketConnector@0.0.0.0:57433
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'HTTP file server' on port 57433.
[pool-4-thread-5] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator
[pool-4-thread-5] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-5] INFO org.spark-project.jetty.server.AbstractConnector - Started SelectChannelConnector@0.0.0.0:4040
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4040.
[pool-4-thread-5] INFO org.apache.spark.ui.SparkUI - Started SparkUI at http://localhost:4040
[pool-4-thread-5] WARN org.apache.spark.metrics.MetricsSystem - Using default name DAGScheduler for source because spark.app.id is not set.
[pool-4-thread-5] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost
[pool-4-thread-5] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 40347.
[pool-4-thread-5] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 40347
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - Trying to register BlockManager
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager localhost:40347 with 1920.0 MB RAM, BlockManagerId(driver, localhost, 40347)
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Starting job: collect at StringUtilsSuite.scala:23
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 0 (collect at StringUtilsSuite.scala:23) with 1 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 0(collect at StringUtilsSuite.scala:23)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 0 (MapPartitionsRDD[1] at map at Transformer.scala:27), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(2328) called with curMem=0, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0 stored as values in memory (estimated size 2.3 KB, free 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1447) called with curMem=2328, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0_piece0 stored as bytes in memory (estimated size 1447.0 B, free 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_0_piece0 in memory on localhost:40347 (size: 1447.0 B, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 0 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 1 missing tasks from ResultStage 0 (MapPartitionsRDD[1] at map at Transformer.scala:27)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 0.0 with 1 tasks
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 0.0 (TID 0, localhost, PROCESS_LOCAL, 2153 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 0.0 (TID 0)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 0.0 (TID 0). 1192 bytes result sent to driver
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 0.0 (TID 0) in 23 ms on localhost (1/1)
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 0.0, whose tasks have all completed, from pool 
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 0 (collect at StringUtilsSuite.scala:23) finished in 0.023 s
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Job 0 finished: collect at StringUtilsSuite.scala:23, took 0.028544 s
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/metrics/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/api,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/static,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
[pool-4-thread-5] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs,null}
[pool-4-thread-5] INFO org.apache.spark.ui.SparkUI - Stopped Spark web UI at http://localhost:4040
[pool-4-thread-5] INFO org.apache.spark.scheduler.DAGScheduler - Stopping DAGScheduler
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.MapOutputTrackerMasterEndpoint - MapOutputTrackerMasterEndpoint stopped!
[pool-4-thread-5] INFO org.apache.spark.storage.MemoryStore - MemoryStore cleared
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManager - BlockManager stopped
[pool-4-thread-5] INFO org.apache.spark.storage.BlockManagerMaster - BlockManagerMaster stopped
[sparkDriver-akka.actor.default-dispatcher-13] INFO org.apache.spark.scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint - OutputCommitCoordinator stopped!
[pool-4-thread-5] INFO org.apache.spark.SparkContext - Successfully stopped SparkContext
[info] StringUtilsSuite:
[info] - trim
[info] - lower case
[info] - tokenizer
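(Editor's note: the three jobs above — one `collect` each at StringUtilsSuite.scala lines 11, 17, and 23, each a single one-task ResultStage over a `map` from Transformer.scala — correspond to the three passing tests listed here. A hypothetical reconstruction, since the suite's actual helpers and transformer names are not visible in this log; all identifiers below are illustrative:

    import org.apache.spark.{SparkConf, SparkContext}
    import org.scalatest.FunSuite

    class StringUtilsSuiteSketch extends FunSuite {
      // Fresh local context per test, matching the repeated startup/shutdown
      // cycles in the log above.
      private def withSpark[T](body: SparkContext => T): T = {
        val sc = new SparkContext(new SparkConf().setMaster("local").setAppName("test"))
        try body(sc) finally sc.stop()
      }

      test("trim") {
        withSpark { sc =>
          // One ResultStage, one task, one collect -- as in Job 0 above.
          val out = sc.parallelize(Seq("  a b  "), 1).map(_.trim).collect()
          assert(out.sameElements(Array("a b")))
        }
      }

      test("lower case") {
        withSpark { sc =>
          assert(sc.parallelize(Seq("MiXeD"), 1).map(_.toLowerCase).collect().head == "mixed")
        }
      }

      test("tokenizer") {
        withSpark { sc =>
          assert(sc.parallelize(Seq("a b c"), 1).map(_.split(" ").toSeq).collect().head == Seq("a", "b", "c"))
        }
      }
    }
)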
[sparkDriver-akka.actor.default-dispatcher-14] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Shutting down remote daemon.
[sparkDriver-akka.actor.default-dispatcher-14] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remote daemon shut down; proceeding with flushing remote transports.
[sparkDriver-akka.actor.default-dispatcher-3] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remoting shut down.
[info] Passed: Total 191, Failed 0, Errors 0, Passed 191
[success] Total time: 473 s, completed May 6, 2016 3:06:30 PM
[Thread-4] INFO org.apache.spark.util.ShutdownHookManager - Shutdown hook called
[Thread-4] INFO org.apache.spark.util.ShutdownHookManager - Deleting directory /tmp/spark-50119b13-e072-4470-af20-ca691faafeba
Finished: SUCCESS