Console Output (build failed)

[Skipping 14,230 KB of log output]
unSuite.scala:1310)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:362)
	at org.scalatest.FunSuite$class.run(FunSuite.scala:1310)
	at workflow.EstimatorSuite.run(EstimatorSuite.scala:8)
	at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:214)
	at sbt.RunnerWrapper$1.runRunner2(FrameworkWrapper.java:223)
	at sbt.RunnerWrapper$1.execute(FrameworkWrapper.java:236)
	at sbt.TestRunner.runTest$1(TestFramework.scala:84)
	at sbt.TestRunner.run(TestFramework.scala:94)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:207)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFunction.apply(TestFramework.scala:224)
	at sbt.Tests$.sbt$Tests$$processRunnable$1(Tests.scala:211)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$4.work(System.scala:64)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:18)
	at sbt.Execute.work(Execute.scala:244)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:160)
	at sbt.CompletionService$$anon$2.call(CompletionService.scala:30)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/api,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/static,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs,null}
[pool-4-thread-1] WARN org.apache.spark.util.Utils - Service 'SparkUI' could not bind on port 4041. Attempting port 4042.
[pool-4-thread-1] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-1] INFO org.spark-project.jetty.server.AbstractConnector - Started SelectChannelConnector@0.0.0.0:4042
[pool-4-thread-1] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4042.
[pool-4-thread-1] INFO org.apache.spark.ui.SparkUI - Started SparkUI at http://localhost:4042
[pool-4-thread-1] WARN org.apache.spark.metrics.MetricsSystem - Using default name DAGScheduler for source because spark.app.id is not set.
[pool-4-thread-1] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost
[pool-4-thread-1] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 44262.
[pool-4-thread-1] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 44262
[pool-4-thread-1] INFO org.apache.spark.storage.BlockManagerMaster - Trying to register BlockManager
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager localhost:44262 with 1920.0 MB RAM, BlockManagerId(driver, localhost, 44262)
[pool-4-thread-1] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager
[pool-4-thread-1] INFO workflow.ConcretePipeline - Fitting '$anonfun$1$$anon$1' [1]
[pool-4-thread-1] INFO org.apache.spark.SparkContext - Starting job: first at EstimatorSuite.scala:14
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 0 (first at EstimatorSuite.scala:14) with 1 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 0(first at EstimatorSuite.scala:14)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 0 (ParallelCollectionRDD[0] at parallelize at EstimatorSuite.scala:19), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1288) called with curMem=0, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0 stored as values in memory (estimated size 1288.0 B, free 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(869) called with curMem=1288, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0_piece0 stored as bytes in memory (estimated size 869.0 B, free 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_0_piece0 in memory on localhost:44262 (size: 869.0 B, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 0 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 1 missing tasks from ResultStage 0 (ParallelCollectionRDD[0] at parallelize at EstimatorSuite.scala:19)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 0.0 with 1 tasks
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 0.0 (TID 0, localhost, PROCESS_LOCAL, 2037 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 0.0 (TID 0)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 0.0 (TID 0). 902 bytes result sent to driver
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 0.0 (TID 0) in 3 ms on localhost (1/1)
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 0.0, whose tasks have all completed, from pool 
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 0 (first at EstimatorSuite.scala:14) finished in 0.003 s
[pool-4-thread-1] INFO org.apache.spark.scheduler.DAGScheduler - Job 0 finished: first at EstimatorSuite.scala:14, took 0.006833 s
[pool-4-thread-1] INFO workflow.ConcretePipeline - Finished fitting '$anonfun$1$$anon$1' [1]
[pool-4-thread-1] INFO org.apache.spark.SparkContext - Starting job: collect at EstimatorSuite.scala:23
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 1 (collect at EstimatorSuite.scala:23) with 1 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 1(collect at EstimatorSuite.scala:23)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 1 (MapPartitionsRDD[2] at map at Transformer.scala:56), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1960) called with curMem=2157, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_1 stored as values in memory (estimated size 1960.0 B, free 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1235) called with curMem=4117, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_1_piece0 stored as bytes in memory (estimated size 1235.0 B, free 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_1_piece0 in memory on localhost:44262 (size: 1235.0 B, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 1 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 1 missing tasks from ResultStage 1 (MapPartitionsRDD[2] at map at Transformer.scala:56)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 1.0 with 1 tasks
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 1.0 (TID 1, localhost, PROCESS_LOCAL, 2037 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 1.0 (TID 1)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 1.0 (TID 1). 910 bytes result sent to driver
[task-result-getter-1] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 1.0 (TID 1) in 3 ms on localhost (1/1)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 1 (collect at EstimatorSuite.scala:23) finished in 0.003 s
[task-result-getter-1] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 1.0, whose tasks have all completed, from pool 
[pool-4-thread-1] INFO org.apache.spark.scheduler.DAGScheduler - Job 1 finished: collect at EstimatorSuite.scala:23, took 0.006144 s
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/metrics/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/api,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/static,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs,null}
[pool-4-thread-1] INFO org.apache.spark.ui.SparkUI - Stopped Spark web UI at http://localhost:4042
[pool-4-thread-1] INFO org.apache.spark.scheduler.DAGScheduler - Stopping DAGScheduler
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.MapOutputTrackerMasterEndpoint - MapOutputTrackerMasterEndpoint stopped!
[pool-4-thread-1] INFO org.apache.spark.storage.MemoryStore - MemoryStore cleared
[pool-4-thread-1] INFO org.apache.spark.storage.BlockManager - BlockManager stopped
[pool-4-thread-1] INFO org.apache.spark.storage.BlockManagerMaster - BlockManagerMaster stopped
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint - OutputCommitCoordinator stopped!
[pool-4-thread-1] INFO org.apache.spark.SparkContext - Successfully stopped SparkContext
[info] EstimatorSuite:
[info] - estimator withData
[sparkDriver-akka.actor.default-dispatcher-15] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Shutting down remote daemon.
[sparkDriver-akka.actor.default-dispatcher-15] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remote daemon shut down; proceeding with flushing remote transports.
[sparkDriver-akka.actor.default-dispatcher-13] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remoting shut down.
[info] RandomPatcherSuite:
[info] - patch dimensions, number
[pool-4-thread-1] INFO org.apache.spark.SparkContext - Running Spark version 1.5.2
[pool-4-thread-1] INFO org.apache.spark.SecurityManager - Changing view acls to: jenkins
[pool-4-thread-1] INFO org.apache.spark.SecurityManager - Changing modify acls to: jenkins
[pool-4-thread-1] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(jenkins); users with modify permissions: Set(jenkins)
[sparkDriver-akka.actor.default-dispatcher-3] INFO akka.event.slf4j.Slf4jLogger - Slf4jLogger started
[sparkDriver-akka.actor.default-dispatcher-5] INFO Remoting - Starting remoting
[sparkDriver-akka.actor.default-dispatcher-3] INFO Remoting - Remoting started; listening on addresses :[akka.tcp://sparkDriver@localhost:46859]
[pool-4-thread-1] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 46859.
[pool-4-thread-1] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker
[pool-4-thread-1] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster
[pool-4-thread-1] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-2eb9105d-c2bf-4868-9f11-37b1e941cf95
[pool-4-thread-1] INFO org.apache.spark.storage.MemoryStore - MemoryStore started with capacity 1920.0 MB
[pool-4-thread-1] INFO org.apache.spark.HttpFileServer - HTTP File server directory is /tmp/spark-9f9beee7-2790-4570-b6c5-e112b862a431/httpd-2be08f10-c5b6-47c5-a98c-5058d4d962db
[pool-4-thread-1] INFO org.apache.spark.HttpServer - Starting HTTP Server
[pool-4-thread-1] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-1] INFO org.spark-project.jetty.server.AbstractConnector - Started SocketConnector@0.0.0.0:35569
[pool-4-thread-1] INFO org.apache.spark.util.Utils - Successfully started service 'HTTP file server' on port 35569.
[pool-4-thread-1] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator
[pool-4-thread-1] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-1] WARN org.spark-project.jetty.util.component.AbstractLifeCycle - FAILED SelectChannelConnector@0.0.0.0:4040: java.net.BindException: Address already in use
java.net.BindException: Address already in use
	at sun.nio.ch.Net.bind0(Native Method)
	at sun.nio.ch.Net.bind(Net.java:444)
	at sun.nio.ch.Net.bind(Net.java:436)
	at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
	at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
	at org.spark-project.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.spark-project.jetty.server.Server.doStart(Server.java:293)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:236)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
	at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
	at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:246)
	at org.apache.spark.ui.WebUI.bind(WebUI.scala:136)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at scala.Option.foreach(Option.scala:236)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:474)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:162)
	at nodes.nlp.StringUtilsSuite$$anonfun$1.apply$mcV$sp(StringUtilsSuite.scala:10)
	at nodes.nlp.StringUtilsSuite$$anonfun$1.apply(StringUtilsSuite.scala:9)
	at nodes.nlp.StringUtilsSuite$$anonfun$1.apply(StringUtilsSuite.scala:9)
	at org.scalatest.FunSuite$$anon$1.apply(FunSuite.scala:1265)
	at org.scalatest.Suite$class.withFixture(Suite.scala:1974)
	at nodes.nlp.StringUtilsSuite.withFixture(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$class.invokeWithFixture$1(FunSuite.scala:1262)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:198)
	at org.scalatest.FunSuite$class.runTest(FunSuite.scala:1271)
	at nodes.nlp.StringUtilsSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(StringUtilsSuite.scala:7)
	at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:167)
	at nodes.nlp.StringUtilsSuite.runTest(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:260)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:249)
	at scala.collection.immutable.List.foreach(List.scala:318)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:249)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:326)
	at org.scalatest.FunSuite$class.runTests(FunSuite.scala:1304)
	at nodes.nlp.StringUtilsSuite.runTests(StringUtilsSuite.scala:7)
	at org.scalatest.Suite$class.run(Suite.scala:2303)
	at nodes.nlp.StringUtilsSuite.org$scalatest$FunSuite$$super$run(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:362)
	at org.scalatest.FunSuite$class.run(FunSuite.scala:1310)
	at nodes.nlp.StringUtilsSuite.run(StringUtilsSuite.scala:7)
	at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:214)
	at sbt.RunnerWrapper$1.runRunner2(FrameworkWrapper.java:223)
	at sbt.RunnerWrapper$1.execute(FrameworkWrapper.java:236)
	at sbt.TestRunner.runTest$1(TestFramework.scala:84)
	at sbt.TestRunner.run(TestFramework.scala:94)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:207)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFunction.apply(TestFramework.scala:224)
	at sbt.Tests$.sbt$Tests$$processRunnable$1(Tests.scala:211)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$4.work(System.scala:64)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:18)
	at sbt.Execute.work(Execute.scala:244)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:160)
	at sbt.CompletionService$$anon$2.call(CompletionService.scala:30)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
[pool-4-thread-1] WARN org.spark-project.jetty.util.component.AbstractLifeCycle - FAILED org.spark-project.jetty.server.Server@113d406d: java.net.BindException: Address already in use
java.net.BindException: Address already in use
	at sun.nio.ch.Net.bind0(Native Method)
	at sun.nio.ch.Net.bind(Net.java:444)
	at sun.nio.ch.Net.bind(Net.java:436)
	at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
	at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
	at org.spark-project.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.spark-project.jetty.server.Server.doStart(Server.java:293)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:236)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
	at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
	at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:246)
	at org.apache.spark.ui.WebUI.bind(WebUI.scala:136)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at scala.Option.foreach(Option.scala:236)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:474)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:162)
	at nodes.nlp.StringUtilsSuite$$anonfun$1.apply$mcV$sp(StringUtilsSuite.scala:10)
	at nodes.nlp.StringUtilsSuite$$anonfun$1.apply(StringUtilsSuite.scala:9)
	at nodes.nlp.StringUtilsSuite$$anonfun$1.apply(StringUtilsSuite.scala:9)
	at org.scalatest.FunSuite$$anon$1.apply(FunSuite.scala:1265)
	at org.scalatest.Suite$class.withFixture(Suite.scala:1974)
	at nodes.nlp.StringUtilsSuite.withFixture(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$class.invokeWithFixture$1(FunSuite.scala:1262)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:198)
	at org.scalatest.FunSuite$class.runTest(FunSuite.scala:1271)
	at nodes.nlp.StringUtilsSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(StringUtilsSuite.scala:7)
	at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:167)
	at nodes.nlp.StringUtilsSuite.runTest(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:260)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:249)
	at scala.collection.immutable.List.foreach(List.scala:318)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:249)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:326)
	at org.scalatest.FunSuite$class.runTests(FunSuite.scala:1304)
	at nodes.nlp.StringUtilsSuite.runTests(StringUtilsSuite.scala:7)
	at org.scalatest.Suite$class.run(Suite.scala:2303)
	at nodes.nlp.StringUtilsSuite.org$scalatest$FunSuite$$super$run(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:362)
	at org.scalatest.FunSuite$class.run(FunSuite.scala:1310)
	at nodes.nlp.StringUtilsSuite.run(StringUtilsSuite.scala:7)
	at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:214)
	at sbt.RunnerWrapper$1.runRunner2(FrameworkWrapper.java:223)
	at sbt.RunnerWrapper$1.execute(FrameworkWrapper.java:236)
	at sbt.TestRunner.runTest$1(TestFramework.scala:84)
	at sbt.TestRunner.run(TestFramework.scala:94)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:207)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFunction.apply(TestFramework.scala:224)
	at sbt.Tests$.sbt$Tests$$processRunnable$1(Tests.scala:211)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$4.work(System.scala:64)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:18)
	at sbt.Execute.work(Execute.scala:244)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:160)
	at sbt.CompletionService$$anon$2.call(CompletionService.scala:30)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/api,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/static,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs,null}
[pool-4-thread-1] WARN org.apache.spark.util.Utils - Service 'SparkUI' could not bind on port 4040. Attempting port 4041.
[pool-4-thread-1] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-1] WARN org.spark-project.jetty.util.component.AbstractLifeCycle - FAILED SelectChannelConnector@0.0.0.0:4041: java.net.BindException: Address already in use
java.net.BindException: Address already in use
	at sun.nio.ch.Net.bind0(Native Method)
	at sun.nio.ch.Net.bind(Net.java:444)
	at sun.nio.ch.Net.bind(Net.java:436)
	at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
	at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
	at org.spark-project.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.spark-project.jetty.server.Server.doStart(Server.java:293)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:236)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
	at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
	at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:246)
	at org.apache.spark.ui.WebUI.bind(WebUI.scala:136)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at scala.Option.foreach(Option.scala:236)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:474)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:162)
	at nodes.nlp.StringUtilsSuite$$anonfun$1.apply$mcV$sp(StringUtilsSuite.scala:10)
	at nodes.nlp.StringUtilsSuite$$anonfun$1.apply(StringUtilsSuite.scala:9)
	at nodes.nlp.StringUtilsSuite$$anonfun$1.apply(StringUtilsSuite.scala:9)
	at org.scalatest.FunSuite$$anon$1.apply(FunSuite.scala:1265)
	at org.scalatest.Suite$class.withFixture(Suite.scala:1974)
	at nodes.nlp.StringUtilsSuite.withFixture(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$class.invokeWithFixture$1(FunSuite.scala:1262)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:198)
	at org.scalatest.FunSuite$class.runTest(FunSuite.scala:1271)
	at nodes.nlp.StringUtilsSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(StringUtilsSuite.scala:7)
	at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:167)
	at nodes.nlp.StringUtilsSuite.runTest(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:260)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:249)
	at scala.collection.immutable.List.foreach(List.scala:318)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:249)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:326)
	at org.scalatest.FunSuite$class.runTests(FunSuite.scala:1304)
	at nodes.nlp.StringUtilsSuite.runTests(StringUtilsSuite.scala:7)
	at org.scalatest.Suite$class.run(Suite.scala:2303)
	at nodes.nlp.StringUtilsSuite.org$scalatest$FunSuite$$super$run(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:362)
	at org.scalatest.FunSuite$class.run(FunSuite.scala:1310)
	at nodes.nlp.StringUtilsSuite.run(StringUtilsSuite.scala:7)
	at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:214)
	at sbt.RunnerWrapper$1.runRunner2(FrameworkWrapper.java:223)
	at sbt.RunnerWrapper$1.execute(FrameworkWrapper.java:236)
	at sbt.TestRunner.runTest$1(TestFramework.scala:84)
	at sbt.TestRunner.run(TestFramework.scala:94)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:207)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFunction.apply(TestFramework.scala:224)
	at sbt.Tests$.sbt$Tests$$processRunnable$1(Tests.scala:211)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$4.work(System.scala:64)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:18)
	at sbt.Execute.work(Execute.scala:244)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:160)
	at sbt.CompletionService$$anon$2.call(CompletionService.scala:30)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
[pool-4-thread-1] WARN org.spark-project.jetty.util.component.AbstractLifeCycle - FAILED org.spark-project.jetty.server.Server@5ddd8bf7: java.net.BindException: Address already in use
java.net.BindException: Address already in use
	at sun.nio.ch.Net.bind0(Native Method)
	at sun.nio.ch.Net.bind(Net.java:444)
	at sun.nio.ch.Net.bind(Net.java:436)
	at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
	at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
	at org.spark-project.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.spark-project.jetty.server.Server.doStart(Server.java:293)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:236)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
	at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
	at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:246)
	at org.apache.spark.ui.WebUI.bind(WebUI.scala:136)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at scala.Option.foreach(Option.scala:236)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:474)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:162)
	at nodes.nlp.StringUtilsSuite$$anonfun$1.apply$mcV$sp(StringUtilsSuite.scala:10)
	at nodes.nlp.StringUtilsSuite$$anonfun$1.apply(StringUtilsSuite.scala:9)
	at nodes.nlp.StringUtilsSuite$$anonfun$1.apply(StringUtilsSuite.scala:9)
	at org.scalatest.FunSuite$$anon$1.apply(FunSuite.scala:1265)
	at org.scalatest.Suite$class.withFixture(Suite.scala:1974)
	at nodes.nlp.StringUtilsSuite.withFixture(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$class.invokeWithFixture$1(FunSuite.scala:1262)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:198)
	at org.scalatest.FunSuite$class.runTest(FunSuite.scala:1271)
	at nodes.nlp.StringUtilsSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(StringUtilsSuite.scala:7)
	at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:167)
	at nodes.nlp.StringUtilsSuite.runTest(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:260)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:249)
	at scala.collection.immutable.List.foreach(List.scala:318)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:249)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:326)
	at org.scalatest.FunSuite$class.runTests(FunSuite.scala:1304)
	at nodes.nlp.StringUtilsSuite.runTests(StringUtilsSuite.scala:7)
	at org.scalatest.Suite$class.run(Suite.scala:2303)
	at nodes.nlp.StringUtilsSuite.org$scalatest$FunSuite$$super$run(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:362)
	at org.scalatest.FunSuite$class.run(FunSuite.scala:1310)
	at nodes.nlp.StringUtilsSuite.run(StringUtilsSuite.scala:7)
	at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:214)
	at sbt.RunnerWrapper$1.runRunner2(FrameworkWrapper.java:223)
	at sbt.RunnerWrapper$1.execute(FrameworkWrapper.java:236)
	at sbt.TestRunner.runTest$1(TestFramework.scala:84)
	at sbt.TestRunner.run(TestFramework.scala:94)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:207)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFunction.apply(TestFramework.scala:224)
	at sbt.Tests$.sbt$Tests$$processRunnable$1(Tests.scala:211)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$4.work(System.scala:64)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:18)
	at sbt.Execute.work(Execute.scala:244)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:160)
	at sbt.CompletionService$$anon$2.call(CompletionService.scala:30)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/api,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/static,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs,null}
[pool-4-thread-1] WARN org.apache.spark.util.Utils - Service 'SparkUI' could not bind on port 4041. Attempting port 4042.
[pool-4-thread-1] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-1] INFO org.spark-project.jetty.server.AbstractConnector - Started SelectChannelConnector@0.0.0.0:4042
[pool-4-thread-1] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4042.
[pool-4-thread-1] INFO org.apache.spark.ui.SparkUI - Started SparkUI at http://localhost:4042
[pool-4-thread-1] WARN org.apache.spark.metrics.MetricsSystem - Using default name DAGScheduler for source because spark.app.id is not set.
[pool-4-thread-1] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost
[pool-4-thread-1] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 44072.
[pool-4-thread-1] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 44072
[pool-4-thread-1] INFO org.apache.spark.storage.BlockManagerMaster - Trying to register BlockManager
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager localhost:44072 with 1920.0 MB RAM, BlockManagerId(driver, localhost, 44072)
[pool-4-thread-1] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager
[pool-4-thread-1] INFO org.apache.spark.SparkContext - Starting job: collect at StringUtilsSuite.scala:11
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 0 (collect at StringUtilsSuite.scala:11) with 1 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 0(collect at StringUtilsSuite.scala:11)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 0 (MapPartitionsRDD[1] at map at Transformer.scala:27), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(2248) called with curMem=0, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0 stored as values in memory (estimated size 2.2 KB, free 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1407) called with curMem=2248, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0_piece0 stored as bytes in memory (estimated size 1407.0 B, free 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_0_piece0 in memory on localhost:44072 (size: 1407.0 B, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 0 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 1 missing tasks from ResultStage 0 (MapPartitionsRDD[1] at map at Transformer.scala:27)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 0.0 with 1 tasks
[sparkDriver-akka.actor.default-dispatcher-5] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 0.0 (TID 0, localhost, PROCESS_LOCAL, 2153 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 0.0 (TID 0)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 0.0 (TID 0). 976 bytes result sent to driver
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 0.0 (TID 0) in 4 ms on localhost (1/1)
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 0.0, whose tasks have all completed, from pool 
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 0 (collect at StringUtilsSuite.scala:11) finished in 0.004 s
[pool-4-thread-1] INFO org.apache.spark.scheduler.DAGScheduler - Job 0 finished: collect at StringUtilsSuite.scala:11, took 0.010554 s
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/metrics/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/api,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/static,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs,null}
[pool-4-thread-1] INFO org.apache.spark.ui.SparkUI - Stopped Spark web UI at http://localhost:4042
[pool-4-thread-1] INFO org.apache.spark.scheduler.DAGScheduler - Stopping DAGScheduler
[sparkDriver-akka.actor.default-dispatcher-2] INFO org.apache.spark.MapOutputTrackerMasterEndpoint - MapOutputTrackerMasterEndpoint stopped!
[pool-4-thread-1] INFO org.apache.spark.storage.MemoryStore - MemoryStore cleared
[pool-4-thread-1] INFO org.apache.spark.storage.BlockManager - BlockManager stopped
[pool-4-thread-1] INFO org.apache.spark.storage.BlockManagerMaster - BlockManagerMaster stopped
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint - OutputCommitCoordinator stopped!
[pool-4-thread-1] INFO org.apache.spark.SparkContext - Successfully stopped SparkContext
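(Editor's note) The stop here, immediately followed below by another "Running Spark version 1.5.2" on the same pool-4-thread-1 thread, shows the suite tearing down one SparkContext and constructing a fresh one for the next test; the BeforeAndAfterEach frames in the stack traces that follow are consistent with a per-test fixture. The following is only a hedged sketch of such a fixture — the real suite's fixture code does not appear in this log.

    import org.apache.spark.{SparkConf, SparkContext}
    import org.scalatest.{BeforeAndAfterEach, FunSuite}

    // Hypothetical per-test fixture, consistent with the stop/start pattern in this log:
    // each test gets a fresh local SparkContext and stops it afterwards.
    class PerTestContextSuite extends FunSuite with BeforeAndAfterEach {
      @transient private var sc: SparkContext = _

      override def beforeEach(): Unit = {
        sc = new SparkContext(new SparkConf().setMaster("local").setAppName("per-test"))
      }

      override def afterEach(): Unit = {
        if (sc != null) sc.stop() // emits the "Successfully stopped SparkContext" line
        sc = null
      }

      test("example") {
        assert(sc.parallelize(1 to 3).sum() == 6.0)
      }
    }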
[sparkDriver-akka.actor.default-dispatcher-3] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Shutting down remote daemon.
[pool-4-thread-1] INFO org.apache.spark.SparkContext - Running Spark version 1.5.2
[sparkDriver-akka.actor.default-dispatcher-3] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remote daemon shut down; proceeding with flushing remote transports.
[pool-4-thread-1] INFO org.apache.spark.SecurityManager - Changing view acls to: jenkins
[pool-4-thread-1] INFO org.apache.spark.SecurityManager - Changing modify acls to: jenkins
[pool-4-thread-1] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(jenkins); users with modify permissions: Set(jenkins)
[sparkDriver-akka.actor.default-dispatcher-3] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remoting shut down.
[sparkDriver-akka.actor.default-dispatcher-3] INFO akka.event.slf4j.Slf4jLogger - Slf4jLogger started
[sparkDriver-akka.actor.default-dispatcher-3] INFO Remoting - Starting remoting
[sparkDriver-akka.actor.default-dispatcher-3] INFO Remoting - Remoting started; listening on addresses :[akka.tcp://sparkDriver@localhost:35571]
[pool-4-thread-1] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 35571.
[pool-4-thread-1] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker
[pool-4-thread-1] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster
[pool-4-thread-1] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-ea106c1f-e8fa-4b5f-8f60-bbc3ac955abf
[pool-4-thread-1] INFO org.apache.spark.storage.MemoryStore - MemoryStore started with capacity 1920.0 MB
[pool-4-thread-1] INFO org.apache.spark.HttpFileServer - HTTP File server directory is /tmp/spark-9f9beee7-2790-4570-b6c5-e112b862a431/httpd-4fb7dc59-ce96-4d47-874d-bfd90562a29f
[pool-4-thread-1] INFO org.apache.spark.HttpServer - Starting HTTP Server
[pool-4-thread-1] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-1] INFO org.spark-project.jetty.server.AbstractConnector - Started SocketConnector@0.0.0.0:33648
[pool-4-thread-1] INFO org.apache.spark.util.Utils - Successfully started service 'HTTP file server' on port 33648.
[pool-4-thread-1] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator
[pool-4-thread-1] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-1] WARN org.spark-project.jetty.util.component.AbstractLifeCycle - FAILED SelectChannelConnector@0.0.0.0:4040: java.net.BindException: Address already in use
java.net.BindException: Address already in use
	at sun.nio.ch.Net.bind0(Native Method)
	at sun.nio.ch.Net.bind(Net.java:444)
	at sun.nio.ch.Net.bind(Net.java:436)
	at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
	at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
	at org.spark-project.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.spark-project.jetty.server.Server.doStart(Server.java:293)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:236)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
	at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
	at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:246)
	at org.apache.spark.ui.WebUI.bind(WebUI.scala:136)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at scala.Option.foreach(Option.scala:236)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:474)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:162)
	at nodes.nlp.StringUtilsSuite$$anonfun$2.apply$mcV$sp(StringUtilsSuite.scala:16)
	at nodes.nlp.StringUtilsSuite$$anonfun$2.apply(StringUtilsSuite.scala:15)
	at nodes.nlp.StringUtilsSuite$$anonfun$2.apply(StringUtilsSuite.scala:15)
	at org.scalatest.FunSuite$$anon$1.apply(FunSuite.scala:1265)
	at org.scalatest.Suite$class.withFixture(Suite.scala:1974)
	at nodes.nlp.StringUtilsSuite.withFixture(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$class.invokeWithFixture$1(FunSuite.scala:1262)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:198)
	at org.scalatest.FunSuite$class.runTest(FunSuite.scala:1271)
	at nodes.nlp.StringUtilsSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(StringUtilsSuite.scala:7)
	at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:167)
	at nodes.nlp.StringUtilsSuite.runTest(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:260)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:249)
	at scala.collection.immutable.List.foreach(List.scala:318)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:249)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:326)
	at org.scalatest.FunSuite$class.runTests(FunSuite.scala:1304)
	at nodes.nlp.StringUtilsSuite.runTests(StringUtilsSuite.scala:7)
	at org.scalatest.Suite$class.run(Suite.scala:2303)
	at nodes.nlp.StringUtilsSuite.org$scalatest$FunSuite$$super$run(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:362)
	at org.scalatest.FunSuite$class.run(FunSuite.scala:1310)
	at nodes.nlp.StringUtilsSuite.run(StringUtilsSuite.scala:7)
	at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:214)
	at sbt.RunnerWrapper$1.runRunner2(FrameworkWrapper.java:223)
	at sbt.RunnerWrapper$1.execute(FrameworkWrapper.java:236)
	at sbt.TestRunner.runTest$1(TestFramework.scala:84)
	at sbt.TestRunner.run(TestFramework.scala:94)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:207)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFunction.apply(TestFramework.scala:224)
	at sbt.Tests$.sbt$Tests$$processRunnable$1(Tests.scala:211)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$4.work(System.scala:64)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:18)
	at sbt.Execute.work(Execute.scala:244)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:160)
	at sbt.CompletionService$$anon$2.call(CompletionService.scala:30)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
[pool-4-thread-1] WARN org.spark-project.jetty.util.component.AbstractLifeCycle - FAILED org.spark-project.jetty.server.Server@5b8924e3: java.net.BindException: Address already in use
java.net.BindException: Address already in use
	at sun.nio.ch.Net.bind0(Native Method)
	at sun.nio.ch.Net.bind(Net.java:444)
	at sun.nio.ch.Net.bind(Net.java:436)
	at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
	at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
	at org.spark-project.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.spark-project.jetty.server.Server.doStart(Server.java:293)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:236)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
	at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
	at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:246)
	at org.apache.spark.ui.WebUI.bind(WebUI.scala:136)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at scala.Option.foreach(Option.scala:236)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:474)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:162)
	at nodes.nlp.StringUtilsSuite$$anonfun$2.apply$mcV$sp(StringUtilsSuite.scala:16)
	at nodes.nlp.StringUtilsSuite$$anonfun$2.apply(StringUtilsSuite.scala:15)
	at nodes.nlp.StringUtilsSuite$$anonfun$2.apply(StringUtilsSuite.scala:15)
	at org.scalatest.FunSuite$$anon$1.apply(FunSuite.scala:1265)
	at org.scalatest.Suite$class.withFixture(Suite.scala:1974)
	at nodes.nlp.StringUtilsSuite.withFixture(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$class.invokeWithFixture$1(FunSuite.scala:1262)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:198)
	at org.scalatest.FunSuite$class.runTest(FunSuite.scala:1271)
	at nodes.nlp.StringUtilsSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(StringUtilsSuite.scala:7)
	at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:167)
	at nodes.nlp.StringUtilsSuite.runTest(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:260)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:249)
	at scala.collection.immutable.List.foreach(List.scala:318)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:249)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:326)
	at org.scalatest.FunSuite$class.runTests(FunSuite.scala:1304)
	at nodes.nlp.StringUtilsSuite.runTests(StringUtilsSuite.scala:7)
	at org.scalatest.Suite$class.run(Suite.scala:2303)
	at nodes.nlp.StringUtilsSuite.org$scalatest$FunSuite$$super$run(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:362)
	at org.scalatest.FunSuite$class.run(FunSuite.scala:1310)
	at nodes.nlp.StringUtilsSuite.run(StringUtilsSuite.scala:7)
	at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:214)
	at sbt.RunnerWrapper$1.runRunner2(FrameworkWrapper.java:223)
	at sbt.RunnerWrapper$1.execute(FrameworkWrapper.java:236)
	at sbt.TestRunner.runTest$1(TestFramework.scala:84)
	at sbt.TestRunner.run(TestFramework.scala:94)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:207)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFunction.apply(TestFramework.scala:224)
	at sbt.Tests$.sbt$Tests$$processRunnable$1(Tests.scala:211)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$4.work(System.scala:64)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:18)
	at sbt.Execute.work(Execute.scala:244)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:160)
	at sbt.CompletionService$$anon$2.call(CompletionService.scala:30)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/api,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/static,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs,null}
[pool-4-thread-1] WARN org.apache.spark.util.Utils - Service 'SparkUI' could not bind on port 4040. Attempting port 4041.
[pool-4-thread-1] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-1] WARN org.spark-project.jetty.util.component.AbstractLifeCycle - FAILED SelectChannelConnector@0.0.0.0:4041: java.net.BindException: Address already in use
java.net.BindException: Address already in use
	at sun.nio.ch.Net.bind0(Native Method)
	at sun.nio.ch.Net.bind(Net.java:444)
	at sun.nio.ch.Net.bind(Net.java:436)
	at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
	at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
	at org.spark-project.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.spark-project.jetty.server.Server.doStart(Server.java:293)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:236)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
	at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
	at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:246)
	at org.apache.spark.ui.WebUI.bind(WebUI.scala:136)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at scala.Option.foreach(Option.scala:236)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:474)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:162)
	at nodes.nlp.StringUtilsSuite$$anonfun$2.apply$mcV$sp(StringUtilsSuite.scala:16)
	at nodes.nlp.StringUtilsSuite$$anonfun$2.apply(StringUtilsSuite.scala:15)
	at nodes.nlp.StringUtilsSuite$$anonfun$2.apply(StringUtilsSuite.scala:15)
	at org.scalatest.FunSuite$$anon$1.apply(FunSuite.scala:1265)
	at org.scalatest.Suite$class.withFixture(Suite.scala:1974)
	at nodes.nlp.StringUtilsSuite.withFixture(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$class.invokeWithFixture$1(FunSuite.scala:1262)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:198)
	at org.scalatest.FunSuite$class.runTest(FunSuite.scala:1271)
	at nodes.nlp.StringUtilsSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(StringUtilsSuite.scala:7)
	at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:167)
	at nodes.nlp.StringUtilsSuite.runTest(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:260)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:249)
	at scala.collection.immutable.List.foreach(List.scala:318)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:249)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:326)
	at org.scalatest.FunSuite$class.runTests(FunSuite.scala:1304)
	at nodes.nlp.StringUtilsSuite.runTests(StringUtilsSuite.scala:7)
	at org.scalatest.Suite$class.run(Suite.scala:2303)
	at nodes.nlp.StringUtilsSuite.org$scalatest$FunSuite$$super$run(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:362)
	at org.scalatest.FunSuite$class.run(FunSuite.scala:1310)
	at nodes.nlp.StringUtilsSuite.run(StringUtilsSuite.scala:7)
	at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:214)
	at sbt.RunnerWrapper$1.runRunner2(FrameworkWrapper.java:223)
	at sbt.RunnerWrapper$1.execute(FrameworkWrapper.java:236)
	at sbt.TestRunner.runTest$1(TestFramework.scala:84)
	at sbt.TestRunner.run(TestFramework.scala:94)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:207)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFunction.apply(TestFramework.scala:224)
	at sbt.Tests$.sbt$Tests$$processRunnable$1(Tests.scala:211)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$4.work(System.scala:64)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:18)
	at sbt.Execute.work(Execute.scala:244)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:160)
	at sbt.CompletionService$$anon$2.call(CompletionService.scala:30)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
[pool-4-thread-1] WARN org.spark-project.jetty.util.component.AbstractLifeCycle - FAILED org.spark-project.jetty.server.Server@74e7a740: java.net.BindException: Address already in use
java.net.BindException: Address already in use
	at sun.nio.ch.Net.bind0(Native Method)
	at sun.nio.ch.Net.bind(Net.java:444)
	at sun.nio.ch.Net.bind(Net.java:436)
	at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
	at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
	at org.spark-project.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.spark-project.jetty.server.Server.doStart(Server.java:293)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:236)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
	at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
	at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:246)
	at org.apache.spark.ui.WebUI.bind(WebUI.scala:136)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at scala.Option.foreach(Option.scala:236)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:474)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:162)
	at nodes.nlp.StringUtilsSuite$$anonfun$2.apply$mcV$sp(StringUtilsSuite.scala:16)
	at nodes.nlp.StringUtilsSuite$$anonfun$2.apply(StringUtilsSuite.scala:15)
	at nodes.nlp.StringUtilsSuite$$anonfun$2.apply(StringUtilsSuite.scala:15)
	at org.scalatest.FunSuite$$anon$1.apply(FunSuite.scala:1265)
	at org.scalatest.Suite$class.withFixture(Suite.scala:1974)
	at nodes.nlp.StringUtilsSuite.withFixture(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$class.invokeWithFixture$1(FunSuite.scala:1262)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:198)
	at org.scalatest.FunSuite$class.runTest(FunSuite.scala:1271)
	at nodes.nlp.StringUtilsSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(StringUtilsSuite.scala:7)
	at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:167)
	at nodes.nlp.StringUtilsSuite.runTest(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:260)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:249)
	at scala.collection.immutable.List.foreach(List.scala:318)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:249)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:326)
	at org.scalatest.FunSuite$class.runTests(FunSuite.scala:1304)
	at nodes.nlp.StringUtilsSuite.runTests(StringUtilsSuite.scala:7)
	at org.scalatest.Suite$class.run(Suite.scala:2303)
	at nodes.nlp.StringUtilsSuite.org$scalatest$FunSuite$$super$run(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:362)
	at org.scalatest.FunSuite$class.run(FunSuite.scala:1310)
	at nodes.nlp.StringUtilsSuite.run(StringUtilsSuite.scala:7)
	at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:214)
	at sbt.RunnerWrapper$1.runRunner2(FrameworkWrapper.java:223)
	at sbt.RunnerWrapper$1.execute(FrameworkWrapper.java:236)
	at sbt.TestRunner.runTest$1(TestFramework.scala:84)
	at sbt.TestRunner.run(TestFramework.scala:94)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:207)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFunction.apply(TestFramework.scala:224)
	at sbt.Tests$.sbt$Tests$$processRunnable$1(Tests.scala:211)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$4.work(System.scala:64)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:18)
	at sbt.Execute.work(Execute.scala:244)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:160)
	at sbt.CompletionService$$anon$2.call(CompletionService.scala:30)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/api,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/static,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs,null}
[pool-4-thread-1] WARN org.apache.spark.util.Utils - Service 'SparkUI' could not bind on port 4041. Attempting port 4042.
[pool-4-thread-1] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-1] INFO org.spark-project.jetty.server.AbstractConnector - Started SelectChannelConnector@0.0.0.0:4042
[pool-4-thread-1] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4042.
[pool-4-thread-1] INFO org.apache.spark.ui.SparkUI - Started SparkUI at http://localhost:4042
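(Editor's note) The repeated BindException traces above are the SparkUI failing to bind ports 4040 and 4041, which are already held by other SparkContexts or concurrent builds on this Jenkins executor; Spark's startServiceOnPort then retries successive ports (up to spark.port.maxRetries) until 4042 succeeds, so these WARNs are noisy but non-fatal. For test runs that never need the web UI, one conservative way to avoid the contention entirely is to disable the UI in the test configuration, as sketched below (spark.ui.enabled is a standard Spark setting; the master and app name are just examples).

    import org.apache.spark.{SparkConf, SparkContext}

    // Sketch: turn off the web UI in tests so no 404x port bind is attempted at all.
    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("ui-less-test")
      .set("spark.ui.enabled", "false")
    val sc = new SparkContext(conf)
    // ... run the test body ...
    sc.stop()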
[pool-4-thread-1] WARN org.apache.spark.metrics.MetricsSystem - Using default name DAGScheduler for source because spark.app.id is not set.
[pool-4-thread-1] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost
[pool-4-thread-1] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 54052.
[pool-4-thread-1] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 54052
[pool-4-thread-1] INFO org.apache.spark.storage.BlockManagerMaster - Trying to register BlockManager
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager localhost:54052 with 1920.0 MB RAM, BlockManagerId(driver, localhost, 54052)
[pool-4-thread-1] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager
[pool-4-thread-1] INFO org.apache.spark.SparkContext - Starting job: collect at StringUtilsSuite.scala:17
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 0 (collect at StringUtilsSuite.scala:17) with 1 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 0(collect at StringUtilsSuite.scala:17)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 0 (MapPartitionsRDD[1] at map at Transformer.scala:27), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(2448) called with curMem=0, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0 stored as values in memory (estimated size 2.4 KB, free 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1560) called with curMem=2448, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0_piece0 stored as bytes in memory (estimated size 1560.0 B, free 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_0_piece0 in memory on localhost:54052 (size: 1560.0 B, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 0 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 1 missing tasks from ResultStage 0 (MapPartitionsRDD[1] at map at Transformer.scala:27)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 0.0 with 1 tasks
[sparkDriver-akka.actor.default-dispatcher-3] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 0.0 (TID 0, localhost, PROCESS_LOCAL, 2153 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 0.0 (TID 0)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 0.0 (TID 0). 981 bytes result sent to driver
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 0.0 (TID 0) in 4 ms on localhost (1/1)
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 0.0, whose tasks have all completed, from pool 
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 0 (collect at StringUtilsSuite.scala:17) finished in 0.006 s
[pool-4-thread-1] INFO org.apache.spark.scheduler.DAGScheduler - Job 0 finished: collect at StringUtilsSuite.scala:17, took 0.008661 s
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/metrics/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/api,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/static,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs,null}
[pool-4-thread-1] INFO org.apache.spark.ui.SparkUI - Stopped Spark web UI at http://localhost:4042
[pool-4-thread-1] INFO org.apache.spark.scheduler.DAGScheduler - Stopping DAGScheduler
[sparkDriver-akka.actor.default-dispatcher-15] INFO org.apache.spark.MapOutputTrackerMasterEndpoint - MapOutputTrackerMasterEndpoint stopped!
[pool-4-thread-1] INFO org.apache.spark.storage.MemoryStore - MemoryStore cleared
[pool-4-thread-1] INFO org.apache.spark.storage.BlockManager - BlockManager stopped
[pool-4-thread-1] INFO org.apache.spark.storage.BlockManagerMaster - BlockManagerMaster stopped
[sparkDriver-akka.actor.default-dispatcher-13] INFO org.apache.spark.scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint - OutputCommitCoordinator stopped!
[pool-4-thread-1] INFO org.apache.spark.SparkContext - Successfully stopped SparkContext
[pool-4-thread-1] INFO org.apache.spark.SparkContext - Running Spark version 1.5.2
[sparkDriver-akka.actor.default-dispatcher-15] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Shutting down remote daemon.
[sparkDriver-akka.actor.default-dispatcher-15] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remote daemon shut down; proceeding with flushing remote transports.
[pool-4-thread-1] INFO org.apache.spark.SecurityManager - Changing view acls to: jenkins
[pool-4-thread-1] INFO org.apache.spark.SecurityManager - Changing modify acls to: jenkins
[pool-4-thread-1] INFO org.apache.spark.SecurityManager - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(jenkins); users with modify permissions: Set(jenkins)
[sparkDriver-akka.actor.default-dispatcher-15] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remoting shut down.
[sparkDriver-akka.actor.default-dispatcher-2] INFO akka.event.slf4j.Slf4jLogger - Slf4jLogger started
[sparkDriver-akka.actor.default-dispatcher-4] INFO Remoting - Starting remoting
[sparkDriver-akka.actor.default-dispatcher-4] INFO Remoting - Remoting started; listening on addresses :[akka.tcp://sparkDriver@localhost:43461]
[pool-4-thread-1] INFO org.apache.spark.util.Utils - Successfully started service 'sparkDriver' on port 43461.
[pool-4-thread-1] INFO org.apache.spark.SparkEnv - Registering MapOutputTracker
[pool-4-thread-1] INFO org.apache.spark.SparkEnv - Registering BlockManagerMaster
[pool-4-thread-1] INFO org.apache.spark.storage.DiskBlockManager - Created local directory at /tmp/blockmgr-c3805a74-d648-4c4c-9e15-5969c6dde5fb
[pool-4-thread-1] INFO org.apache.spark.storage.MemoryStore - MemoryStore started with capacity 1920.0 MB
[pool-4-thread-1] INFO org.apache.spark.HttpFileServer - HTTP File server directory is /tmp/spark-9f9beee7-2790-4570-b6c5-e112b862a431/httpd-9deeea90-a02f-4c29-a2cc-f0e402937194
[pool-4-thread-1] INFO org.apache.spark.HttpServer - Starting HTTP Server
[pool-4-thread-1] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-1] INFO org.spark-project.jetty.server.AbstractConnector - Started SocketConnector@0.0.0.0:57097
[pool-4-thread-1] INFO org.apache.spark.util.Utils - Successfully started service 'HTTP file server' on port 57097.
[pool-4-thread-1] INFO org.apache.spark.SparkEnv - Registering OutputCommitCoordinator
[pool-4-thread-1] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-1] WARN org.spark-project.jetty.util.component.AbstractLifeCycle - FAILED SelectChannelConnector@0.0.0.0:4040: java.net.BindException: Address already in use
java.net.BindException: Address already in use
	at sun.nio.ch.Net.bind0(Native Method)
	at sun.nio.ch.Net.bind(Net.java:444)
	at sun.nio.ch.Net.bind(Net.java:436)
	at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
	at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
	at org.spark-project.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.spark-project.jetty.server.Server.doStart(Server.java:293)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:236)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
	at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
	at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:246)
	at org.apache.spark.ui.WebUI.bind(WebUI.scala:136)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at scala.Option.foreach(Option.scala:236)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:474)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:162)
	at nodes.nlp.StringUtilsSuite$$anonfun$3.apply$mcV$sp(StringUtilsSuite.scala:22)
	at nodes.nlp.StringUtilsSuite$$anonfun$3.apply(StringUtilsSuite.scala:21)
	at nodes.nlp.StringUtilsSuite$$anonfun$3.apply(StringUtilsSuite.scala:21)
	at org.scalatest.FunSuite$$anon$1.apply(FunSuite.scala:1265)
	at org.scalatest.Suite$class.withFixture(Suite.scala:1974)
	at nodes.nlp.StringUtilsSuite.withFixture(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$class.invokeWithFixture$1(FunSuite.scala:1262)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:198)
	at org.scalatest.FunSuite$class.runTest(FunSuite.scala:1271)
	at nodes.nlp.StringUtilsSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(StringUtilsSuite.scala:7)
	at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:167)
	at nodes.nlp.StringUtilsSuite.runTest(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:260)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:249)
	at scala.collection.immutable.List.foreach(List.scala:318)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:249)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:326)
	at org.scalatest.FunSuite$class.runTests(FunSuite.scala:1304)
	at nodes.nlp.StringUtilsSuite.runTests(StringUtilsSuite.scala:7)
	at org.scalatest.Suite$class.run(Suite.scala:2303)
	at nodes.nlp.StringUtilsSuite.org$scalatest$FunSuite$$super$run(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:362)
	at org.scalatest.FunSuite$class.run(FunSuite.scala:1310)
	at nodes.nlp.StringUtilsSuite.run(StringUtilsSuite.scala:7)
	at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:214)
	at sbt.RunnerWrapper$1.runRunner2(FrameworkWrapper.java:223)
	at sbt.RunnerWrapper$1.execute(FrameworkWrapper.java:236)
	at sbt.TestRunner.runTest$1(TestFramework.scala:84)
	at sbt.TestRunner.run(TestFramework.scala:94)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:207)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFunction.apply(TestFramework.scala:224)
	at sbt.Tests$.sbt$Tests$$processRunnable$1(Tests.scala:211)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$4.work(System.scala:64)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:18)
	at sbt.Execute.work(Execute.scala:244)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:160)
	at sbt.CompletionService$$anon$2.call(CompletionService.scala:30)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
[pool-4-thread-1] WARN org.spark-project.jetty.util.component.AbstractLifeCycle - FAILED org.spark-project.jetty.server.Server@79d9782: java.net.BindException: Address already in use
java.net.BindException: Address already in use
	at sun.nio.ch.Net.bind0(Native Method)
	at sun.nio.ch.Net.bind(Net.java:444)
	at sun.nio.ch.Net.bind(Net.java:436)
	at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
	at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
	at org.spark-project.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.spark-project.jetty.server.Server.doStart(Server.java:293)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:236)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
	at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
	at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:246)
	at org.apache.spark.ui.WebUI.bind(WebUI.scala:136)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at scala.Option.foreach(Option.scala:236)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:474)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:162)
	at nodes.nlp.StringUtilsSuite$$anonfun$3.apply$mcV$sp(StringUtilsSuite.scala:22)
	at nodes.nlp.StringUtilsSuite$$anonfun$3.apply(StringUtilsSuite.scala:21)
	at nodes.nlp.StringUtilsSuite$$anonfun$3.apply(StringUtilsSuite.scala:21)
	at org.scalatest.FunSuite$$anon$1.apply(FunSuite.scala:1265)
	at org.scalatest.Suite$class.withFixture(Suite.scala:1974)
	at nodes.nlp.StringUtilsSuite.withFixture(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$class.invokeWithFixture$1(FunSuite.scala:1262)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:198)
	at org.scalatest.FunSuite$class.runTest(FunSuite.scala:1271)
	at nodes.nlp.StringUtilsSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(StringUtilsSuite.scala:7)
	at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:167)
	at nodes.nlp.StringUtilsSuite.runTest(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:260)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:249)
	at scala.collection.immutable.List.foreach(List.scala:318)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:249)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:326)
	at org.scalatest.FunSuite$class.runTests(FunSuite.scala:1304)
	at nodes.nlp.StringUtilsSuite.runTests(StringUtilsSuite.scala:7)
	at org.scalatest.Suite$class.run(Suite.scala:2303)
	at nodes.nlp.StringUtilsSuite.org$scalatest$FunSuite$$super$run(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:362)
	at org.scalatest.FunSuite$class.run(FunSuite.scala:1310)
	at nodes.nlp.StringUtilsSuite.run(StringUtilsSuite.scala:7)
	at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:214)
	at sbt.RunnerWrapper$1.runRunner2(FrameworkWrapper.java:223)
	at sbt.RunnerWrapper$1.execute(FrameworkWrapper.java:236)
	at sbt.TestRunner.runTest$1(TestFramework.scala:84)
	at sbt.TestRunner.run(TestFramework.scala:94)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:207)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFunction.apply(TestFramework.scala:224)
	at sbt.Tests$.sbt$Tests$$processRunnable$1(Tests.scala:211)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$4.work(System.scala:64)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:18)
	at sbt.Execute.work(Execute.scala:244)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:160)
	at sbt.CompletionService$$anon$2.call(CompletionService.scala:30)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/api,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/static,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs,null}
[pool-4-thread-1] WARN org.apache.spark.util.Utils - Service 'SparkUI' could not bind on port 4040. Attempting port 4041.
[pool-4-thread-1] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-1] WARN org.spark-project.jetty.util.component.AbstractLifeCycle - FAILED SelectChannelConnector@0.0.0.0:4041: java.net.BindException: Address already in use
java.net.BindException: Address already in use
	at sun.nio.ch.Net.bind0(Native Method)
	at sun.nio.ch.Net.bind(Net.java:444)
	at sun.nio.ch.Net.bind(Net.java:436)
	at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
	at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
	at org.spark-project.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.spark-project.jetty.server.Server.doStart(Server.java:293)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:236)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
	at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
	at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:246)
	at org.apache.spark.ui.WebUI.bind(WebUI.scala:136)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at scala.Option.foreach(Option.scala:236)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:474)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:162)
	at nodes.nlp.StringUtilsSuite$$anonfun$3.apply$mcV$sp(StringUtilsSuite.scala:22)
	at nodes.nlp.StringUtilsSuite$$anonfun$3.apply(StringUtilsSuite.scala:21)
	at nodes.nlp.StringUtilsSuite$$anonfun$3.apply(StringUtilsSuite.scala:21)
	at org.scalatest.FunSuite$$anon$1.apply(FunSuite.scala:1265)
	at org.scalatest.Suite$class.withFixture(Suite.scala:1974)
	at nodes.nlp.StringUtilsSuite.withFixture(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$class.invokeWithFixture$1(FunSuite.scala:1262)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:198)
	at org.scalatest.FunSuite$class.runTest(FunSuite.scala:1271)
	at nodes.nlp.StringUtilsSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(StringUtilsSuite.scala:7)
	at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:167)
	at nodes.nlp.StringUtilsSuite.runTest(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:260)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:249)
	at scala.collection.immutable.List.foreach(List.scala:318)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:249)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:326)
	at org.scalatest.FunSuite$class.runTests(FunSuite.scala:1304)
	at nodes.nlp.StringUtilsSuite.runTests(StringUtilsSuite.scala:7)
	at org.scalatest.Suite$class.run(Suite.scala:2303)
	at nodes.nlp.StringUtilsSuite.org$scalatest$FunSuite$$super$run(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:362)
	at org.scalatest.FunSuite$class.run(FunSuite.scala:1310)
	at nodes.nlp.StringUtilsSuite.run(StringUtilsSuite.scala:7)
	at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:214)
	at sbt.RunnerWrapper$1.runRunner2(FrameworkWrapper.java:223)
	at sbt.RunnerWrapper$1.execute(FrameworkWrapper.java:236)
	at sbt.TestRunner.runTest$1(TestFramework.scala:84)
	at sbt.TestRunner.run(TestFramework.scala:94)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:207)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFunction.apply(TestFramework.scala:224)
	at sbt.Tests$.sbt$Tests$$processRunnable$1(Tests.scala:211)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$4.work(System.scala:64)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:18)
	at sbt.Execute.work(Execute.scala:244)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:160)
	at sbt.CompletionService$$anon$2.call(CompletionService.scala:30)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
[pool-4-thread-1] WARN org.spark-project.jetty.util.component.AbstractLifeCycle - FAILED org.spark-project.jetty.server.Server@2e3cd35d: java.net.BindException: Address already in use
java.net.BindException: Address already in use
	at sun.nio.ch.Net.bind0(Native Method)
	at sun.nio.ch.Net.bind(Net.java:444)
	at sun.nio.ch.Net.bind(Net.java:436)
	at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
	at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
	at org.spark-project.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
	at org.spark-project.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.spark-project.jetty.server.Server.doStart(Server.java:293)
	at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
	at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:236)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.ui.JettyUtils$$anonfun$3.apply(JettyUtils.scala:246)
	at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
	at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
	at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:246)
	at org.apache.spark.ui.WebUI.bind(WebUI.scala:136)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:474)
	at scala.Option.foreach(Option.scala:236)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:474)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:162)
	at nodes.nlp.StringUtilsSuite$$anonfun$3.apply$mcV$sp(StringUtilsSuite.scala:22)
	at nodes.nlp.StringUtilsSuite$$anonfun$3.apply(StringUtilsSuite.scala:21)
	at nodes.nlp.StringUtilsSuite$$anonfun$3.apply(StringUtilsSuite.scala:21)
	at org.scalatest.FunSuite$$anon$1.apply(FunSuite.scala:1265)
	at org.scalatest.Suite$class.withFixture(Suite.scala:1974)
	at nodes.nlp.StringUtilsSuite.withFixture(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$class.invokeWithFixture$1(FunSuite.scala:1262)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.FunSuite$$anonfun$runTest$1.apply(FunSuite.scala:1271)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:198)
	at org.scalatest.FunSuite$class.runTest(FunSuite.scala:1271)
	at nodes.nlp.StringUtilsSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(StringUtilsSuite.scala:7)
	at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:167)
	at nodes.nlp.StringUtilsSuite.runTest(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.FunSuite$$anonfun$runTests$1.apply(FunSuite.scala:1304)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:260)
	at org.scalatest.SuperEngine$$anonfun$org$scalatest$SuperEngine$$runTestsInBranch$1.apply(Engine.scala:249)
	at scala.collection.immutable.List.foreach(List.scala:318)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:249)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:326)
	at org.scalatest.FunSuite$class.runTests(FunSuite.scala:1304)
	at nodes.nlp.StringUtilsSuite.runTests(StringUtilsSuite.scala:7)
	at org.scalatest.Suite$class.run(Suite.scala:2303)
	at nodes.nlp.StringUtilsSuite.org$scalatest$FunSuite$$super$run(StringUtilsSuite.scala:7)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.FunSuite$$anonfun$run$1.apply(FunSuite.scala:1310)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:362)
	at org.scalatest.FunSuite$class.run(FunSuite.scala:1310)
	at nodes.nlp.StringUtilsSuite.run(StringUtilsSuite.scala:7)
	at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:214)
	at sbt.RunnerWrapper$1.runRunner2(FrameworkWrapper.java:223)
	at sbt.RunnerWrapper$1.execute(FrameworkWrapper.java:236)
	at sbt.TestRunner.runTest$1(TestFramework.scala:84)
	at sbt.TestRunner.run(TestFramework.scala:94)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:219)
	at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:207)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:219)
	at sbt.TestFunction.apply(TestFramework.scala:224)
	at sbt.Tests$.sbt$Tests$$processRunnable$1(Tests.scala:211)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:217)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:45)
	at sbt.std.Transform$$anon$4.work(System.scala:64)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
	at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:18)
	at sbt.Execute.work(Execute.scala:244)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
	at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:160)
	at sbt.CompletionService$$anon$2.call(CompletionService.scala:30)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/api,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/static,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs,null}
[pool-4-thread-1] WARN org.apache.spark.util.Utils - Service 'SparkUI' could not bind on port 4041. Attempting port 4042.
[pool-4-thread-1] INFO org.spark-project.jetty.server.Server - jetty-8.y.z-SNAPSHOT
[pool-4-thread-1] INFO org.spark-project.jetty.server.AbstractConnector - Started SelectChannelConnector@0.0.0.0:4042
[pool-4-thread-1] INFO org.apache.spark.util.Utils - Successfully started service 'SparkUI' on port 4042.
[pool-4-thread-1] INFO org.apache.spark.ui.SparkUI - Started SparkUI at http://localhost:4042
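Note on the BindException stack traces above: they are retry noise, not test failures. Spark's Utils.startServiceOnPort (visible in the traces) walks up from the configured UI port, 4040 by default, until a bind succeeds, which is why the UI eventually comes up on 4042 here; the lower ports are most likely still held by SparkUI instances left over from suites run earlier in this build. A minimal sketch of how a local test suite could avoid this noise, assuming the suite constructs its own SparkContext (the config keys are standard Spark settings; the app name is hypothetical):

import org.apache.spark.{SparkConf, SparkContext}

// Disable the web UI (or pin spark.ui.port) so consecutive suites do not
// race for port 4040, and stop the context so its ports are released.
val conf = new SparkConf()
  .setMaster("local[2]")
  .setAppName("StringUtilsSuiteLocal") // hypothetical name, for illustration only
  .set("spark.ui.enabled", "false")
  // .set("spark.ui.port", "4050")     // alternative: pin a known free port

val sc = new SparkContext(conf)
try {
  // ... run the test body against sc ...
} finally {
  sc.stop()                            // release the UI/block-transfer ports
}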
[pool-4-thread-1] WARN org.apache.spark.metrics.MetricsSystem - Using default name DAGScheduler for source because spark.app.id is not set.
[pool-4-thread-1] INFO org.apache.spark.executor.Executor - Starting executor ID driver on host localhost
[pool-4-thread-1] INFO org.apache.spark.util.Utils - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 42601.
[pool-4-thread-1] INFO org.apache.spark.network.netty.NettyBlockTransferService - Server created on 42601
[pool-4-thread-1] INFO org.apache.spark.storage.BlockManagerMaster - Trying to register BlockManager
[sparkDriver-akka.actor.default-dispatcher-4] INFO org.apache.spark.storage.BlockManagerMasterEndpoint - Registering block manager localhost:42601 with 1920.0 MB RAM, BlockManagerId(driver, localhost, 42601)
[pool-4-thread-1] INFO org.apache.spark.storage.BlockManagerMaster - Registered BlockManager
[pool-4-thread-1] INFO org.apache.spark.SparkContext - Starting job: collect at StringUtilsSuite.scala:23
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Got job 0 (collect at StringUtilsSuite.scala:23) with 1 output partitions
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Final stage: ResultStage 0(collect at StringUtilsSuite.scala:23)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Parents of final stage: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Missing parents: List()
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting ResultStage 0 (MapPartitionsRDD[1] at map at Transformer.scala:27), which has no missing parents
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(2328) called with curMem=0, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0 stored as values in memory (estimated size 2.3 KB, free 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - ensureFreeSpace(1447) called with curMem=2328, maxMem=2013234462
[dag-scheduler-event-loop] INFO org.apache.spark.storage.MemoryStore - Block broadcast_0_piece0 stored as bytes in memory (estimated size 1447.0 B, free 1920.0 MB)
[sparkDriver-akka.actor.default-dispatcher-4] INFO org.apache.spark.storage.BlockManagerInfo - Added broadcast_0_piece0 in memory on localhost:42601 (size: 1447.0 B, free: 1920.0 MB)
[dag-scheduler-event-loop] INFO org.apache.spark.SparkContext - Created broadcast 0 from broadcast at DAGScheduler.scala:861
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - Submitting 1 missing tasks from ResultStage 0 (MapPartitionsRDD[1] at map at Transformer.scala:27)
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Adding task set 0.0 with 1 tasks
[sparkDriver-akka.actor.default-dispatcher-4] INFO org.apache.spark.scheduler.TaskSetManager - Starting task 0.0 in stage 0.0 (TID 0, localhost, PROCESS_LOCAL, 2153 bytes)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Running task 0.0 in stage 0.0 (TID 0)
[Executor task launch worker-0] INFO org.apache.spark.executor.Executor - Finished task 0.0 in stage 0.0 (TID 0). 1192 bytes result sent to driver
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSetManager - Finished task 0.0 in stage 0.0 (TID 0) in 6 ms on localhost (1/1)
[task-result-getter-0] INFO org.apache.spark.scheduler.TaskSchedulerImpl - Removed TaskSet 0.0, whose tasks have all completed, from pool 
[dag-scheduler-event-loop] INFO org.apache.spark.scheduler.DAGScheduler - ResultStage 0 (collect at StringUtilsSuite.scala:23) finished in 0.007 s
[pool-4-thread-1] INFO org.apache.spark.scheduler.DAGScheduler - Job 0 finished: collect at StringUtilsSuite.scala:23, took 0.011404 s
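For context, the job logged above is a single-partition collect issued from StringUtilsSuite.scala:23, running over the MapPartitionsRDD produced by the map at Transformer.scala:27. A rough, hypothetical reconstruction of that shape, assuming an existing SparkContext sc (data and names invented; the suite's real inputs are not shown in this log):

// One input partition -> one task in one ResultStage, then collect on the driver,
// matching the "1 output partitions" / "1 tasks" lines above.
val input  = sc.parallelize(Seq("  Hello World  "), numSlices = 1)
val output = input.map(_.trim).collect()
assert(output.sameElements(Array("Hello World")))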
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/metrics/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/api,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/static,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/executors,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/environment,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/storage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/stages,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
[pool-4-thread-1] INFO org.spark-project.jetty.server.handler.ContextHandler - stopped o.s.j.s.ServletContextHandler{/jobs,null}
[pool-4-thread-1] INFO org.apache.spark.ui.SparkUI - Stopped Spark web UI at http://localhost:4042
[pool-4-thread-1] INFO org.apache.spark.scheduler.DAGScheduler - Stopping DAGScheduler
[sparkDriver-akka.actor.default-dispatcher-14] INFO org.apache.spark.MapOutputTrackerMasterEndpoint - MapOutputTrackerMasterEndpoint stopped!
[pool-4-thread-1] INFO org.apache.spark.storage.MemoryStore - MemoryStore cleared
[pool-4-thread-1] INFO org.apache.spark.storage.BlockManager - BlockManager stopped
[pool-4-thread-1] INFO org.apache.spark.storage.BlockManagerMaster - BlockManagerMaster stopped
[sparkDriver-akka.actor.default-dispatcher-4] INFO org.apache.spark.scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint - OutputCommitCoordinator stopped!
[pool-4-thread-1] INFO org.apache.spark.SparkContext - Successfully stopped SparkContext
[info] StringUtilsSuite:
[info] - trim
[info] - lower case
[info] - tokenizer
[sparkDriver-akka.actor.default-dispatcher-3] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Shutting down remote daemon.
[sparkDriver-akka.actor.default-dispatcher-3] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remote daemon shut down; proceeding with flushing remote transports.
[sparkDriver-akka.actor.default-dispatcher-3] INFO akka.remote.RemoteActorRefProvider$RemotingTerminator - Remoting shut down.
[error] Failed: Total 193, Failed 2, Errors 0, Passed 191
[error] Failed tests:
[error] 	nodes.images.ConvolverSuite
[error] (test:test) sbt.TestsFailedException: Tests unsuccessful
[error] Total time: 592 s, completed May 17, 2016 10:59:06 AM
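The two failures that break the build are both in nodes.images.ConvolverSuite; the BindException traces earlier are warnings only, and StringUtilsSuite reports all of its tests passing. To reproduce just the failing suite locally, the usual sbt invocation would be along the lines of (standard sbt syntax, not taken from this log):

sbt "testOnly nodes.images.ConvolverSuite"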
[Thread-4] INFO org.apache.spark.util.ShutdownHookManager - Shutdown hook called
[Thread-4] INFO org.apache.spark.util.ShutdownHookManager - Deleting directory /tmp/spark-9f9beee7-2790-4570-b6c5-e112b862a431
Build step 'Execute shell' marked build as failure
Sending e-mails to: sparks@cs.berkeley.edu shivaram@cs.berkeley.edu tomerk11@berkeley.edu vaishaal@berkeley.edu
Finished: FAILURE