Console Output

Skipping 29,616 KB..
[info]   at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
[info]   at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
[info]   at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:58)
[info]   at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
[info]   at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:393)
[info]   at scala.collection.immutable.List.foreach(List.scala:392)
[info]   at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:381)
[info]   at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:376)
[info]   at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:458)
[info]   at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
[info]   at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
[info]   at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
[info]   at org.scalatest.Suite.run(Suite.scala:1124)
[info]   at org.scalatest.Suite.run$(Suite.scala:1106)
[info]   at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
[info]   at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
[info]   at org.scalatest.SuperEngine.runImpl(Engine.scala:518)
[info]   at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
[info]   at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
[info]   at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:58)
[info]   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
[info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
[info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
[info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:58)
[info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
[info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
[info]   at sbt.ForkMain$Run$2.call(ForkMain.java:296)
[info]   at sbt.ForkMain$Run$2.call(ForkMain.java:286)
[info]   at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[info]   at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[info]   at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[info]   at java.lang.Thread.run(Thread.java:748)
11:36:37.880 WARN org.apache.spark.sql.execution.ui.SQLAppStatusListenerMemoryLeakSuite: 

===== POSSIBLE THREAD LEAK IN SUITE o.a.s.sql.execution.ui.SQLAppStatusListenerMemoryLeakSuite, thread names: rpc-boss-2123-1 =====

[info] ShufflePartitionsUtilSuite:
[info] - 1 shuffle (2 milliseconds)
[info] - 2 shuffles (1 millisecond)
[info] - enforce minimal number of coalesced partitions (0 milliseconds)
[info] - splitSizeListByTargetSize (0 milliseconds)
[info] DatasetCacheSuite:
11:36:37.910 WARN org.apache.spark.SparkContext: Another SparkContext is being constructed (or threw an exception in its constructor). This may indicate an error, since only one SparkContext should be running in this JVM (see SPARK-2243). The other SparkContext was created at:
org.apache.spark.sql.execution.ui.SQLAppStatusListenerMemoryLeakSuite.$anonfun$new$38(SQLAppStatusListenerSuite.scala:680)
org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
org.scalatest.Transformer.apply(Transformer.scala:22)
org.scalatest.Transformer.apply(Transformer.scala:20)
org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:151)
org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
org.scalatest.SuperEngine.runTestImpl(Engine.scala:286)
org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:58)
org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:58)
org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:393)
scala.collection.immutable.List.foreach(List.scala:392)
11:36:37.939 ERROR org.apache.spark.SparkContext: Error initializing SparkContext.
java.lang.IllegalStateException: Shutdown hooks cannot be modified during shutdown.
	at org.apache.spark.util.SparkShutdownHookManager.add(ShutdownHookManager.scala:195)
	at org.apache.spark.util.ShutdownHookManager$.addShutdownHook(ShutdownHookManager.scala:153)
	at org.apache.spark.storage.DiskBlockManager.addShutdownHook(DiskBlockManager.scala:157)
	at org.apache.spark.storage.DiskBlockManager.<init>(DiskBlockManager.scala:54)
	at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:190)
	at org.apache.spark.SparkEnv$.create(SparkEnv.scala:393)
	at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:189)
	at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:442)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:130)
	at org.apache.spark.sql.test.TestSparkSession.<init>(TestSQLContext.scala:30)
	at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession(SharedSparkSession.scala:100)
	at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession$(SharedSparkSession.scala:98)
	at org.apache.spark.sql.DatasetCacheSuite.createSparkSession(DatasetCacheSuite.scala:30)
	at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession(SharedSparkSession.scala:114)
	at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession$(SharedSparkSession.scala:112)
	at org.apache.spark.sql.DatasetCacheSuite.initializeSession(DatasetCacheSuite.scala:30)
	at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll(SharedSparkSession.scala:122)
	at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll$(SharedSparkSession.scala:121)
	at org.apache.spark.sql.DatasetCacheSuite.org$apache$spark$sql$test$SharedSparkSession$$super$beforeAll(DatasetCacheSuite.scala:30)
	at org.apache.spark.sql.test.SharedSparkSession.beforeAll(SharedSparkSession.scala:45)
	at org.apache.spark.sql.test.SharedSparkSession.beforeAll$(SharedSparkSession.scala:43)
	at org.apache.spark.sql.DatasetCacheSuite.beforeAll(DatasetCacheSuite.scala:30)
	at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
	at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
	at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:58)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
11:36:37.943 WARN org.apache.spark.sql.DatasetCacheSuite: 

===== POSSIBLE THREAD LEAK IN SUITE o.a.s.sql.DatasetCacheSuite, thread names: rpc-boss-2126-1 =====

[info] org.apache.spark.sql.DatasetCacheSuite *** ABORTED *** (37 milliseconds)
[info]   java.lang.IllegalStateException: Shutdown hooks cannot be modified during shutdown.
[info]   at org.apache.spark.util.SparkShutdownHookManager.add(ShutdownHookManager.scala:195)
[info]   at org.apache.spark.util.ShutdownHookManager$.addShutdownHook(ShutdownHookManager.scala:153)
[info]   at org.apache.spark.storage.DiskBlockManager.addShutdownHook(DiskBlockManager.scala:157)
[info]   at org.apache.spark.storage.DiskBlockManager.<init>(DiskBlockManager.scala:54)
[info]   at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:190)
[info]   at org.apache.spark.SparkEnv$.create(SparkEnv.scala:393)
[info]   at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:189)
[info]   at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
[info]   at org.apache.spark.SparkContext.<init>(SparkContext.scala:442)
[info]   at org.apache.spark.SparkContext.<init>(SparkContext.scala:130)
[info]   at org.apache.spark.sql.test.TestSparkSession.<init>(TestSQLContext.scala:30)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession(SharedSparkSession.scala:100)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession$(SharedSparkSession.scala:98)
[info]   at org.apache.spark.sql.DatasetCacheSuite.createSparkSession(DatasetCacheSuite.scala:30)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession(SharedSparkSession.scala:114)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession$(SharedSparkSession.scala:112)
[info]   at org.apache.spark.sql.DatasetCacheSuite.initializeSession(DatasetCacheSuite.scala:30)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll(SharedSparkSession.scala:122)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll$(SharedSparkSession.scala:121)
[info]   at org.apache.spark.sql.DatasetCacheSuite.org$apache$spark$sql$test$SharedSparkSession$$super$beforeAll(DatasetCacheSuite.scala:30)
[info]   at org.apache.spark.sql.test.SharedSparkSession.beforeAll(SharedSparkSession.scala:45)
[info]   at org.apache.spark.sql.test.SharedSparkSession.beforeAll$(SharedSparkSession.scala:43)
[info]   at org.apache.spark.sql.DatasetCacheSuite.beforeAll(DatasetCacheSuite.scala:30)
[info]   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
[info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
[info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
[info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:58)
[info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
[info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
[info]   at sbt.ForkMain$Run$2.call(ForkMain.java:296)
[info]   at sbt.ForkMain$Run$2.call(ForkMain.java:286)
[info]   at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[info]   at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[info]   at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[info]   at java.lang.Thread.run(Thread.java:748)
[info] AggregatingAccumulatorSuite:
11:36:37.949 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
11:36:37.949 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
11:36:37.949 WARN org.apache.spark.SparkContext: Another SparkContext is being constructed (or threw an exception in its constructor). This may indicate an error, since only one SparkContext should be running in this JVM (see SPARK-2243). The other SparkContext was created at:
org.apache.spark.sql.DatasetCacheSuite.beforeAll(DatasetCacheSuite.scala:30)
org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:58)
org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
sbt.ForkMain$Run$2.call(ForkMain.java:296)
sbt.ForkMain$Run$2.call(ForkMain.java:286)
java.util.concurrent.FutureTask.run(FutureTask.java:266)
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
java.lang.Thread.run(Thread.java:748)
11:36:37.949 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
11:36:37.978 ERROR org.apache.spark.SparkContext: Error initializing SparkContext.
java.lang.IllegalStateException: Shutdown hooks cannot be modified during shutdown.
	at org.apache.spark.util.SparkShutdownHookManager.add(ShutdownHookManager.scala:195)
	at org.apache.spark.util.ShutdownHookManager$.addShutdownHook(ShutdownHookManager.scala:153)
	at org.apache.spark.storage.DiskBlockManager.addShutdownHook(DiskBlockManager.scala:157)
	at org.apache.spark.storage.DiskBlockManager.<init>(DiskBlockManager.scala:54)
	at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:190)
	at org.apache.spark.SparkEnv$.create(SparkEnv.scala:393)
	at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:189)
	at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:442)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:130)
	at org.apache.spark.sql.test.TestSparkSession.<init>(TestSQLContext.scala:30)
	at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession(SharedSparkSession.scala:100)
	at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession$(SharedSparkSession.scala:98)
	at org.apache.spark.sql.execution.AggregatingAccumulatorSuite.createSparkSession(AggregatingAccumulatorSuite.scala:34)
	at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession(SharedSparkSession.scala:114)
	at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession$(SharedSparkSession.scala:112)
	at org.apache.spark.sql.execution.AggregatingAccumulatorSuite.initializeSession(AggregatingAccumulatorSuite.scala:34)
	at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll(SharedSparkSession.scala:122)
	at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll$(SharedSparkSession.scala:121)
	at org.apache.spark.sql.execution.AggregatingAccumulatorSuite.org$apache$spark$sql$test$SharedSparkSession$$super$beforeAll(AggregatingAccumulatorSuite.scala:34)
	at org.apache.spark.sql.test.SharedSparkSession.beforeAll(SharedSparkSession.scala:45)
	at org.apache.spark.sql.test.SharedSparkSession.beforeAll$(SharedSparkSession.scala:43)
	at org.apache.spark.sql.execution.AggregatingAccumulatorSuite.beforeAll(AggregatingAccumulatorSuite.scala:34)
	at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
	at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
	at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:58)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
11:36:37.981 WARN org.apache.spark.sql.execution.AggregatingAccumulatorSuite: 

===== POSSIBLE THREAD LEAK IN SUITE o.a.s.sql.execution.AggregatingAccumulatorSuite, thread names: rpc-boss-2129-1 =====

[info] org.apache.spark.sql.execution.AggregatingAccumulatorSuite *** ABORTED *** (36 milliseconds)
[info]   java.lang.IllegalStateException: Shutdown hooks cannot be modified during shutdown.
[info]   at org.apache.spark.util.SparkShutdownHookManager.add(ShutdownHookManager.scala:195)
[info]   at org.apache.spark.util.ShutdownHookManager$.addShutdownHook(ShutdownHookManager.scala:153)
[info]   at org.apache.spark.storage.DiskBlockManager.addShutdownHook(DiskBlockManager.scala:157)
[info]   at org.apache.spark.storage.DiskBlockManager.<init>(DiskBlockManager.scala:54)
[info]   at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:190)
[info]   at org.apache.spark.SparkEnv$.create(SparkEnv.scala:393)
[info]   at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:189)
[info]   at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
[info]   at org.apache.spark.SparkContext.<init>(SparkContext.scala:442)
[info]   at org.apache.spark.SparkContext.<init>(SparkContext.scala:130)
[info]   at org.apache.spark.sql.test.TestSparkSession.<init>(TestSQLContext.scala:30)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession(SharedSparkSession.scala:100)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession$(SharedSparkSession.scala:98)
[info]   at org.apache.spark.sql.execution.AggregatingAccumulatorSuite.createSparkSession(AggregatingAccumulatorSuite.scala:34)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession(SharedSparkSession.scala:114)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession$(SharedSparkSession.scala:112)
[info]   at org.apache.spark.sql.execution.AggregatingAccumulatorSuite.initializeSession(AggregatingAccumulatorSuite.scala:34)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll(SharedSparkSession.scala:122)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll$(SharedSparkSession.scala:121)
[info]   at org.apache.spark.sql.execution.AggregatingAccumulatorSuite.org$apache$spark$sql$test$SharedSparkSession$$super$beforeAll(AggregatingAccumulatorSuite.scala:34)
[info]   at org.apache.spark.sql.test.SharedSparkSession.beforeAll(SharedSparkSession.scala:45)
[info]   at org.apache.spark.sql.test.SharedSparkSession.beforeAll$(SharedSparkSession.scala:43)
[info]   at org.apache.spark.sql.execution.AggregatingAccumulatorSuite.beforeAll(AggregatingAccumulatorSuite.scala:34)
[info]   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
[info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
[info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
[info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:58)
[info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
[info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
[info]   at sbt.ForkMain$Run$2.call(ForkMain.java:296)
[info]   at sbt.ForkMain$Run$2.call(ForkMain.java:286)
[info]   at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[info]   at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[info]   at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[info]   at java.lang.Thread.run(Thread.java:748)
[info] DataSourceAnalysisSuite:
[info] - convertStaticPartitions only handle INSERT having at least static partitions (caseSensitive: true) (1 millisecond)
[info] - Missing columns (caseSensitive: true) (1 millisecond)
[info] - Missing partitioning columns (caseSensitive: true) (4 milliseconds)
[info] - Wrong partitioning columns (caseSensitive: true) (2 milliseconds)
[info] - Static partitions need to appear before dynamic partitions (caseSensitive: true) (0 milliseconds)
[info] - All static partitions (caseSensitive: true) (2 milliseconds)
[info] - Static partition and dynamic partition (caseSensitive: true) (0 milliseconds)
[info] - convertStaticPartitions only handle INSERT having at least static partitions (caseSensitive: false) (0 milliseconds)
[info] - Missing columns (caseSensitive: false) (0 milliseconds)
[info] - Missing partitioning columns (caseSensitive: false) (0 milliseconds)
[info] - Wrong partitioning columns (caseSensitive: false) (1 millisecond)
[info] - Static partitions need to appear before dynamic partitions (caseSensitive: false) (0 milliseconds)
[info] - All static partitions (caseSensitive: false) (0 milliseconds)
[info] - Static partition and dynamic partition (caseSensitive: false) (0 milliseconds)
[info] OptimizeMetadataOnlyQuerySuite:
11:36:38.020 WARN org.apache.spark.SparkContext: Another SparkContext is being constructed (or threw an exception in its constructor). This may indicate an error, since only one SparkContext should be running in this JVM (see SPARK-2243). The other SparkContext was created at:
org.apache.spark.sql.execution.AggregatingAccumulatorSuite.beforeAll(AggregatingAccumulatorSuite.scala:34)
org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:58)
org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
sbt.ForkMain$Run$2.call(ForkMain.java:296)
sbt.ForkMain$Run$2.call(ForkMain.java:286)
java.util.concurrent.FutureTask.run(FutureTask.java:266)
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
java.lang.Thread.run(Thread.java:748)
11:36:38.050 ERROR org.apache.spark.SparkContext: Error initializing SparkContext.
java.lang.IllegalStateException: Shutdown hooks cannot be modified during shutdown.
	at org.apache.spark.util.SparkShutdownHookManager.add(ShutdownHookManager.scala:195)
	at org.apache.spark.util.ShutdownHookManager$.addShutdownHook(ShutdownHookManager.scala:153)
	at org.apache.spark.storage.DiskBlockManager.addShutdownHook(DiskBlockManager.scala:157)
	at org.apache.spark.storage.DiskBlockManager.<init>(DiskBlockManager.scala:54)
	at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:190)
	at org.apache.spark.SparkEnv$.create(SparkEnv.scala:393)
	at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:189)
	at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:442)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:130)
	at org.apache.spark.sql.test.TestSparkSession.<init>(TestSQLContext.scala:30)
	at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession(SharedSparkSession.scala:100)
	at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession$(SharedSparkSession.scala:98)
	at org.apache.spark.sql.execution.OptimizeMetadataOnlyQuerySuite.createSparkSession(OptimizeMetadataOnlyQuerySuite.scala:28)
	at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession(SharedSparkSession.scala:114)
	at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession$(SharedSparkSession.scala:112)
	at org.apache.spark.sql.execution.OptimizeMetadataOnlyQuerySuite.initializeSession(OptimizeMetadataOnlyQuerySuite.scala:28)
	at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll(SharedSparkSession.scala:122)
	at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll$(SharedSparkSession.scala:121)
	at org.apache.spark.sql.execution.OptimizeMetadataOnlyQuerySuite.org$apache$spark$sql$test$SharedSparkSession$$super$beforeAll(OptimizeMetadataOnlyQuerySuite.scala:28)
	at org.apache.spark.sql.test.SharedSparkSession.beforeAll(SharedSparkSession.scala:45)
	at org.apache.spark.sql.test.SharedSparkSession.beforeAll$(SharedSparkSession.scala:43)
	at org.apache.spark.sql.execution.OptimizeMetadataOnlyQuerySuite.beforeAll(OptimizeMetadataOnlyQuerySuite.scala:32)
	at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
	at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
	at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:58)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
11:36:38.056 WARN org.apache.spark.sql.execution.OptimizeMetadataOnlyQuerySuite: 

===== POSSIBLE THREAD LEAK IN SUITE o.a.s.sql.execution.OptimizeMetadataOnlyQuerySuite, thread names: rpc-boss-2132-1 =====

[info] org.apache.spark.sql.execution.OptimizeMetadataOnlyQuerySuite *** ABORTED *** (39 milliseconds)
[info]   java.lang.IllegalStateException: Shutdown hooks cannot be modified during shutdown.
[info]   at org.apache.spark.util.SparkShutdownHookManager.add(ShutdownHookManager.scala:195)
[info]   at org.apache.spark.util.ShutdownHookManager$.addShutdownHook(ShutdownHookManager.scala:153)
[info]   at org.apache.spark.storage.DiskBlockManager.addShutdownHook(DiskBlockManager.scala:157)
[info]   at org.apache.spark.storage.DiskBlockManager.<init>(DiskBlockManager.scala:54)
[info]   at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:190)
[info]   at org.apache.spark.SparkEnv$.create(SparkEnv.scala:393)
[info]   at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:189)
[info]   at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
[info]   at org.apache.spark.SparkContext.<init>(SparkContext.scala:442)
[info]   at org.apache.spark.SparkContext.<init>(SparkContext.scala:130)
[info]   at org.apache.spark.sql.test.TestSparkSession.<init>(TestSQLContext.scala:30)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession(SharedSparkSession.scala:100)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession$(SharedSparkSession.scala:98)
[info]   at org.apache.spark.sql.execution.OptimizeMetadataOnlyQuerySuite.createSparkSession(OptimizeMetadataOnlyQuerySuite.scala:28)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession(SharedSparkSession.scala:114)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession$(SharedSparkSession.scala:112)
[info]   at org.apache.spark.sql.execution.OptimizeMetadataOnlyQuerySuite.initializeSession(OptimizeMetadataOnlyQuerySuite.scala:28)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll(SharedSparkSession.scala:122)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll$(SharedSparkSession.scala:121)
[info]   at org.apache.spark.sql.execution.OptimizeMetadataOnlyQuerySuite.org$apache$spark$sql$test$SharedSparkSession$$super$beforeAll(OptimizeMetadataOnlyQuerySuite.scala:28)
[info]   at org.apache.spark.sql.test.SharedSparkSession.beforeAll(SharedSparkSession.scala:45)
[info]   at org.apache.spark.sql.test.SharedSparkSession.beforeAll$(SharedSparkSession.scala:43)
[info]   at org.apache.spark.sql.execution.OptimizeMetadataOnlyQuerySuite.beforeAll(OptimizeMetadataOnlyQuerySuite.scala:32)
[info]   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
[info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
[info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
[info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:58)
[info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
[info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
[info]   at sbt.ForkMain$Run$2.call(ForkMain.java:296)
[info]   at sbt.ForkMain$Run$2.call(ForkMain.java:286)
[info]   at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[info]   at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[info]   at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[info]   at java.lang.Thread.run(Thread.java:748)
[info] V2CommandsCaseSensitivitySuite:
11:36:38.067 WARN org.apache.spark.SparkContext: Another SparkContext is being constructed (or threw an exception in its constructor). This may indicate an error, since only one SparkContext should be running in this JVM (see SPARK-2243). The other SparkContext was created at:
org.apache.spark.sql.execution.OptimizeMetadataOnlyQuerySuite.beforeAll(OptimizeMetadataOnlyQuerySuite.scala:32)
org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:58)
org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
sbt.ForkMain$Run$2.call(ForkMain.java:296)
sbt.ForkMain$Run$2.call(ForkMain.java:286)
java.util.concurrent.FutureTask.run(FutureTask.java:266)
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
java.lang.Thread.run(Thread.java:748)
11:36:38.096 ERROR org.apache.spark.SparkContext: Error initializing SparkContext.
java.lang.IllegalStateException: Shutdown hooks cannot be modified during shutdown.
	at org.apache.spark.util.SparkShutdownHookManager.add(ShutdownHookManager.scala:195)
	at org.apache.spark.util.ShutdownHookManager$.addShutdownHook(ShutdownHookManager.scala:153)
	at org.apache.spark.storage.DiskBlockManager.addShutdownHook(DiskBlockManager.scala:157)
	at org.apache.spark.storage.DiskBlockManager.<init>(DiskBlockManager.scala:54)
	at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:190)
	at org.apache.spark.SparkEnv$.create(SparkEnv.scala:393)
	at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:189)
	at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:442)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:130)
	at org.apache.spark.sql.test.TestSparkSession.<init>(TestSQLContext.scala:30)
	at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession(SharedSparkSession.scala:100)
	at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession$(SharedSparkSession.scala:98)
	at org.apache.spark.sql.connector.V2CommandsCaseSensitivitySuite.createSparkSession(V2CommandsCaseSensitivitySuite.scala:32)
	at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession(SharedSparkSession.scala:114)
	at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession$(SharedSparkSession.scala:112)
	at org.apache.spark.sql.connector.V2CommandsCaseSensitivitySuite.initializeSession(V2CommandsCaseSensitivitySuite.scala:32)
	at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll(SharedSparkSession.scala:122)
	at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll$(SharedSparkSession.scala:121)
	at org.apache.spark.sql.connector.V2CommandsCaseSensitivitySuite.org$apache$spark$sql$test$SharedSparkSession$$super$beforeAll(V2CommandsCaseSensitivitySuite.scala:32)
	at org.apache.spark.sql.test.SharedSparkSession.beforeAll(SharedSparkSession.scala:45)
	at org.apache.spark.sql.test.SharedSparkSession.beforeAll$(SharedSparkSession.scala:43)
	at org.apache.spark.sql.connector.V2CommandsCaseSensitivitySuite.beforeAll(V2CommandsCaseSensitivitySuite.scala:32)
	at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
	at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
	at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:58)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
11:36:38.101 WARN org.apache.spark.sql.connector.V2CommandsCaseSensitivitySuite: 

===== POSSIBLE THREAD LEAK IN SUITE o.a.s.sql.connector.V2CommandsCaseSensitivitySuite, thread names: rpc-boss-2135-1 =====

[info] org.apache.spark.sql.connector.V2CommandsCaseSensitivitySuite *** ABORTED *** (39 milliseconds)
[info]   java.lang.IllegalStateException: Shutdown hooks cannot be modified during shutdown.
[info]   at org.apache.spark.util.SparkShutdownHookManager.add(ShutdownHookManager.scala:195)
[info]   at org.apache.spark.util.ShutdownHookManager$.addShutdownHook(ShutdownHookManager.scala:153)
[info]   at org.apache.spark.storage.DiskBlockManager.addShutdownHook(DiskBlockManager.scala:157)
[info]   at org.apache.spark.storage.DiskBlockManager.<init>(DiskBlockManager.scala:54)
[info]   at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:190)
[info]   at org.apache.spark.SparkEnv$.create(SparkEnv.scala:393)
[info]   at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:189)
[info]   at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
[info]   at org.apache.spark.SparkContext.<init>(SparkContext.scala:442)
[info]   at org.apache.spark.SparkContext.<init>(SparkContext.scala:130)
[info]   at org.apache.spark.sql.test.TestSparkSession.<init>(TestSQLContext.scala:30)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession(SharedSparkSession.scala:100)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession$(SharedSparkSession.scala:98)
[info]   at org.apache.spark.sql.connector.V2CommandsCaseSensitivitySuite.createSparkSession(V2CommandsCaseSensitivitySuite.scala:32)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession(SharedSparkSession.scala:114)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession$(SharedSparkSession.scala:112)
[info]   at org.apache.spark.sql.connector.V2CommandsCaseSensitivitySuite.initializeSession(V2CommandsCaseSensitivitySuite.scala:32)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll(SharedSparkSession.scala:122)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll$(SharedSparkSession.scala:121)
[info]   at org.apache.spark.sql.connector.V2CommandsCaseSensitivitySuite.org$apache$spark$sql$test$SharedSparkSession$$super$beforeAll(V2CommandsCaseSensitivitySuite.scala:32)
[info]   at org.apache.spark.sql.test.SharedSparkSession.beforeAll(SharedSparkSession.scala:45)
[info]   at org.apache.spark.sql.test.SharedSparkSession.beforeAll$(SharedSparkSession.scala:43)
[info]   at org.apache.spark.sql.connector.V2CommandsCaseSensitivitySuite.beforeAll(V2CommandsCaseSensitivitySuite.scala:32)
[info]   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
[info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
[info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
[info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:58)
[info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
[info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
[info]   at sbt.ForkMain$Run$2.call(ForkMain.java:296)
[info]   at sbt.ForkMain$Run$2.call(ForkMain.java:286)
[info]   at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[info]   at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[info]   at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[info]   at java.lang.Thread.run(Thread.java:748)
[info] JoinSuite:
11:36:38.119 WARN org.apache.spark.SparkContext: Another SparkContext is being constructed (or threw an exception in its constructor). This may indicate an error, since only one SparkContext should be running in this JVM (see SPARK-2243). The other SparkContext was created at:
org.apache.spark.sql.connector.V2CommandsCaseSensitivitySuite.beforeAll(V2CommandsCaseSensitivitySuite.scala:32)
org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:58)
org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
sbt.ForkMain$Run$2.call(ForkMain.java:296)
sbt.ForkMain$Run$2.call(ForkMain.java:286)
java.util.concurrent.FutureTask.run(FutureTask.java:266)
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
java.lang.Thread.run(Thread.java:748)
11:36:38.151 ERROR org.apache.spark.SparkContext: Error initializing SparkContext.
java.lang.IllegalStateException: Shutdown hooks cannot be modified during shutdown.
	at org.apache.spark.util.SparkShutdownHookManager.add(ShutdownHookManager.scala:195)
	at org.apache.spark.util.ShutdownHookManager$.addShutdownHook(ShutdownHookManager.scala:153)
	at org.apache.spark.storage.DiskBlockManager.addShutdownHook(DiskBlockManager.scala:157)
	at org.apache.spark.storage.DiskBlockManager.<init>(DiskBlockManager.scala:54)
	at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:190)
	at org.apache.spark.SparkEnv$.create(SparkEnv.scala:393)
	at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:189)
	at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:442)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:130)
	at org.apache.spark.sql.test.TestSparkSession.<init>(TestSQLContext.scala:30)
	at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession(SharedSparkSession.scala:100)
	at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession$(SharedSparkSession.scala:98)
	at org.apache.spark.sql.JoinSuite.createSparkSession(JoinSuite.scala:40)
	at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession(SharedSparkSession.scala:114)
	at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession$(SharedSparkSession.scala:112)
	at org.apache.spark.sql.JoinSuite.initializeSession(JoinSuite.scala:40)
	at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll(SharedSparkSession.scala:122)
	at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll$(SharedSparkSession.scala:121)
	at org.apache.spark.sql.JoinSuite.org$apache$spark$sql$test$SharedSparkSession$$super$beforeAll(JoinSuite.scala:40)
	at org.apache.spark.sql.test.SharedSparkSession.beforeAll(SharedSparkSession.scala:45)
	at org.apache.spark.sql.test.SharedSparkSession.beforeAll$(SharedSparkSession.scala:43)
	at org.apache.spark.sql.JoinSuite.beforeAll(JoinSuite.scala:40)
	at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
	at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
	at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:58)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
11:36:38.155 WARN org.apache.spark.sql.JoinSuite: 

===== POSSIBLE THREAD LEAK IN SUITE o.a.s.sql.JoinSuite, thread names: rpc-boss-2138-1 =====

[info] org.apache.spark.sql.JoinSuite *** ABORTED *** (41 milliseconds)
[info]   java.lang.IllegalStateException: Shutdown hooks cannot be modified during shutdown.
[info]   at org.apache.spark.util.SparkShutdownHookManager.add(ShutdownHookManager.scala:195)
[info]   at org.apache.spark.util.ShutdownHookManager$.addShutdownHook(ShutdownHookManager.scala:153)
[info]   at org.apache.spark.storage.DiskBlockManager.addShutdownHook(DiskBlockManager.scala:157)
[info]   at org.apache.spark.storage.DiskBlockManager.<init>(DiskBlockManager.scala:54)
[info]   at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:190)
[info]   at org.apache.spark.SparkEnv$.create(SparkEnv.scala:393)
[info]   at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:189)
[info]   at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
[info]   at org.apache.spark.SparkContext.<init>(SparkContext.scala:442)
[info]   at org.apache.spark.SparkContext.<init>(SparkContext.scala:130)
[info]   at org.apache.spark.sql.test.TestSparkSession.<init>(TestSQLContext.scala:30)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession(SharedSparkSession.scala:100)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.createSparkSession$(SharedSparkSession.scala:98)
[info]   at org.apache.spark.sql.JoinSuite.createSparkSession(JoinSuite.scala:40)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession(SharedSparkSession.scala:114)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.initializeSession$(SharedSparkSession.scala:112)
[info]   at org.apache.spark.sql.JoinSuite.initializeSession(JoinSuite.scala:40)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll(SharedSparkSession.scala:122)
[info]   at org.apache.spark.sql.test.SharedSparkSessionBase.beforeAll$(SharedSparkSession.scala:121)
[info]   at org.apache.spark.sql.JoinSuite.org$apache$spark$sql$test$SharedSparkSession$$super$beforeAll(JoinSuite.scala:40)
[info]   at org.apache.spark.sql.test.SharedSparkSession.beforeAll(SharedSparkSession.scala:45)
[info]   at org.apache.spark.sql.test.SharedSparkSession.beforeAll$(SharedSparkSession.scala:43)
[info]   at org.apache.spark.sql.JoinSuite.beforeAll(JoinSuite.scala:40)
[info]   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
[info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
[info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
[info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:58)
[info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
[info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
[info]   at sbt.ForkMain$Run$2.call(ForkMain.java:296)
[info]   at sbt.ForkMain$Run$2.call(ForkMain.java:286)
[info]   at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[info]   at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[info]   at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[info]   at java.lang.Thread.run(Thread.java:748)
[info] DataFrameTimeWindowingSuite:
11:36:38.169 WARN org.apache.spark.SparkContext: Another SparkContext is being constructed (or threw an exception in its constructor). This may indicate an error, since only one SparkContext should be running in this JVM (see SPARK-2243). The other SparkContext was created at:
org.apache.spark.sql.JoinSuite.beforeAll(JoinSuite.scala:40)
org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:58)
org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
sbt.ForkMain$Run$2.call(ForkMain.java:296)
sbt.ForkMain$Run$2.call(ForkMain.java:286)
java.util.concurrent.FutureTask.run(FutureTask.java:266)
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
java.lang.Thread.run(Thread.java:748)
Recording test results
Finished: FAILURE
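
Post-mortem note: every suite from DatasetCacheSuite onward aborts with the same root cause. The forked JVM had already begun running its shutdown hooks (the trigger lies somewhere in the 29,616 KB of log skipped above), so each subsequent SparkContext died at DiskBlockManager.addShutdownHook when it tried to register a new hook. The "Another SparkContext is being constructed" warnings (SPARK-2243) are collateral: each one just points back at the previous suite's half-constructed context. Below is a minimal sketch of the kind of guard that raises this exception, with behavior inferred from the ShutdownHookManager.scala:195 frames in the traces; the names and structure are illustrative, not Spark's actual internals.

// Sketch of a shutdown-hook registry with a freeze-on-shutdown guard.
// Assumption: shape inferred from the stack traces above, not Spark's exact code.
object SketchShutdownHookManager {
  private val hooks =
    scala.collection.mutable.PriorityQueue.empty[(Int, () => Unit)](Ordering.by(_._1))
  @volatile private var shuttingDown = false

  def add(priority: Int, hook: () => Unit): AnyRef = hooks.synchronized {
    // Once shutdown has started, the hook set is frozen; this is the
    // IllegalStateException every aborted suite above runs into.
    if (shuttingDown) {
      throw new IllegalStateException("Shutdown hooks cannot be modified during shutdown.")
    }
    val entry = (priority, hook)
    hooks.enqueue(entry)
    entry
  }

  // Invoked once from a JVM Runtime shutdown hook: freeze the set, then
  // run hooks highest-priority first, swallowing individual failures.
  def runAll(): Unit = {
    hooks.synchronized { shuttingDown = true }
    while (hooks.synchronized(hooks.nonEmpty)) {
      val hook = hooks.synchronized(hooks.dequeue()._2)
      try hook() catch { case t: Throwable => t.printStackTrace() }
    }
  }
}

Under this reading, the first real failure precedes this excerpt; the repeated ABORTED traces and rpc-boss thread-leak warnings here are symptoms of suites racing an already-shutting-down JVM, not independent bugs in each suite.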