Console Output

Skipping 20,487 KB..
	... (tail of a stack trace truncated by the 20,487 KB skip; its frames are identical to the full traces below)
11:34:00.029 WARN org.apache.spark.sql.execution.command.DropTableCommand: org.apache.spark.sql.AnalysisException: Table or view not found: view5; line 1 pos 14;
'SubqueryAlias spark_catalog.default.view6
+- View (`default`.`view6`, [id#422378L,id1#422379L])
   +- 'Project [*]
      +- 'UnresolvedRelation [view5]

org.apache.spark.sql.AnalysisException: Table or view not found: view5; line 1 pos 14;
'SubqueryAlias spark_catalog.default.view6
+- View (`default`.`view6`, [id#422378L,id1#422379L])
   +- 'Project [*]
      +- 'UnresolvedRelation [view5]

	at org.apache.spark.sql.catalyst.analysis.package$AnalysisErrorAt.failAnalysis(package.scala:42)
	at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.$anonfun$checkAnalysis$1(CheckAnalysis.scala:106)
	at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.$anonfun$checkAnalysis$1$adapted(CheckAnalysis.scala:92)
	at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:177)
	at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$foreachUp$1(TreeNode.scala:176)
	at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$foreachUp$1$adapted(TreeNode.scala:176)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:176)
	at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$foreachUp$1(TreeNode.scala:176)
	at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$foreachUp$1$adapted(TreeNode.scala:176)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:176)
	at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$foreachUp$1(TreeNode.scala:176)
	at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$foreachUp$1$adapted(TreeNode.scala:176)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:176)
	at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.checkAnalysis(CheckAnalysis.scala:92)
	at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.checkAnalysis$(CheckAnalysis.scala:89)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.checkAnalysis(Analyzer.scala:130)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.$anonfun$executeAndCheck$1(Analyzer.scala:156)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:201)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:153)
	at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:68)
	at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
	at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:120)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764)
	at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:120)
	at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:68)
	at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:66)
	at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:58)
	at org.apache.spark.sql.Dataset$.$anonfun$ofRows$1(Dataset.scala:91)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764)
	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:89)
	at org.apache.spark.sql.SparkSession.table(SparkSession.scala:589)
	at org.apache.spark.sql.execution.command.DropTableCommand.run(ddl.scala:240)
	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
	at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
	at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229)
	at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3616)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
	at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3614)
	at org.apache.spark.sql.Dataset.<init>(Dataset.scala:229)
	at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764)
	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
	at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:607)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764)
	at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:602)
	at org.apache.spark.sql.test.SQLTestUtilsBase.$anonfun$withView$2(SQLTestUtils.scala:317)
	at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
	at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:38)
	at org.apache.spark.sql.test.SQLTestUtilsBase.$anonfun$withView$1(SQLTestUtils.scala:316)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1386)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withView(SQLTestUtils.scala:316)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withView$(SQLTestUtils.scala:314)
	at org.apache.spark.sql.execution.SQLViewSuite.withView(SQLViewSuite.scala:31)
	at org.apache.spark.sql.execution.SQLViewSuite.$anonfun$new$184(SQLViewSuite.scala:681)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:151)
	at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
	at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:286)
	at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:58)
	at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
	at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
	at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:58)
	at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:393)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:381)
	at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:376)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:458)
	at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite.run(Suite.scala:1124)
	at org.scalatest.Suite.run$(Suite.scala:1106)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:518)
	at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:58)
	at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
	at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:58)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
11:34:00.039 WARN org.apache.spark.sql.execution.command.DropTableCommand: org.apache.spark.sql.AnalysisException: Table or view not found: view6; line 1 pos 14;
'SubqueryAlias spark_catalog.default.view7
+- View (`default`.`view7`, [id#422380L,id1#422381L])
   +- 'Project [*]
      +- 'UnresolvedRelation [view6]

	... (repeated exception text and stack frames omitted; identical to the view5 failure above)
11:34:00.052 WARN org.apache.spark.sql.execution.command.DropTableCommand: org.apache.spark.sql.AnalysisException: Table or view not found: view7; line 1 pos 14;
'SubqueryAlias spark_catalog.default.view8
+- View (`default`.`view8`, [id#422382L,id1#422383L])
   +- 'Project [*]
      +- 'UnresolvedRelation [view7]

	... (repeated exception text and stack frames omitted; identical to the view5 failure above)
11:34:00.062 WARN org.apache.spark.sql.execution.command.DropTableCommand: org.apache.spark.sql.AnalysisException: Table or view not found: view8; line 1 pos 14;
'SubqueryAlias spark_catalog.default.view9
+- View (`default`.`view9`, [id#422384L,id1#422385L])
   +- 'Project [*]
      +- 'UnresolvedRelation [view8]

	... (repeated exception text and stack frames omitted; identical to the view5 failure above)
11:34:00.076 WARN org.apache.spark.sql.execution.command.DropTableCommand: org.apache.spark.sql.AnalysisException: Table or view not found: view9; line 1 pos 14;
'SubqueryAlias spark_catalog.default.view10
+- View (`default`.`view10`, [id#422386L,id1#422387L])
   +- 'Project [*]
      +- 'UnresolvedRelation [view9]

	... (repeated exception text and stack frames omitted; identical to the view5 failure above)
[info] - restrict the nested level of a view (466 milliseconds)
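
(The WARN/AnalysisException blocks above are cleanup noise from this test, not failures: SQLTestUtils.withView drops view1..view10 in creation order, and DropTableCommand (ddl.scala:240) first re-resolves each view via sparkSession.table(...) so it can be uncached. Because the view one level down is already gone, analysis fails and the exception is logged as WARN and swallowed. A minimal sketch of the pattern, assuming a local-mode session; the view names and (id, id1) columns match the plans above, and the test itself exercises the depth limit governed by spark.sql.view.maxNestedViewDepth.)

```scala
import org.apache.spark.sql.SparkSession

object NestedViewDropWarnings {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[2]")                  // assumed local session for the demo
      .appName("nested-view-drop-demo")
      .getOrCreate()

    // A chain of permanent views, each selecting from the previous one,
    // mirroring default.view5 .. default.view10 in the plans above.
    spark.sql("CREATE VIEW view1 AS SELECT id, id AS id1 FROM range(10)")
    (2 to 10).foreach { i =>
      spark.sql(s"CREATE VIEW view$i AS SELECT * FROM view${i - 1}")
    }

    // Dropping in creation order: by the time view6 is dropped, view5 is
    // already gone, so re-resolving view6 (done to uncache it) throws the
    // AnalysisException that DropTableCommand logs and ignores.
    (1 to 10).foreach(i => spark.sql(s"DROP VIEW IF EXISTS view$i"))

    spark.stop()
  }
}
```
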
[info] - permanent view should be case-preserving (398 milliseconds)
[info] - sparkSession API view resolution with different default database (1 second, 82 milliseconds)
[info] - SPARK-23519 view should be created even when query output contains duplicate col name (1 second, 872 milliseconds)
11:34:03.495 WARN org.apache.spark.sql.execution.SimpleSQLViewSuite: 

===== POSSIBLE THREAD LEAK IN SUITE o.a.s.sql.execution.SimpleSQLViewSuite, thread names: rpc-boss-1847-1, shuffle-boss-1850-1 =====
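
(This banner comes from the suite-level thread audit in SparkFunSuite, which snapshots live thread names before a suite and diffs them afterwards, flagging survivors such as the rpc-boss-*/shuffle-boss-* threads here. A minimal sketch of the idea, not Spark's actual ThreadAudit code:)

```scala
import scala.collection.JavaConverters._

// Snapshot live thread names before a suite and report any extras afterwards.
trait ThreadLeakAudit {
  private var threadsBefore: Set[String] = Set.empty

  private def liveThreadNames(): Set[String] =
    Thread.getAllStackTraces.keySet.asScala.map(_.getName).toSet

  def beforeSuiteAudit(): Unit = threadsBefore = liveThreadNames()

  def afterSuiteAudit(suiteName: String): Unit = {
    val leaked = liveThreadNames() -- threadsBefore
    if (leaked.nonEmpty) {
      println(s"===== POSSIBLE THREAD LEAK IN SUITE $suiteName, " +
        s"thread names: ${leaked.mkString(", ")} =====")
    }
  }
}
```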

[info] ApproxCountDistinctForIntervalsQuerySuite:
[info] - binary logistic regression with intercept without regularization with bound (17 seconds, 226 milliseconds)
[info] - binary logistic regression without intercept without regularization (3 seconds, 585 milliseconds)
11:34:10.727 WARN org.apache.spark.scheduler.TaskSetManager: Stage 0 contains a task of very large size (1618 KiB). The maximum recommended task size is 1000 KiB.
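
(This warning means the serialized task, the closure plus any embedded partition data, exceeded the recommended 1000 KiB. The usual cause is a large driver-side value captured in a closure or parallelized directly; a hedged sketch of the standard fix, broadcasting the value once per executor, with illustrative sizes:)

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[2]").appName("task-size-demo").getOrCreate()

// A large driver-side value: captured directly in a closure it would be
// serialized into every task and can trip the task-size warning.
val bigLookup: Map[Int, String] = (1 to 200000).map(i => i -> ("value-" + i)).toMap

// Broadcast once so each executor fetches a single copy instead.
val bc = spark.sparkContext.broadcast(bigLookup)
val n = spark.sparkContext
  .parallelize(1 to 100)
  .map(i => bc.value.getOrElse(i, ""))
  .count()

spark.stop()
```
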
[info] - test ApproxCountDistinctForIntervals with large number of endpoints (7 seconds, 608 milliseconds)
11:34:11.304 WARN org.apache.spark.sql.ApproxCountDistinctForIntervalsQuerySuite: 

===== POSSIBLE THREAD LEAK IN SUITE o.a.s.sql.ApproxCountDistinctForIntervalsQuerySuite, thread names: rpc-boss-1853-1, shuffle-boss-1856-1 =====

[info] MemorySinkSuite:
[info] - directly add data in Append output mode (1 second, 522 milliseconds)
[info] - binary logistic regression without intercept without regularization with bound (3 seconds, 177 milliseconds)
[info] - directly add data in Update output mode (1 second, 125 milliseconds)
[info] - directly add data in Complete output mode (1 second, 153 milliseconds)
11:34:16.031 WARN org.apache.spark.sql.streaming.StreamingQueryManager: Temporary checkpoint location created which is deleted normally when the query didn't fail: /home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/temporary-e0319834-b501-41ae-a5a1-d8015ddd4515. If it's required to delete it under any circumstances, please set spark.sql.streaming.forceDeleteTempCheckpointLocation to true. Important to know deleting temp checkpoint folder is best effort.
[info] - registering as a table in Append output mode (1 second, 117 milliseconds)
11:34:16.567 WARN org.apache.spark.sql.streaming.StreamingQueryManager: Temporary checkpoint location created which is deleted normally when the query didn't fail: /home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/temporary-1ba88823-bacd-4a3f-8551-d7e21b5a945b. If it's required to delete it under any circumstances, please set spark.sql.streaming.forceDeleteTempCheckpointLocation to true. Important to know deleting temp checkpoint folder is best effort.
[info] - registering as a table in Complete output mode (2 seconds, 338 milliseconds)
11:34:18.901 WARN org.apache.spark.sql.streaming.StreamingQueryManager: Temporary checkpoint location created which is deleted normally when the query didn't fail: /home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/temporary-4df28336-9641-465d-8ce0-713be67421d2. If it's required to delete it under any circumstances, please set spark.sql.streaming.forceDeleteTempCheckpointLocation to true. Important to know deleting temp checkpoint folder is best effort.
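
(The three StreamingQueryManager warnings above fire whenever a streaming query starts without an explicit checkpoint location. A sketch of the two ways to avoid them, assuming a SparkSession named spark; the path is a placeholder:)

```scala
// Option 1: give the query an explicit checkpoint location.
val query = spark.readStream
  .format("rate")
  .load()
  .writeStream
  .format("memory")
  .queryName("demo")
  .option("checkpointLocation", "/tmp/demo-checkpoint")  // placeholder path
  .outputMode("append")
  .start()

// Option 2: keep the temporary checkpoint but force its deletion, as the
// warning text itself suggests.
spark.conf.set("spark.sql.streaming.forceDeleteTempCheckpointLocation", "true")
```
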
[info] - registering as a table in Update output mode (742 milliseconds)
[info] - MemoryPlan statistics (139 milliseconds)
[info] - stress test !!! IGNORED !!!
[info] - error when no name is specified (4 milliseconds)
[info] - error if attempting to resume specific checkpoint (434 milliseconds)
[info] - data writer (7 milliseconds)
[info] - streaming writer (100 milliseconds)
11:34:20.442 WARN org.apache.spark.sql.execution.streaming.MemorySinkSuite: 

===== POSSIBLE THREAD LEAK IN SUITE o.a.s.sql.execution.streaming.MemorySinkSuite, thread names: state-store-maintenance-task, rpc-boss-1859-1, block-manager-slave-async-thread-pool-96, block-manager-slave-async-thread-pool-29, shuffle-boss-1862-1, block-manager-slave-async-thread-pool-55, block-manager-slave-async-thread-pool-98, block-manager-slave-async-thread-pool-59, block-manager-slave-async-thread-pool-16, block-manager-slave-async-thread-pool-81, block-manager-slave-async-thread-pool-35 =====

[info] ColumnVectorSuite:
[info] - boolean (6 milliseconds)
[info] - byte (7 milliseconds)
[info] - short (5 milliseconds)
[info] - int (4 milliseconds)
[info] - date (4 milliseconds)
[info] - long (4 milliseconds)
[info] - timestamp (2 milliseconds)
[info] - float (2 milliseconds)
[info] - double (2 milliseconds)
[info] - string (3 milliseconds)
[info] - binary (3 milliseconds)
[info] - mutable ColumnarRow (2 milliseconds)
[info] - array (0 milliseconds)
[info] - struct (0 milliseconds)
[info] - [SPARK-22092] off-heap column vector reallocation corrupts array data (2 milliseconds)
[info] - [SPARK-22092] off-heap column vector reallocation corrupts struct nullability (1 millisecond)
[info] - CachedBatch boolean Apis (1 second, 176 milliseconds)
[info] - CachedBatch byte Apis (9 milliseconds)
[info] - CachedBatch short Apis (5 milliseconds)
[info] - CachedBatch int Apis (5 milliseconds)
[info] - CachedBatch long Apis (10 milliseconds)
[info] - CachedBatch float Apis (8 milliseconds)
[info] - CachedBatch double Apis (395 milliseconds)
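
(ColumnVectorSuite exercises Spark's internal vectorized column storage. A sketch of the on-heap variant's put/get surface; this is an internal API whose signatures can change between versions:)

```scala
import org.apache.spark.sql.execution.vectorized.OnHeapColumnVector
import org.apache.spark.sql.types.IntegerType

// Allocate a small int vector, write a few values, read one back, release.
val vec = new OnHeapColumnVector(4, IntegerType)
(0 until 4).foreach(i => vec.putInt(i, i * 10))
assert(vec.getInt(2) == 20)
vec.close()
```
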
[info] NullableColumnAccessorSuite:
[info] - Nullable NULL column accessor: empty column (30 milliseconds)
[info] - Nullable NULL column accessor: access null values (29 milliseconds)
[info] - Nullable BOOLEAN column accessor: empty column (1 millisecond)
[info] - Nullable BOOLEAN column accessor: access null values (23 milliseconds)
[info] - Nullable BYTE column accessor: empty column (1 millisecond)
[info] - Nullable BYTE column accessor: access null values (27 milliseconds)
[info] - Nullable SHORT column accessor: empty column (1 millisecond)
[info] - Nullable SHORT column accessor: access null values (18 milliseconds)
[info] - Nullable INT column accessor: empty column (0 milliseconds)
[info] - Nullable INT column accessor: access null values (1 millisecond)
[info] - Nullable LONG column accessor: empty column (0 milliseconds)
[info] - Nullable LONG column accessor: access null values (1 millisecond)
[info] - Nullable FLOAT column accessor: empty column (0 milliseconds)
[info] - Nullable FLOAT column accessor: access null values (24 milliseconds)
[info] - Nullable DOUBLE column accessor: empty column (1 millisecond)
[info] - Nullable DOUBLE column accessor: access null values (20 milliseconds)
[info] - Nullable STRING column accessor: empty column (1 millisecond)
[info] - Nullable STRING column accessor: access null values (2 milliseconds)
[info] - Nullable BINARY column accessor: empty column (1 millisecond)
[info] - Nullable BINARY column accessor: access null values (2 milliseconds)
[info] - Nullable COMPACT_DECIMAL column accessor: empty column (0 milliseconds)
[info] - Nullable COMPACT_DECIMAL column accessor: access null values (23 milliseconds)
[info] - Nullable LARGE_DECIMAL column accessor: empty column (1 millisecond)
[info] - Nullable LARGE_DECIMAL column accessor: access null values (18 milliseconds)
[info] - Nullable STRUCT column accessor: empty column (0 milliseconds)
[info] - Nullable STRUCT column accessor: access null values (15 milliseconds)
[info] - Nullable ARRAY column accessor: empty column (1 millisecond)
[info] - Nullable ARRAY column accessor: access null values (20 milliseconds)
[info] - Nullable MAP column accessor: empty column (3 milliseconds)
[info] - Nullable MAP column accessor: access null values (23 milliseconds)
[info] - Nullable CALENDAR_INTERVAL column accessor: empty column (0 milliseconds)
[info] - Nullable CALENDAR_INTERVAL column accessor: access null values (19 milliseconds)
[info] RateStreamProviderSuite:
[info] - RateStreamProvider in registry (27 milliseconds)
[info] - compatible with old path in registry (3 milliseconds)
[info] - binary logistic regression with intercept with L1 regularization (10 seconds, 842 milliseconds)
[info] - microbatch - basic (2 seconds, 31 milliseconds)
[info] - microbatch - restart (4 seconds, 603 milliseconds)
[info] - microbatch - uniform distribution of event timestamps (1 second, 286 milliseconds)
[info] - microbatch - infer offsets (55 milliseconds)
[info] - microbatch - predetermined batch size (44 milliseconds)
[info] - microbatch - data read (30 milliseconds)
[info] - valueAtSecond (0 milliseconds)
[info] - rampUpTime (2 seconds, 469 milliseconds)
[info] - numPartitions (1 second, 929 milliseconds)
[info] - overflow (270 milliseconds)
[info] - illegal option values (7 milliseconds)
[info] - user-specified schema given (5 milliseconds)
[info] - binary logistic regression without intercept with L1 regularization (12 seconds, 93 milliseconds)
[info] - continuous data (1 second, 3 milliseconds)
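
(RateStreamProviderSuite covers the built-in "rate" source; rampUpTime and numPartitions appear in the test titles above, and rowsPerSecond is the source's main documented option. A sketch, assuming a SparkSession named spark:)

```scala
// The rate source emits (timestamp TIMESTAMP, value LONG) rows at a fixed
// rate, ramping up over rampUpTime and split across numPartitions.
val rates = spark.readStream
  .format("rate")
  .option("rowsPerSecond", "10")
  .option("rampUpTime", "2s")
  .option("numPartitions", "2")
  .load()
```
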
11:34:36.394 WARN org.apache.spark.sql.execution.streaming.sources.RateStreamProviderSuite: 

===== POSSIBLE THREAD LEAK IN SUITE o.a.s.sql.execution.streaming.sources.RateStreamProviderSuite, thread names: state-store-maintenance-task, shuffle-boss-1868-1, rpc-boss-1865-1 =====

[info] ArrowWriterSuite:
[info] - simple (253 milliseconds)
[info] - get multiple (28 milliseconds)
[info] - array (4 milliseconds)
[info] - nested array (4 milliseconds)
[info] - struct (5 milliseconds)
[info] - nested struct (5 milliseconds)
[info] - map (193 milliseconds)
[info] - empty map (6 milliseconds)
[info] - nested map (7 milliseconds)
[info] RowDataSourceStrategySuite:
[info] - SPARK-17673: Exchange reuse respects differences in output schema (3 seconds, 960 milliseconds)
11:34:41.269 WARN org.apache.spark.sql.execution.datasources.RowDataSourceStrategySuite: 

===== POSSIBLE THREAD LEAK IN SUITE o.a.s.sql.execution.datasources.RowDataSourceStrategySuite, thread names: shuffle-boss-1874-1, rpc-boss-1871-1 =====

[info] CatalogSuite:
[info] - current database (28 milliseconds)
[info] - list databases (179 milliseconds)
[info] - list tables (183 milliseconds)
[info] - list tables with database (270 milliseconds)
[info] - list functions (290 milliseconds)
[info] - binary logistic regression with intercept with L2 regularization (6 seconds, 453 milliseconds)
[info] - list functions with database (820 milliseconds)
[info] - list columns (127 milliseconds)
[info] - list columns in temporary table (9 milliseconds)
[info] - list columns in database (59 milliseconds)
[info] - Database.toString (3 milliseconds)
[info] - Table.toString (1 millisecond)
[info] - Function.toString (50 milliseconds)
[info] - Column.toString (3 milliseconds)
[info] - catalog classes format in Dataset.show (456 milliseconds)
[info] - dropTempView should not un-cache and drop metastore table if a same-name table exists (525 milliseconds)
[info] - get database (33 milliseconds)
[info] - get table (70 milliseconds)
[info] - get function (47 milliseconds)
[info] - database exists (1 millisecond)
[info] - table exists (89 milliseconds)
[info] - function exists (94 milliseconds)
[info] - createTable with 'path' in options (294 milliseconds)
[info] - createTable without 'path' in options (259 milliseconds)
[info] - clone Catalog (130 milliseconds)
[info] - cacheTable with storage level (12 milliseconds)
11:34:45.647 WARN org.apache.spark.sql.internal.CatalogSuite: 

===== POSSIBLE THREAD LEAK IN SUITE o.a.s.sql.internal.CatalogSuite, thread names: rpc-boss-1877-1, shuffle-boss-1880-1 =====

[info] SQLContextSuite:
[info] - binary logistic regression with intercept with L2 regularization with bound (3 seconds, 23 milliseconds)
[info] - getOrCreate instantiates SQLContext (4 milliseconds)
[info] - getOrCreate return the original SQLContext (99 milliseconds)
== Physical Plan ==
*(1) Project [myadd(1, 2) AS myadd(1, 2)#424314]
+- *(1) Scan OneRowRelation[]


[info] - Sessions of SQLContext (51 milliseconds)
[info] - Catalyst optimization passes are modifiable at runtime (58 milliseconds)
[info] - get all tables (305 milliseconds)
[info] - getting all tables with a database name has no impact on returned table names (181 milliseconds)
[info] - query the returned DataFrame of tables (422 milliseconds)
11:34:47.067 WARN org.apache.spark.sql.SQLContextSuite: 

===== POSSIBLE THREAD LEAK IN SUITE o.a.s.sql.SQLContextSuite, thread names: shuffle-boss-1886-1, rpc-boss-1883-1 =====

[info] ArrowColumnVectorSuite:
[info] - boolean (6 milliseconds)
[info] - byte (4 milliseconds)
[info] - short (6 milliseconds)
[info] - int (6 milliseconds)
[info] - long (7 milliseconds)
[info] - float (6 milliseconds)
[info] - double (6 milliseconds)
[info] - string (9 milliseconds)
[info] - binary (7 milliseconds)
[info] - array (4 milliseconds)
[info] - non nullable struct (5 milliseconds)
[info] - struct (6 milliseconds)
11:34:47.176 WARN org.apache.spark.sql.SparkSession: An existing Spark session exists as the active or default session.
This probably means another suite leaked it. Attempting to stop it before continuing.
This existing Spark session was created at:

org.apache.spark.sql.SQLContextSuite.$anonfun$new$2(SQLContextSuite.scala:43)
org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
org.scalatest.Transformer.apply(Transformer.scala:22)
org.scalatest.Transformer.apply(Transformer.scala:20)
org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:151)
org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
org.scalatest.SuperEngine.runTestImpl(Engine.scala:286)
org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:58)
org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:58)
org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:393)
scala.collection.immutable.List.foreach(List.scala:392)

[info] DataFrameTungstenSuite:
[info] - test simple types (191 milliseconds)
[info] - test struct type (242 milliseconds)
[info] - test nested struct type (208 milliseconds)
[info] - binary logistic regression without intercept with L2 regularization (2 seconds, 289 milliseconds)
[info] - primitive data type accesses in persist data (269 milliseconds)
[info] - access cache multiple times (941 milliseconds)
[info] - access only some column of the all of columns (400 milliseconds)
11:34:49.618 WARN org.apache.spark.sql.DataFrameTungstenSuite: 

===== POSSIBLE THREAD LEAK IN SUITE o.a.s.sql.DataFrameTungstenSuite, thread names: rpc-boss-1889-1, shuffle-boss-1892-1 =====

[info] CSVReadSchemaSuite:
[info] - binary logistic regression without intercept with L2 regularization with bound (1 second, 858 milliseconds)
[info] - append column at the end (938 milliseconds)
[info] - hide column at the end (784 milliseconds)
[info] - change column type from byte to short/int/long (1 second, 191 milliseconds)
[info] - change column type from short to int/long (790 milliseconds)
[info] - change column type from int to long (807 milliseconds)
[info] - read byte, int, short, long together (1 second, 562 milliseconds)
[info] - change column type from float to double (923 milliseconds)
[info] - read float and double together (1 second, 1 millisecond)
[info] - change column type from float to decimal (417 milliseconds)
[info] - change column type from double to decimal (594 milliseconds)
[info] - read float, double, decimal together (1 second, 430 milliseconds)
[info] - read as string (2 seconds, 83 milliseconds)
[info] HeaderCSVReadSchemaSuite:
11:35:02.462 WARN org.apache.spark.sql.execution.datasources.CSVReadSchemaSuite: 

===== POSSIBLE THREAD LEAK IN SUITE o.a.s.sql.execution.datasources.CSVReadSchemaSuite, thread names: shuffle-boss-1898-1, rpc-boss-1895-1 =====

11:35:05.329 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 2, schema size: 3
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-1c66e110-bac8-466f-abce-b5f1c415927a/part=two/part-00000-0bbc2731-c329-47f1-bafc-61cfc6b8cd07-c000.csv
11:35:05.331 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 1, schema size: 3
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-1c66e110-bac8-466f-abce-b5f1c415927a/part=one/part-00000-0db34178-4ac9-401c-aec0-bcb8ec2c19c2-c000.csv
11:35:05.333 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 1, schema size: 3
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-1c66e110-bac8-466f-abce-b5f1c415927a/part=one/part-00001-0db34178-4ac9-401c-aec0-bcb8ec2c19c2-c000.csv
11:35:05.334 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 2, schema size: 3
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-1c66e110-bac8-466f-abce-b5f1c415927a/part=two/part-00001-0bbc2731-c329-47f1-bafc-61cfc6b8cd07-c000.csv
11:35:05.383 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 2, schema size: 3
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-1c66e110-bac8-466f-abce-b5f1c415927a/part=two/part-00000-0bbc2731-c329-47f1-bafc-61cfc6b8cd07-c000.csv
11:35:05.385 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 1, schema size: 3
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-1c66e110-bac8-466f-abce-b5f1c415927a/part=one/part-00000-0db34178-4ac9-401c-aec0-bcb8ec2c19c2-c000.csv
11:35:05.386 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 2, schema size: 3
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-1c66e110-bac8-466f-abce-b5f1c415927a/part=two/part-00001-0bbc2731-c329-47f1-bafc-61cfc6b8cd07-c000.csv
11:35:05.386 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 1, schema size: 3
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-1c66e110-bac8-466f-abce-b5f1c415927a/part=one/part-00001-0db34178-4ac9-401c-aec0-bcb8ec2c19c2-c000.csv
[info] - append column at the end (2 seconds, 673 milliseconds)
11:35:07.445 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 3, schema size: 2
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-68fe213e-3b56-48f7-8aa7-a6e63f552ba5/part=three/part-00000-ed88aeb9-05f3-4704-aff1-f34173d810f1-c000.csv
11:35:07.447 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 3, schema size: 2
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-68fe213e-3b56-48f7-8aa7-a6e63f552ba5/part=three/part-00001-ed88aeb9-05f3-4704-aff1-f34173d810f1-c000.csv
11:35:07.482 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 3, schema size: 2
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-68fe213e-3b56-48f7-8aa7-a6e63f552ba5/part=three/part-00000-ed88aeb9-05f3-4704-aff1-f34173d810f1-c000.csv
11:35:07.484 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 3, schema size: 2
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-68fe213e-3b56-48f7-8aa7-a6e63f552ba5/part=three/part-00001-ed88aeb9-05f3-4704-aff1-f34173d810f1-c000.csv
11:35:08.878 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 2, schema size: 1
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-68fe213e-3b56-48f7-8aa7-a6e63f552ba5/part=two/part-00000-07556e85-4b19-4da3-b430-a8adc4032e5f-c000.csv
11:35:08.878 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 3, schema size: 1
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-68fe213e-3b56-48f7-8aa7-a6e63f552ba5/part=three/part-00000-ed88aeb9-05f3-4704-aff1-f34173d810f1-c000.csv
11:35:08.879 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 2, schema size: 1
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-68fe213e-3b56-48f7-8aa7-a6e63f552ba5/part=two/part-00001-07556e85-4b19-4da3-b430-a8adc4032e5f-c000.csv
11:35:08.879 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 3, schema size: 1
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-68fe213e-3b56-48f7-8aa7-a6e63f552ba5/part=three/part-00001-ed88aeb9-05f3-4704-aff1-f34173d810f1-c000.csv
11:35:08.928 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 2, schema size: 1
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-68fe213e-3b56-48f7-8aa7-a6e63f552ba5/part=two/part-00000-07556e85-4b19-4da3-b430-a8adc4032e5f-c000.csv
11:35:08.928 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 3, schema size: 1
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-68fe213e-3b56-48f7-8aa7-a6e63f552ba5/part=three/part-00000-ed88aeb9-05f3-4704-aff1-f34173d810f1-c000.csv
11:35:08.930 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 3, schema size: 1
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-68fe213e-3b56-48f7-8aa7-a6e63f552ba5/part=three/part-00001-ed88aeb9-05f3-4704-aff1-f34173d810f1-c000.csv
11:35:08.930 WARN org.apache.spark.sql.catalyst.csv.CSVHeaderChecker: Number of column in CSV header is not equal to number of fields in the schema:
 Header length: 2, schema size: 1
CSV file: file:///home/jenkins/workspace/NewSparkPullRequestBuilder@2/target/tmp/spark-68fe213e-3b56-48f7-8aa7-a6e63f552ba5/part=two/part-00001-07556e85-4b19-4da3-b430-a8adc4032e5f-c000.csv
[info] - hide column at the end (3 seconds, 543 milliseconds)
[info] - change column type from byte to short/int/long (1 second, 706 milliseconds)
Build timed out (after 300 minutes). Marking the build as failed.
Build was aborted
Archiving artifacts
Exception in thread "Thread-217"
Recording test results
Finished: FAILURE