Console Output (build FAILED)

Skipping 25,145 KB of log output.
	at org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:88)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.executeAndTrack(RuleExecutor.scala:98)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.$anonfun$executeAndCheck$1(Analyzer.scala:152)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:201)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:151)
	at org.apache.spark.sql.hive.test.TestHiveQueryExecution.analyzed$lzycompute(TestHive.scala:606)
	at org.apache.spark.sql.hive.test.TestHiveQueryExecution.analyzed(TestHive.scala:589)
	at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:58)
	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:87)
	at org.apache.spark.sql.SparkSession.table(SparkSession.scala:589)
	at org.apache.spark.sql.execution.command.DropTableCommand.run(ddl.scala:241)
	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
	at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
	at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:226)
	at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3487)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
	at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3485)
	at org.apache.spark.sql.Dataset.<init>(Dataset.scala:226)
	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:88)
	at org.apache.spark.sql.hive.test.TestHiveSparkSession.sql(TestHive.scala:238)
	at org.apache.spark.sql.test.SQLTestUtilsBase.$anonfun$withTable$2(SQLTestUtils.scala:291)
	at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
	at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:38)
	at org.apache.spark.sql.test.SQLTestUtilsBase.$anonfun$withTable$1(SQLTestUtils.scala:290)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1386)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTable(SQLTestUtils.scala:290)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTable$(SQLTestUtils.scala:288)
	at org.apache.spark.sql.execution.datasources.parquet.ParquetCompatibilityTest.withTable(ParquetCompatibilityTest.scala:35)
	at org.apache.spark.sql.hive.ParquetHiveCompatibilitySuite.testParquetHiveCompatibility(ParquetHiveCompatibilitySuite.scala:47)
	at org.apache.spark.sql.hive.ParquetHiveCompatibilitySuite.$anonfun$new$7(ParquetHiveCompatibilitySuite.scala:142)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
	at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
	at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:286)
	at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
	at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
	at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:56)
	at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:393)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:381)
	at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:376)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:458)
	at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite.run(Suite.scala:1124)
	at org.scalatest.Suite.run$(Suite.scala:1106)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:518)
	at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
	at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:56)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
[info] - SPARK-16344: array of struct with a single field named 'array_element' (1 second, 784 milliseconds)
15:41:43.715 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:41:43.715 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:41:43.715 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:41:43.789 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:41:43.789 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:41:43.790 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
[info] QueryPartitionSuite:
[info] - SPARK-5068: query data when path doesn't exist (7 seconds, 454 milliseconds)
15:41:58.238 WARN org.apache.spark.rdd.HadoopRDD: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/spark-4d21173a-351f-4440-a545-bf0764aa6bcf/ds=4 doesn't exist and no partitions returned from this path.
org.apache.hadoop.mapred.InvalidInputException: Input path does not exist: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/spark-4d21173a-351f-4440-a545-bf0764aa6bcf/ds=4
	at org.apache.hadoop.mapred.FileInputFormat.singleThreadedListStatus(FileInputFormat.java:297)
	at org.apache.hadoop.mapred.FileInputFormat.listStatus(FileInputFormat.java:239)
	at org.apache.hadoop.mapred.FileInputFormat.getSplits(FileInputFormat.java:325)
	at org.apache.spark.rdd.HadoopRDD.getPartitions(HadoopRDD.scala:205)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.UnionRDD.$anonfun$getPartitions$1(UnionRDD.scala:85)
	at org.apache.spark.rdd.UnionRDD.$anonfun$getPartitions$1$adapted(UnionRDD.scala:85)
	at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at scala.collection.TraversableLike.map(TraversableLike.scala:238)
	at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
	at scala.collection.immutable.List.map(List.scala:298)
	at org.apache.spark.rdd.UnionRDD.getPartitions(UnionRDD.scala:85)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.sql.execution.SQLExecutionRDD.getPartitions(SQLExecutionRDD.scala:44)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2181)
	at org.apache.spark.rdd.RDD.count(RDD.scala:1227)
	at org.apache.spark.sql.QueryTest$.$anonfun$getErrorMessageInCheckAnswer$1(QueryTest.scala:270)
	at scala.runtime.java8.JFunction0$mcJ$sp.apply(JFunction0$mcJ$sp.java:23)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
	at org.apache.spark.sql.QueryTest$.getErrorMessageInCheckAnswer(QueryTest.scala:270)
	at org.apache.spark.sql.QueryTest$.checkAnswer(QueryTest.scala:247)
	at org.apache.spark.sql.QueryTest.checkAnswer(QueryTest.scala:156)
	at org.apache.spark.sql.QueryTest.checkAnswer(QueryTest.scala:164)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$queryWhenPathNotExist$3(QueryPartitionSuite.scala:67)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$queryWhenPathNotExist$3$adapted(QueryPartitionSuite.scala:39)
	at org.apache.spark.sql.test.SQLTestUtils.$anonfun$withTempDir$1(SQLTestUtils.scala:76)
	at org.apache.spark.sql.test.SQLTestUtils.$anonfun$withTempDir$1$adapted(SQLTestUtils.scala:75)
	at org.apache.spark.SparkFunSuite.withTempDir(SparkFunSuite.scala:161)
	at org.apache.spark.sql.hive.QueryPartitionSuite.org$apache$spark$sql$test$SQLTestUtils$$super$withTempDir(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.test.SQLTestUtils.withTempDir(SQLTestUtils.scala:75)
	at org.apache.spark.sql.test.SQLTestUtils.withTempDir$(SQLTestUtils.scala:74)
	at org.apache.spark.sql.hive.QueryPartitionSuite.withTempDir(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$queryWhenPathNotExist$2(QueryPartitionSuite.scala:39)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1377)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTable(SQLTestUtils.scala:290)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTable$(SQLTestUtils.scala:288)
	at org.apache.spark.sql.hive.QueryPartitionSuite.withTable(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$queryWhenPathNotExist$1(QueryPartitionSuite.scala:39)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1377)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTempView(SQLTestUtils.scala:260)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTempView$(SQLTestUtils.scala:258)
	at org.apache.spark.sql.hive.QueryPartitionSuite.withTempView(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.hive.QueryPartitionSuite.queryWhenPathNotExist(QueryPartitionSuite.scala:38)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$new$4(QueryPartitionSuite.scala:82)
	at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf(SQLHelper.scala:52)
	at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf$(SQLHelper.scala:36)
	at org.apache.spark.sql.hive.QueryPartitionSuite.org$apache$spark$sql$test$SQLTestUtilsBase$$super$withSQLConf(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf(SQLTestUtils.scala:231)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf$(SQLTestUtils.scala:229)
	at org.apache.spark.sql.hive.QueryPartitionSuite.withSQLConf(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$new$3(QueryPartitionSuite.scala:80)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
	at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
	at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:286)
	at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
	at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
	at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:56)
	at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:393)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:381)
	at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:376)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:458)
	at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite.run(Suite.scala:1124)
	at org.scalatest.Suite.run$(Suite.scala:1106)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:518)
	at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
	at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:56)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
15:41:58.692 WARN org.apache.spark.rdd.HadoopRDD: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/spark-4d21173a-351f-4440-a545-bf0764aa6bcf/ds=4 doesn't exist and no partitions returned from this path.
org.apache.hadoop.mapred.InvalidInputException: Input path does not exist: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/spark-4d21173a-351f-4440-a545-bf0764aa6bcf/ds=4
	at org.apache.hadoop.mapred.FileInputFormat.singleThreadedListStatus(FileInputFormat.java:297)
	at org.apache.hadoop.mapred.FileInputFormat.listStatus(FileInputFormat.java:239)
	at org.apache.hadoop.mapred.FileInputFormat.getSplits(FileInputFormat.java:325)
	at org.apache.spark.rdd.HadoopRDD.getPartitions(HadoopRDD.scala:205)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.UnionRDD.$anonfun$getPartitions$1(UnionRDD.scala:85)
	at org.apache.spark.rdd.UnionRDD.$anonfun$getPartitions$1$adapted(UnionRDD.scala:85)
	at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at scala.collection.TraversableLike.map(TraversableLike.scala:238)
	at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
	at scala.collection.immutable.List.map(List.scala:298)
	at org.apache.spark.rdd.UnionRDD.getPartitions(UnionRDD.scala:85)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2181)
	at org.apache.spark.rdd.RDD.$anonfun$collect$1(RDD.scala:1004)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
	at org.apache.spark.rdd.RDD.withScope(RDD.scala:388)
	at org.apache.spark.rdd.RDD.collect(RDD.scala:1003)
	at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:384)
	at org.apache.spark.sql.Dataset.collectFromPlan(Dataset.scala:3497)
	at org.apache.spark.sql.Dataset.$anonfun$collect$1(Dataset.scala:2831)
	at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3487)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
	at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3485)
	at org.apache.spark.sql.Dataset.collect(Dataset.scala:2831)
	at org.apache.spark.sql.QueryTest$.getErrorMessageInCheckAnswer(QueryTest.scala:274)
	at org.apache.spark.sql.QueryTest$.checkAnswer(QueryTest.scala:247)
	at org.apache.spark.sql.QueryTest.checkAnswer(QueryTest.scala:156)
	at org.apache.spark.sql.QueryTest.checkAnswer(QueryTest.scala:164)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$queryWhenPathNotExist$3(QueryPartitionSuite.scala:67)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$queryWhenPathNotExist$3$adapted(QueryPartitionSuite.scala:39)
	at org.apache.spark.sql.test.SQLTestUtils.$anonfun$withTempDir$1(SQLTestUtils.scala:76)
	at org.apache.spark.sql.test.SQLTestUtils.$anonfun$withTempDir$1$adapted(SQLTestUtils.scala:75)
	at org.apache.spark.SparkFunSuite.withTempDir(SparkFunSuite.scala:161)
	at org.apache.spark.sql.hive.QueryPartitionSuite.org$apache$spark$sql$test$SQLTestUtils$$super$withTempDir(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.test.SQLTestUtils.withTempDir(SQLTestUtils.scala:75)
	at org.apache.spark.sql.test.SQLTestUtils.withTempDir$(SQLTestUtils.scala:74)
	at org.apache.spark.sql.hive.QueryPartitionSuite.withTempDir(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$queryWhenPathNotExist$2(QueryPartitionSuite.scala:39)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1377)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTable(SQLTestUtils.scala:290)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTable$(SQLTestUtils.scala:288)
	at org.apache.spark.sql.hive.QueryPartitionSuite.withTable(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$queryWhenPathNotExist$1(QueryPartitionSuite.scala:39)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1377)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTempView(SQLTestUtils.scala:260)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTempView$(SQLTestUtils.scala:258)
	at org.apache.spark.sql.hive.QueryPartitionSuite.withTempView(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.hive.QueryPartitionSuite.queryWhenPathNotExist(QueryPartitionSuite.scala:38)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$new$4(QueryPartitionSuite.scala:82)
	at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf(SQLHelper.scala:52)
	at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf$(SQLHelper.scala:36)
	at org.apache.spark.sql.hive.QueryPartitionSuite.org$apache$spark$sql$test$SQLTestUtilsBase$$super$withSQLConf(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf(SQLTestUtils.scala:231)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf$(SQLTestUtils.scala:229)
	at org.apache.spark.sql.hive.QueryPartitionSuite.withSQLConf(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$new$3(QueryPartitionSuite.scala:80)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
	at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
	at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:286)
	at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
	at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
	at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:56)
	at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:393)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:381)
	at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:376)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:458)
	at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite.run(Suite.scala:1124)
	at org.scalatest.Suite.run$(Suite.scala:1106)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:518)
	at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
	at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:56)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
[info] - Replace spark.sql.hive.verifyPartitionPath by spark.files.ignoreMissingFiles (7 seconds, 777 milliseconds)
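Note: the two InvalidInputException stack traces above are expected output for this passing test, not failures. The suite deletes a partition directory out from under the table (the ds=4 path named in the WARN lines) and then checks that the query still succeeds because spark.files.ignoreMissingFiles, which replaces spark.sql.hive.verifyPartitionPath, makes the Hadoop scan log and skip missing input paths. A minimal sketch of the same scenario follows; the session setup and table name are illustrative assumptions, and only the config key comes from this log.

    // Sketch only: reproduces the "missing partition path" scenario exercised above.
    // Table name and session setup are assumptions; spark.files.ignoreMissingFiles
    // is the config named in the test title. Requires spark-hive on the classpath.
    import java.nio.file.{Files, Path, Paths}
    import org.apache.spark.sql.SparkSession

    object IgnoreMissingFilesSketch {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .master("local[2]")
          .appName("ignore-missing-files-sketch")
          .config("spark.files.ignoreMissingFiles", "true") // skip deleted input dirs with a WARN
          .enableHiveSupport()
          .getOrCreate()

        spark.sql("CREATE TABLE part_tbl (value STRING) PARTITIONED BY (ds INT)")
        spark.sql("INSERT INTO part_tbl PARTITION (ds=1) VALUES ('a')")
        spark.sql("INSERT INTO part_tbl PARTITION (ds=4) VALUES ('b')")

        // Delete one partition directory behind the metastore's back, as the test does.
        val warehouse = spark.conf.get("spark.sql.warehouse.dir").stripPrefix("file:")
        deleteRecursively(Paths.get(warehouse, "part_tbl", "ds=4"))

        // With the flag set, the scan logs the InvalidInputException seen above
        // but still returns the rows from the surviving partitions.
        spark.table("part_tbl").show()
        spark.stop()
      }

      // Recursively delete a directory tree (children before parents).
      private def deleteRecursively(p: Path): Unit =
        if (Files.exists(p)) {
          Files.walk(p)
            .sorted(java.util.Comparator.reverseOrder[Path]())
            .forEach(f => Files.delete(f))
        }
    }

With the flag left at its default of false, the same query would fail with the InvalidInputException instead of logging it as a warning.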
15:41:59.055 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/table_with_timestamp_partition specified for non-external table:table_with_timestamp_partition
[info] - SPARK-21739: Cast expression should initialize timezoneId (2 seconds, 918 milliseconds)
15:42:02.062 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:02.062 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:02.062 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:02.142 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:02.142 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:02.143 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
[info] HiveOrcPartitionDiscoverySuite:
[info] - read partitioned table - normal case (2 seconds, 859 milliseconds)
[info] - read partitioned table - with nulls (2 seconds, 413 milliseconds)
[info] - SPARK-27162: handle pathfilter configuration correctly (1 second, 122 milliseconds)
15:42:08.630 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:08.630 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:08.630 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:08.685 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:08.685 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:08.685 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
[info] PruningSuite:
15:42:08.773 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:08.773 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:08.773 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:08.827 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:08.827 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:08.827 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:08.852 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart specified for non-external table:srcpart
[info] - Column pruning - with partitioned table - pruning test (2 seconds, 897 milliseconds)
15:42:11.974 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart does not exist; Force to delete it.
15:42:11.974 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart
15:42:12.013 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:12.013 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:12.013 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:12.078 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:12.078 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:12.078 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:12.098 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src specified for non-external table:src
15:42:12.736 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart specified for non-external table:srcpart
[info] - Column pruning - with partitioned table - query test (3 seconds, 908 milliseconds)
[info] - Column pruning - with non-partitioned table - pruning test (36 milliseconds)
15:42:15.768 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src does not exist; Force to delete it.
15:42:15.768 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src
15:42:15.899 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart does not exist; Force to delete it.
15:42:15.899 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart
15:42:15.940 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:15.940 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:15.940 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:16.013 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:16.013 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:16.013 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:16.032 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src specified for non-external table:src
[info] - Column pruning - with non-partitioned table - query test (1 second, 166 milliseconds)
[info] - Column pruning - with multiple projects - pruning test (69 milliseconds)
15:42:17.003 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src does not exist; Force to delete it.
15:42:17.003 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src
15:42:17.048 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:17.048 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:17.048 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:17.102 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:17.102 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:17.102 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:17.121 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src specified for non-external table:src
[info] - Column pruning - with multiple projects - query test (935 milliseconds)
[info] - Column pruning - projects alias substituting - pruning test (73 milliseconds)
15:42:18.015 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src does not exist; Force to delete it.
15:42:18.015 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src
15:42:18.059 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:18.059 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:18.060 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:18.112 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:18.112 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:18.113 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:18.130 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src specified for non-external table:src
[info] - Column pruning - projects alias substituting - query test (847 milliseconds)
[info] - Column pruning - filter alias in-lining - pruning test (52 milliseconds)
15:42:18.902 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src does not exist; Force to delete it.
15:42:18.902 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src
15:42:18.937 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:18.937 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:18.938 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:18.989 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:18.989 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:18.989 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:19.007 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src specified for non-external table:src
[info] - Column pruning - filter alias in-lining - query test (831 milliseconds)
[info] - Column pruning - without filters - pruning test (55 milliseconds)
15:42:19.802 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src does not exist; Force to delete it.
15:42:19.802 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src
15:42:19.842 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:19.842 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:19.842 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:19.897 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:19.897 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:19.897 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:19.915 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src specified for non-external table:src
[info] - Column pruning - without filters - query test (872 milliseconds)
[info] - Column pruning - simple top project without aliases - pruning test (66 milliseconds)
15:42:20.745 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src does not exist; Force to delete it.
15:42:20.745 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src
15:42:20.787 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:20.787 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:20.787 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:20.835 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:20.835 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:20.835 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:20.858 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src specified for non-external table:src
[info] - Column pruning - simple top project without aliases - query test (966 milliseconds)
[info] - Column pruning - non-trivial top project with aliases - pruning test (82 milliseconds)
15:42:21.790 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src does not exist; Force to delete it.
15:42:21.790 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src
15:42:21.833 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:21.833 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:21.833 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:21.887 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:21.887 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:21.888 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:21.912 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src specified for non-external table:src
[info] - Column pruning - non-trivial top project with aliases - query test (918 milliseconds)
[info] - Partition pruning - non-partitioned, non-trivial project - pruning test (42 milliseconds)
15:42:22.743 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src does not exist; Force to delete it.
15:42:22.743 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src
15:42:22.787 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:22.787 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:22.787 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:22.843 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:22.843 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:22.843 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:22.866 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src specified for non-external table:src
[info] - Partition pruning - non-partitioned, non-trivial project - query test (870 milliseconds)
[info] - Partition pruning - non-partitioned table - pruning test (40 milliseconds)
15:42:23.658 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src does not exist; Force to delete it.
15:42:23.658 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src
15:42:23.703 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:23.703 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:23.704 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:23.765 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:23.765 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:23.765 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:23.788 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src specified for non-external table:src
[info] - Partition pruning - non-partitioned table - query test (880 milliseconds)
15:42:24.469 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart1 specified for non-external table:srcpart1
[info] - Partition pruning - with filter on string partition key - pruning test (2 seconds, 939 milliseconds)
15:42:27.485 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src does not exist; Force to delete it.
15:42:27.485 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src
15:42:27.638 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart1 does not exist; Force to delete it.
15:42:27.638 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart1
15:42:27.683 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:27.683 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:27.683 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:27.763 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:27.763 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:27.763 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:27.786 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src specified for non-external table:src
15:42:28.463 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart specified for non-external table:srcpart
15:42:31.460 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart1 specified for non-external table:srcpart1
[info] - Partition pruning - with filter on string partition key - query test (7 seconds, 218 milliseconds)
[info] - Partition pruning - with filter on int partition key - pruning test (151 milliseconds)
15:42:34.945 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src does not exist; Force to delete it.
15:42:34.945 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src
15:42:35.063 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart does not exist; Force to delete it.
15:42:35.063 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart
15:42:35.128 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart1 does not exist; Force to delete it.
15:42:35.128 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart1
15:42:35.162 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:35.162 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:35.162 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:35.229 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:35.229 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:35.229 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:35.247 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src specified for non-external table:src
15:42:35.821 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart specified for non-external table:srcpart
15:42:38.538 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart1 specified for non-external table:srcpart1
[info] - Partition pruning - with filter on int partition key - query test (6 seconds, 939 milliseconds)
[info] - Partition pruning - left only 1 partition - pruning test (123 milliseconds)
15:42:42.004 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src does not exist; Force to delete it.
15:42:42.004 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src
15:42:42.125 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart does not exist; Force to delete it.
15:42:42.126 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart
15:42:42.233 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart1 does not exist; Force to delete it.
15:42:42.233 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart1
15:42:42.288 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:42.288 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:42.289 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:42.376 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:42.376 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:42.376 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:42.398 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src specified for non-external table:src
15:42:43.032 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart specified for non-external table:srcpart
15:42:45.703 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart1 specified for non-external table:srcpart1
[info] - Partition pruning - left only 1 partition - query test (6 seconds, 792 milliseconds)
[info] - Partition pruning - all partitions pruned - pruning test (124 milliseconds)
15:42:48.855 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src does not exist; Force to delete it.
15:42:48.856 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src
15:42:49.004 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart does not exist; Force to delete it.
15:42:49.004 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart
15:42:49.086 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart1 does not exist; Force to delete it.
15:42:49.086 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart1
15:42:49.133 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:49.133 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:49.133 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:49.213 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:49.214 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:49.214 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:49.238 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src specified for non-external table:src
15:42:49.998 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart specified for non-external table:srcpart
15:42:53.070 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart1 specified for non-external table:srcpart1
[info] - Partition pruning - all partitions pruned - query test (7 seconds, 552 milliseconds)
[info] - Partition pruning - pruning with both column key and partition key - pruning test (163 milliseconds)
15:42:56.705 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src does not exist; Force to delete it.
15:42:56.705 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src
15:42:56.868 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart does not exist; Force to delete it.
15:42:56.868 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart
15:42:56.952 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart1 does not exist; Force to delete it.
15:42:56.952 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart1
15:42:57.002 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:57.002 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:57.002 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:57.113 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:42:57.113 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:42:57.113 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:42:57.140 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src specified for non-external table:src
15:42:57.749 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart specified for non-external table:srcpart
15:43:00.659 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart1 specified for non-external table:srcpart1
[info] - Partition pruning - pruning with both column key and partition key - query test (7 seconds, 424 milliseconds)
15:43:04.068 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src does not exist; Force to delete it.
15:43:04.068 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src
15:43:04.218 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart does not exist; Force to delete it.
15:43:04.218 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart
15:43:04.296 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart1 does not exist; Force to delete it.
15:43:04.296 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/srcpart1
15:43:04.332 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:43:04.332 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:43:04.332 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:43:04.398 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:43:04.398 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:43:04.398 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
[info] HiveHadoopDelegationTokenManagerSuite:
[info] - default configuration (32 milliseconds)
15:43:04.484 WARN org.apache.spark.deploy.security.HadoopDelegationTokenManager: spark.yarn.security.credentials.hive.enabled is deprecated.  Please use spark.security.credentials.hive.enabled instead.
[info] - using deprecated configurations (3 milliseconds)
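The deprecation warning above names both the old and the new key. A minimal sketch of opting into the non-deprecated spelling (the SparkConf API is standard; the application name is made up for illustration):

    import org.apache.spark.SparkConf

    // Set the current key instead of the deprecated spark.yarn.* spelling
    // called out in the warning above.
    val conf = new SparkConf()
      .setAppName("token-manager-demo")                       // hypothetical name
      .set("spark.security.credentials.hive.enabled", "true") // replaces spark.yarn.security.credentials.hive.enabled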
15:43:04.920 WARN org.apache.spark.util.Utils: Your hostname, amp-jenkins-staging-worker-02 resolves to a loopback address: 127.0.1.1; using 192.168.10.32 instead (on interface eno1)
15:43:04.921 WARN org.apache.spark.util.Utils: Set SPARK_LOCAL_IP if you need to bind to another address
[info] - SPARK-23209: obtain tokens when Hive classes are not available (654 milliseconds)
[info] ObjectHashAggregateSuite:
[info] - typed_count without grouping keys (294 milliseconds)
[info] - typed_count without grouping keys and empty input (174 milliseconds)
[info] - typed_count with grouping keys (240 milliseconds)
[info] - typed_count fallback to sort-based aggregation (356 milliseconds)
[info] - random input data types (11 seconds, 486 milliseconds)
[info] - randomized aggregation test - [typed] - with grouping keys - with empty input (800 milliseconds)
[info] - randomized aggregation test - [typed] - with grouping keys - with non-empty input (1 second, 684 milliseconds)
[info] - randomized aggregation test - [typed] - without grouping keys - with empty input (325 milliseconds)
[info] - randomized aggregation test - [typed] - without grouping keys - with non-empty input (297 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe] - with grouping keys - with empty input (1 second, 60 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe] - with grouping keys - with non-empty input (2 seconds, 91 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe] - without grouping keys - with empty input (343 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe] - without grouping keys - with non-empty input (289 milliseconds)
[info] - randomized aggregation test - [with partial + safe] - with grouping keys - with empty input (927 milliseconds)
[info] - randomized aggregation test - [with partial + safe] - with grouping keys - with non-empty input (2 seconds, 292 milliseconds)
[info] - randomized aggregation test - [with partial + safe] - without grouping keys - with empty input (328 milliseconds)
[info] - randomized aggregation test - [with partial + safe] - without grouping keys - with non-empty input (293 milliseconds)
[info] - randomized aggregation test - [with distinct] - with grouping keys - with empty input (1 second, 602 milliseconds)
[info] - randomized aggregation test - [with distinct] - with grouping keys - with non-empty input (3 seconds, 94 milliseconds)
[info] - randomized aggregation test - [with distinct] - without grouping keys - with empty input (757 milliseconds)
[info] - randomized aggregation test - [with distinct] - without grouping keys - with non-empty input (958 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe] - with grouping keys - with empty input (820 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe] - with grouping keys - with non-empty input (2 seconds, 82 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe] - without grouping keys - with empty input (426 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe] - without grouping keys - with non-empty input (392 milliseconds)
[info] - randomized aggregation test - [typed, with partial + safe] - with grouping keys - with empty input (760 milliseconds)
[info] - randomized aggregation test - [typed, with partial + safe] - with grouping keys - with non-empty input (2 seconds, 22 milliseconds)
[info] - randomized aggregation test - [typed, with partial + safe] - without grouping keys - with empty input (374 milliseconds)
[info] - randomized aggregation test - [typed, with partial + safe] - without grouping keys - with non-empty input (338 milliseconds)
[info] - randomized aggregation test - [typed, with distinct] - with grouping keys - with empty input (957 milliseconds)
[info] - randomized aggregation test - [typed, with distinct] - with grouping keys - with non-empty input (3 seconds, 155 milliseconds)
[info] - randomized aggregation test - [typed, with distinct] - without grouping keys - with empty input (752 milliseconds)
[info] - randomized aggregation test - [typed, with distinct] - without grouping keys - with non-empty input (866 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with partial + safe] - with grouping keys - with empty input (864 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with partial + safe] - with grouping keys - with non-empty input (2 seconds, 124 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with partial + safe] - without grouping keys - with empty input (377 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with partial + safe] - without grouping keys - with non-empty input (308 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with distinct] - with grouping keys - with empty input (2 seconds, 90 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with distinct] - with grouping keys - with non-empty input (3 seconds, 174 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with distinct] - without grouping keys - with empty input (1 second, 222 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with distinct] - without grouping keys - with non-empty input (1 second, 210 milliseconds)
[info] - randomized aggregation test - [with partial + safe, with distinct] - with grouping keys - with empty input (1 second, 331 milliseconds)
[info] - randomized aggregation test - [with partial + safe, with distinct] - with grouping keys - with non-empty input (3 seconds, 49 milliseconds)
[info] - randomized aggregation test - [with partial + safe, with distinct] - without grouping keys - with empty input (760 milliseconds)
[info] - randomized aggregation test - [with partial + safe, with distinct] - without grouping keys - with non-empty input (1 second, 359 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with partial + safe] - with grouping keys - with empty input (1 second, 20 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with partial + safe] - with grouping keys - with non-empty input (2 seconds, 227 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with partial + safe] - without grouping keys - with empty input (434 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with partial + safe] - without grouping keys - with non-empty input (372 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with distinct] - with grouping keys - with empty input (1 second, 67 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with distinct] - with grouping keys - with non-empty input (3 seconds, 468 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with distinct] - without grouping keys - with empty input (610 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with distinct] - without grouping keys - with non-empty input (1 second, 41 milliseconds)
[info] - randomized aggregation test - [typed, with partial + safe, with distinct] - with grouping keys - with empty input (910 milliseconds)
[info] - randomized aggregation test - [typed, with partial + safe, with distinct] - with grouping keys - with non-empty input (3 seconds, 492 milliseconds)
[info] - randomized aggregation test - [typed, with partial + safe, with distinct] - without grouping keys - with empty input (729 milliseconds)
[info] - randomized aggregation test - [typed, with partial + safe, with distinct] - without grouping keys - with non-empty input (1 second, 101 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with partial + safe, with distinct] - with grouping keys - with empty input (1 second, 86 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with partial + safe, with distinct] - with grouping keys - with non-empty input (3 seconds, 405 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with partial + safe, with distinct] - without grouping keys - with empty input (605 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with partial + safe, with distinct] - without grouping keys - with non-empty input (848 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with partial + safe, with distinct] - with grouping keys - with empty input (700 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with partial + safe, with distinct] - with grouping keys - with non-empty input (3 seconds, 392 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with partial + safe, with distinct] - without grouping keys - with empty input (854 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with partial + safe, with distinct] - without grouping keys - with non-empty input (1 second, 352 milliseconds)
[info] - SPARK-18403 Fix unsafe data false sharing issue in ObjectHashAggregateExec (1 second, 246 milliseconds)
15:44:35.918 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:44:35.918 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:44:35.918 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:44:35.984 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:44:35.984 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:44:35.984 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
[info] TestHiveSuite:
15:44:36.018 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src specified for non-external table:src
15:44:36.712 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src does not exist; Force to delete it.
15:44:36.712 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-3c132709-044e-46ca-aacb-9ed747a6d0d0/src
15:44:36.748 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:44:36.748 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:44:36.748 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:44:36.798 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:44:36.798 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:44:36.798 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:44:36.885 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:44:36.885 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:44:36.886 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:44:36.966 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:44:36.966 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:44:36.967 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
[info] - load test table based on case sensitivity (976 milliseconds)
[info] - SPARK-15887: hive-site.xml should be loaded (1 millisecond)
15:44:37.048 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:44:37.048 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:44:37.048 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
15:44:37.098 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
15:44:37.098 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
15:44:37.098 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
[info] ClasspathDependenciesSuite:
[info] - shaded Protobuf (5 milliseconds)
[info] - shaded Kryo (0 milliseconds)
[info] - hive-common (0 milliseconds)
[info] - hive-exec (0 milliseconds)
[info] - Forbidden Dependencies (4 milliseconds)
[info] - parquet-hadoop-bundle (1 millisecond)
[info] Test run started
[info] Test org.apache.spark.sql.hive.JavaMetastoreDataSourcesSuite.saveTableAndQueryIt started
15:44:38.008 WARN org.apache.spark.sql.hive.test.TestHiveExternalCatalog: Couldn't find corresponding Hive SerDe for data source provider org.apache.spark.sql.json. Persisting data source table `default`.`javasavedtable` into Hive metastore in Spark SQL specific format, which is NOT compatible with Hive.
[info] Test run finished: 0 failed, 0 ignored, 1 total, 0.975s
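The SerDe warning above is what Spark emits when a table backed by a Spark-native data source provider (here org.apache.spark.sql.json) is persisted into the Hive metastore. A hedged sketch of the kind of call that triggers it (the session setup and sample rows are illustrative, not taken from the suite):

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder()
      .appName("serde-demo")   // hypothetical
      .enableHiveSupport()
      .getOrCreate()
    import spark.implicits._

    // json has no corresponding Hive SerDe, so the table metadata is
    // written in Spark SQL's own format and is not readable by Hive.
    Seq((1, "a"), (2, "b")).toDF("id", "value")
      .write
      .format("json")
      .saveAsTable("javasavedtable")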
[info] Test run started
[info] Test org.apache.spark.sql.hive.JavaDataFrameSuite.testUDAF started
[info] Test org.apache.spark.sql.hive.JavaDataFrameSuite.saveTableAndQueryIt started
[info] Test run finished: 0 failed, 0 ignored, 2 total, 2.243s
15:44:40.894 WARN org.apache.spark.network.server.TransportChannelHandler: Exception in connection from /192.168.10.32:44338
java.util.concurrent.RejectedExecutionException: event executor terminated
	at io.netty.util.concurrent.SingleThreadEventExecutor.reject(SingleThreadEventExecutor.java:981)
	at io.netty.util.concurrent.SingleThreadEventExecutor.offerTask(SingleThreadEventExecutor.java:388)
	at io.netty.util.concurrent.SingleThreadEventExecutor.addTask(SingleThreadEventExecutor.java:381)
	at io.netty.util.concurrent.SingleThreadEventExecutor.execute(SingleThreadEventExecutor.java:880)
	at io.netty.channel.DefaultChannelPipeline.destroyUp(DefaultChannelPipeline.java:863)
	at io.netty.channel.DefaultChannelPipeline.destroy(DefaultChannelPipeline.java:848)
	at io.netty.channel.DefaultChannelPipeline.access$700(DefaultChannelPipeline.java:46)
	at io.netty.channel.DefaultChannelPipeline$HeadContext.channelUnregistered(DefaultChannelPipeline.java:1404)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelUnregistered(AbstractChannelHandlerContext.java:193)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelUnregistered(AbstractChannelHandlerContext.java:179)
	at io.netty.channel.DefaultChannelPipeline.fireChannelUnregistered(DefaultChannelPipeline.java:833)
	at io.netty.channel.AbstractChannel$AbstractUnsafe$8.run(AbstractChannel.java:827)
	at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:163)
	at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:510)
	at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:518)
	at io.netty.util.concurrent.SingleThreadEventExecutor$6.run(SingleThreadEventExecutor.java:1044)
	at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
	at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
	at java.lang.Thread.run(Thread.java:748)
15:44:40.894 WARN org.apache.spark.network.server.TransportChannelHandler: Exception in connection from /192.168.10.32:41326
java.util.concurrent.RejectedExecutionException: event executor terminated
	at io.netty.util.concurrent.SingleThreadEventExecutor.reject(SingleThreadEventExecutor.java:981)
	at io.netty.util.concurrent.SingleThreadEventExecutor.offerTask(SingleThreadEventExecutor.java:388)
	at io.netty.util.concurrent.SingleThreadEventExecutor.addTask(SingleThreadEventExecutor.java:381)
	at io.netty.util.concurrent.SingleThreadEventExecutor.execute(SingleThreadEventExecutor.java:880)
	at io.netty.channel.DefaultChannelPipeline.destroyUp(DefaultChannelPipeline.java:863)
	at io.netty.channel.DefaultChannelPipeline.destroy(DefaultChannelPipeline.java:848)
	at io.netty.channel.DefaultChannelPipeline.access$700(DefaultChannelPipeline.java:46)
	at io.netty.channel.DefaultChannelPipeline$HeadContext.channelUnregistered(DefaultChannelPipeline.java:1404)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelUnregistered(AbstractChannelHandlerContext.java:193)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelUnregistered(AbstractChannelHandlerContext.java:179)
	at io.netty.channel.DefaultChannelPipeline.fireChannelUnregistered(DefaultChannelPipeline.java:833)
	at io.netty.channel.AbstractChannel$AbstractUnsafe$8.run(AbstractChannel.java:827)
	at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:163)
	at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasksFrom(SingleThreadEventExecutor.java:466)
	at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:415)
	at io.netty.util.concurrent.SingleThreadEventExecutor.confirmShutdown(SingleThreadEventExecutor.java:818)
	at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:528)
	at io.netty.util.concurrent.SingleThreadEventExecutor$6.run(SingleThreadEventExecutor.java:1044)
	at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
	at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
	at java.lang.Thread.run(Thread.java:748)
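Both stack traces above show the same failure mode: a channel-cleanup task is handed to a Netty event executor that has already terminated, and the executor rejects it. JDK executors behave the same way, which this self-contained sketch demonstrates (plain java.util.concurrent, not Netty):

    import java.util.concurrent.{Executors, RejectedExecutionException}

    val pool = Executors.newSingleThreadExecutor()
    pool.shutdown() // the executor starts terminating; new tasks are rejected

    try pool.execute(() => println("never runs"))
    catch {
      case e: RejectedExecutionException =>
        println(s"rejected, as in the traces above: $e")
    }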
15:45:10.701 WARN org.apache.spark.network.util.JavaUtils: Attempt to delete using native Unix OS command failed for path = /home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/spark-a6ee93f7-bbda-4871-ad36-19522f2ab5c5. Falling back to Java IO way
java.io.IOException: Failed to delete: /home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/spark-a6ee93f7-bbda-4871-ad36-19522f2ab5c5
	at org.apache.spark.network.util.JavaUtils.deleteRecursivelyUsingUnixNative(JavaUtils.java:163)
	at org.apache.spark.network.util.JavaUtils.deleteRecursively(JavaUtils.java:110)
	at org.apache.spark.network.util.JavaUtils.deleteRecursively(JavaUtils.java:91)
	at org.apache.spark.util.Utils$.deleteRecursively(Utils.scala:1079)
	at org.apache.spark.util.ShutdownHookManager$.$anonfun$new$4(ShutdownHookManager.scala:65)
	at org.apache.spark.util.ShutdownHookManager$.$anonfun$new$4$adapted(ShutdownHookManager.scala:62)
	at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
	at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
	at org.apache.spark.util.ShutdownHookManager$.$anonfun$new$2(ShutdownHookManager.scala:62)
	at org.apache.spark.util.SparkShutdownHook.run(ShutdownHookManager.scala:216)
	at org.apache.spark.util.SparkShutdownHookManager.$anonfun$runAll$2(ShutdownHookManager.scala:188)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1932)
	at org.apache.spark.util.SparkShutdownHookManager.$anonfun$runAll$1(ShutdownHookManager.scala:188)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at scala.util.Try$.apply(Try.scala:213)
	at org.apache.spark.util.SparkShutdownHookManager.runAll(ShutdownHookManager.scala:188)
	at org.apache.spark.util.SparkShutdownHookManager$$anon$2.run(ShutdownHookManager.scala:178)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.InterruptedException
	at java.lang.Object.wait(Native Method)
	at java.lang.Object.wait(Object.java:502)
	at java.lang.UNIXProcess.waitFor(UNIXProcess.java:395)
	at org.apache.spark.network.util.JavaUtils.deleteRecursivelyUsingUnixNative(JavaUtils.java:161)
	... 23 more
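The trace above documents a two-step delete: Spark first tries a native Unix command for the path and, when that attempt fails (here because shutdown interrupted the wait on the child process), falls back to "the Java IO way". A hedged sketch of that fallback pattern (helper names and structure are illustrative, not Spark's actual JavaUtils code):

    import java.io.{File, IOException}
    import scala.sys.process._

    // Plain JVM-side recursive delete, used as the fallback.
    def deleteViaJavaIO(f: File): Unit = {
      if (f.isDirectory) Option(f.listFiles()).toSeq.flatten.foreach(deleteViaJavaIO)
      if (!f.delete() && f.exists()) throw new IOException(s"Failed to delete: $f")
    }

    def deleteRecursively(dir: File): Unit = {
      // First attempt: native "rm -rf"; a non-zero exit or any exception
      // (e.g. an InterruptedException while waiting) triggers the fallback.
      val rc = try Seq("rm", "-rf", dir.getAbsolutePath).! catch { case _: Exception => -1 }
      if (rc != 0) deleteViaJavaIO(dir)
    }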
15:45:10.701 WARN org.apache.hadoop.util.ShutdownHookManager: ShutdownHook '$anon$2' timeout, java.util.concurrent.TimeoutException
java.util.concurrent.TimeoutException
	at java.util.concurrent.FutureTask.get(FutureTask.java:205)
	at org.apache.hadoop.util.ShutdownHookManager.executeShutdown(ShutdownHookManager.java:124)
	at org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:95)
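The TimeoutException above comes from Hadoop's ShutdownHookManager running each hook under a bounded FutureTask.get, as the three frames show. A minimal reproduction of that pattern (the slow hook and the 10-second bound are hypothetical; Hadoop's configured timeout may differ):

    import java.util.concurrent.{FutureTask, TimeUnit, TimeoutException}

    val hook = new FutureTask[Unit](() => Thread.sleep(60000)) // a slow hook
    new Thread(hook).start()

    try hook.get(10, TimeUnit.SECONDS) // bounded wait, as in executeShutdown
    catch { case _: TimeoutException => println("ShutdownHook timed out") }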
[info] ScalaTest
[info] Run completed in 1 hour, 52 minutes, 3 seconds.
[info] Total number of tests run: 3461
[info] Suites: completed 125, aborted 0
[info] Tests: succeeded 3461, failed 0, canceled 0, ignored 595, pending 0
[info] All tests passed.
[info] Passed: Total 3464, Failed 0, Errors 0, Passed 3464, Ignored 595
[error] (sql-kafka-0-10/test:test) sbt.TestsFailedException: Tests unsuccessful
[error] (sql/test:test) sbt.TestsFailedException: Tests unsuccessful
[error] Total time: 6768 s, completed Jan 11, 2020 3:45:17 PM
[error] running /home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/build/sbt -Phadoop-3.2 -Phive-2.3 -Pkinesis-asl -Phive-thriftserver -Pspark-ganglia-lgpl -Pyarn -Phadoop-cloud -Phive -Pmesos -Pkubernetes test ; received return code 1
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results
Finished: FAILURE