Failed
Console Output

Skipping 25,085 KB.. Full Log
	at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:286)
	at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
	at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
	at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:56)
	at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:393)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:381)
	at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:376)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:458)
	at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite.run(Suite.scala:1124)
	at org.scalatest.Suite.run$(Suite.scala:1106)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:518)
	at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
	at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:56)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
[info] - struct (1 second, 651 milliseconds)
18:00:44.197 WARN org.apache.spark.sql.execution.command.DropTableCommand: org.apache.spark.sql.AnalysisException: Path does not exist: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/spark-aa2e5825-0357-4fe9-9c1a-d5b88af71621;
org.apache.spark.sql.AnalysisException: Path does not exist: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/spark-aa2e5825-0357-4fe9-9c1a-d5b88af71621;
	at org.apache.spark.sql.execution.datasources.DataSource$.$anonfun$checkAndGlobPathIfNecessary$1(DataSource.scala:754)
	at scala.collection.TraversableLike.$anonfun$flatMap$1(TraversableLike.scala:245)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at scala.collection.TraversableLike.flatMap(TraversableLike.scala:245)
	at scala.collection.TraversableLike.flatMap$(TraversableLike.scala:242)
	at scala.collection.immutable.List.flatMap(List.scala:355)
	at org.apache.spark.sql.execution.datasources.DataSource$.checkAndGlobPathIfNecessary(DataSource.scala:741)
	at org.apache.spark.sql.execution.datasources.DataSource.checkAndGlobPathIfNecessary(DataSource.scala:570)
	at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:393)
	at org.apache.spark.sql.hive.HiveMetastoreCatalog.$anonfun$convertToLogicalRelation$5(HiveMetastoreCatalog.scala:248)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.sql.hive.HiveMetastoreCatalog.$anonfun$convertToLogicalRelation$4(HiveMetastoreCatalog.scala:238)
	at org.apache.spark.sql.hive.HiveMetastoreCatalog.withTableCreationLock(HiveMetastoreCatalog.scala:58)
	at org.apache.spark.sql.hive.HiveMetastoreCatalog.convertToLogicalRelation(HiveMetastoreCatalog.scala:231)
	at org.apache.spark.sql.hive.HiveMetastoreCatalog.convert(HiveMetastoreCatalog.scala:137)
	at org.apache.spark.sql.hive.RelationConversions$$anonfun$apply$4.applyOrElse(HiveStrategies.scala:220)
	at org.apache.spark.sql.hive.RelationConversions$$anonfun$apply$4.applyOrElse(HiveStrategies.scala:207)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.$anonfun$resolveOperatorsDown$2(AnalysisHelper.scala:108)
	at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:72)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.$anonfun$resolveOperatorsDown$1(AnalysisHelper.scala:108)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:194)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsDown(AnalysisHelper.scala:106)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsDown$(AnalysisHelper.scala:104)
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsDown(LogicalPlan.scala:29)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.$anonfun$resolveOperatorsDown$4(AnalysisHelper.scala:113)
	at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:376)
	at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:214)
	at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:374)
	at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:327)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.$anonfun$resolveOperatorsDown$1(AnalysisHelper.scala:113)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:194)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsDown(AnalysisHelper.scala:106)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsDown$(AnalysisHelper.scala:104)
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsDown(LogicalPlan.scala:29)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperators(AnalysisHelper.scala:73)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperators$(AnalysisHelper.scala:72)
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperators(LogicalPlan.scala:29)
	at org.apache.spark.sql.hive.RelationConversions.apply(HiveStrategies.scala:207)
	at org.apache.spark.sql.hive.RelationConversions.apply(HiveStrategies.scala:191)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$2(RuleExecutor.scala:130)
	at scala.collection.IndexedSeqOptimized.foldLeft(IndexedSeqOptimized.scala:60)
	at scala.collection.IndexedSeqOptimized.foldLeft$(IndexedSeqOptimized.scala:68)
	at scala.collection.mutable.ArrayBuffer.foldLeft(ArrayBuffer.scala:49)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1(RuleExecutor.scala:127)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1$adapted(RuleExecutor.scala:119)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:119)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.org$apache$spark$sql$catalyst$analysis$Analyzer$$executeSameContext(Analyzer.scala:168)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:162)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:122)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$executeAndTrack$1(RuleExecutor.scala:98)
	at org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:88)
	at org.apache.spark.sql.catalyst.rules.RuleExecutor.executeAndTrack(RuleExecutor.scala:98)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.$anonfun$executeAndCheck$1(Analyzer.scala:146)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:201)
	at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:145)
	at org.apache.spark.sql.hive.test.TestHiveQueryExecution.analyzed$lzycompute(TestHive.scala:606)
	at org.apache.spark.sql.hive.test.TestHiveQueryExecution.analyzed(TestHive.scala:589)
	at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:55)
	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:87)
	at org.apache.spark.sql.SparkSession.table(SparkSession.scala:589)
	at org.apache.spark.sql.execution.command.DropTableCommand.run(ddl.scala:235)
	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:71)
	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:69)
	at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:80)
	at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:226)
	at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3407)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
	at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3403)
	at org.apache.spark.sql.Dataset.<init>(Dataset.scala:226)
	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:88)
	at org.apache.spark.sql.hive.test.TestHiveSparkSession.sql(TestHive.scala:238)
	at org.apache.spark.sql.test.SQLTestUtilsBase.$anonfun$withTable$2(SQLTestUtils.scala:291)
	at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
	at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:38)
	at org.apache.spark.sql.test.SQLTestUtilsBase.$anonfun$withTable$1(SQLTestUtils.scala:290)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1386)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTable(SQLTestUtils.scala:290)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTable$(SQLTestUtils.scala:288)
	at org.apache.spark.sql.execution.datasources.parquet.ParquetCompatibilityTest.withTable(ParquetCompatibilityTest.scala:35)
	at org.apache.spark.sql.hive.ParquetHiveCompatibilitySuite.testParquetHiveCompatibility(ParquetHiveCompatibilitySuite.scala:47)
	at org.apache.spark.sql.hive.ParquetHiveCompatibilitySuite.$anonfun$new$7(ParquetHiveCompatibilitySuite.scala:142)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
	at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
	at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:286)
	at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
	at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
	at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:56)
	at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:393)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:381)
	at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:376)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:458)
	at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite.run(Suite.scala:1124)
	at org.scalatest.Suite.run$(Suite.scala:1106)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:518)
	at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
	at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:56)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
[info] - SPARK-16344: array of struct with a single field named 'array_element' (1 second, 721 milliseconds)
18:00:44.323 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:00:44.323 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:00:44.323 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:00:44.379 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:00:44.379 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:00:44.379 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
[info] QueryPartitionSuite:
[info] - SPARK-5068: query data when path doesn't exist (7 seconds, 786 milliseconds)
18:00:59.380 WARN org.apache.spark.rdd.HadoopRDD: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/spark-dc0ebd17-31b2-479a-9039-2beb3d646305/ds=4 doesn't exist and no partitions returned from this path.
org.apache.hadoop.mapred.InvalidInputException: Input path does not exist: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/spark-dc0ebd17-31b2-479a-9039-2beb3d646305/ds=4
	at org.apache.hadoop.mapred.FileInputFormat.singleThreadedListStatus(FileInputFormat.java:297)
	at org.apache.hadoop.mapred.FileInputFormat.listStatus(FileInputFormat.java:239)
	at org.apache.hadoop.mapred.FileInputFormat.getSplits(FileInputFormat.java:325)
	at org.apache.spark.rdd.HadoopRDD.getPartitions(HadoopRDD.scala:205)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.UnionRDD.$anonfun$getPartitions$1(UnionRDD.scala:85)
	at org.apache.spark.rdd.UnionRDD.$anonfun$getPartitions$1$adapted(UnionRDD.scala:85)
	at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at scala.collection.TraversableLike.map(TraversableLike.scala:238)
	at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
	at scala.collection.immutable.List.map(List.scala:298)
	at org.apache.spark.rdd.UnionRDD.getPartitions(UnionRDD.scala:85)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.sql.execution.SQLExecutionRDD.getPartitions(SQLExecutionRDD.scala:44)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2169)
	at org.apache.spark.rdd.RDD.count(RDD.scala:1227)
	at org.apache.spark.sql.QueryTest$.$anonfun$checkAnswer$1(QueryTest.scala:256)
	at scala.runtime.java8.JFunction0$mcJ$sp.apply(JFunction0$mcJ$sp.java:23)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
	at org.apache.spark.sql.QueryTest$.checkAnswer(QueryTest.scala:256)
	at org.apache.spark.sql.QueryTest.checkAnswer(QueryTest.scala:153)
	at org.apache.spark.sql.QueryTest.checkAnswer(QueryTest.scala:164)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$queryWhenPathNotExist$3(QueryPartitionSuite.scala:67)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$queryWhenPathNotExist$3$adapted(QueryPartitionSuite.scala:39)
	at org.apache.spark.sql.test.SQLTestUtils.$anonfun$withTempDir$1(SQLTestUtils.scala:76)
	at org.apache.spark.sql.test.SQLTestUtils.$anonfun$withTempDir$1$adapted(SQLTestUtils.scala:75)
	at org.apache.spark.SparkFunSuite.withTempDir(SparkFunSuite.scala:161)
	at org.apache.spark.sql.hive.QueryPartitionSuite.org$apache$spark$sql$test$SQLTestUtils$$super$withTempDir(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.test.SQLTestUtils.withTempDir(SQLTestUtils.scala:75)
	at org.apache.spark.sql.test.SQLTestUtils.withTempDir$(SQLTestUtils.scala:74)
	at org.apache.spark.sql.hive.QueryPartitionSuite.withTempDir(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$queryWhenPathNotExist$2(QueryPartitionSuite.scala:39)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1377)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTable(SQLTestUtils.scala:290)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTable$(SQLTestUtils.scala:288)
	at org.apache.spark.sql.hive.QueryPartitionSuite.withTable(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$queryWhenPathNotExist$1(QueryPartitionSuite.scala:39)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1377)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTempView(SQLTestUtils.scala:260)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTempView$(SQLTestUtils.scala:258)
	at org.apache.spark.sql.hive.QueryPartitionSuite.withTempView(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.hive.QueryPartitionSuite.queryWhenPathNotExist(QueryPartitionSuite.scala:38)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$new$4(QueryPartitionSuite.scala:82)
	at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf(SQLHelper.scala:52)
	at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf$(SQLHelper.scala:36)
	at org.apache.spark.sql.hive.QueryPartitionSuite.org$apache$spark$sql$test$SQLTestUtilsBase$$super$withSQLConf(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf(SQLTestUtils.scala:231)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf$(SQLTestUtils.scala:229)
	at org.apache.spark.sql.hive.QueryPartitionSuite.withSQLConf(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$new$3(QueryPartitionSuite.scala:80)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
	at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
	at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:286)
	at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
	at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
	at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:56)
	at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:393)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:381)
	at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:376)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:458)
	at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite.run(Suite.scala:1124)
	at org.scalatest.Suite.run$(Suite.scala:1106)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:518)
	at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
	at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:56)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
18:00:59.837 WARN org.apache.spark.rdd.HadoopRDD: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/spark-dc0ebd17-31b2-479a-9039-2beb3d646305/ds=4 doesn't exist and no partitions returned from this path.
org.apache.hadoop.mapred.InvalidInputException: Input path does not exist: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/spark-dc0ebd17-31b2-479a-9039-2beb3d646305/ds=4
	at org.apache.hadoop.mapred.FileInputFormat.singleThreadedListStatus(FileInputFormat.java:297)
	at org.apache.hadoop.mapred.FileInputFormat.listStatus(FileInputFormat.java:239)
	at org.apache.hadoop.mapred.FileInputFormat.getSplits(FileInputFormat.java:325)
	at org.apache.spark.rdd.HadoopRDD.getPartitions(HadoopRDD.scala:205)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.UnionRDD.$anonfun$getPartitions$1(UnionRDD.scala:85)
	at org.apache.spark.rdd.UnionRDD.$anonfun$getPartitions$1$adapted(UnionRDD.scala:85)
	at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at scala.collection.TraversableLike.map(TraversableLike.scala:238)
	at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
	at scala.collection.immutable.List.map(List.scala:298)
	at org.apache.spark.rdd.UnionRDD.getPartitions(UnionRDD.scala:85)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
	at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:276)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:272)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2169)
	at org.apache.spark.rdd.RDD.$anonfun$collect$1(RDD.scala:1004)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
	at org.apache.spark.rdd.RDD.withScope(RDD.scala:388)
	at org.apache.spark.rdd.RDD.collect(RDD.scala:1003)
	at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:365)
	at org.apache.spark.sql.Dataset.collectFromPlan(Dataset.scala:3417)
	at org.apache.spark.sql.Dataset.$anonfun$collect$1(Dataset.scala:2747)
	at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3407)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
	at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3403)
	at org.apache.spark.sql.Dataset.collect(Dataset.scala:2747)
	at org.apache.spark.sql.QueryTest$.checkAnswer(QueryTest.scala:260)
	at org.apache.spark.sql.QueryTest.checkAnswer(QueryTest.scala:153)
	at org.apache.spark.sql.QueryTest.checkAnswer(QueryTest.scala:164)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$queryWhenPathNotExist$3(QueryPartitionSuite.scala:67)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$queryWhenPathNotExist$3$adapted(QueryPartitionSuite.scala:39)
	at org.apache.spark.sql.test.SQLTestUtils.$anonfun$withTempDir$1(SQLTestUtils.scala:76)
	at org.apache.spark.sql.test.SQLTestUtils.$anonfun$withTempDir$1$adapted(SQLTestUtils.scala:75)
	at org.apache.spark.SparkFunSuite.withTempDir(SparkFunSuite.scala:161)
	at org.apache.spark.sql.hive.QueryPartitionSuite.org$apache$spark$sql$test$SQLTestUtils$$super$withTempDir(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.test.SQLTestUtils.withTempDir(SQLTestUtils.scala:75)
	at org.apache.spark.sql.test.SQLTestUtils.withTempDir$(SQLTestUtils.scala:74)
	at org.apache.spark.sql.hive.QueryPartitionSuite.withTempDir(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$queryWhenPathNotExist$2(QueryPartitionSuite.scala:39)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1377)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTable(SQLTestUtils.scala:290)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTable$(SQLTestUtils.scala:288)
	at org.apache.spark.sql.hive.QueryPartitionSuite.withTable(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$queryWhenPathNotExist$1(QueryPartitionSuite.scala:39)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1377)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTempView(SQLTestUtils.scala:260)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withTempView$(SQLTestUtils.scala:258)
	at org.apache.spark.sql.hive.QueryPartitionSuite.withTempView(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.hive.QueryPartitionSuite.queryWhenPathNotExist(QueryPartitionSuite.scala:38)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$new$4(QueryPartitionSuite.scala:82)
	at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf(SQLHelper.scala:52)
	at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf$(SQLHelper.scala:36)
	at org.apache.spark.sql.hive.QueryPartitionSuite.org$apache$spark$sql$test$SQLTestUtilsBase$$super$withSQLConf(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf(SQLTestUtils.scala:231)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf$(SQLTestUtils.scala:229)
	at org.apache.spark.sql.hive.QueryPartitionSuite.withSQLConf(QueryPartitionSuite.scala:33)
	at org.apache.spark.sql.hive.QueryPartitionSuite.$anonfun$new$3(QueryPartitionSuite.scala:80)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
	at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
	at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:286)
	at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
	at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
	at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:56)
	at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:393)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:381)
	at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:376)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:458)
	at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite.run(Suite.scala:1124)
	at org.scalatest.Suite.run$(Suite.scala:1106)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:518)
	at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
	at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:56)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
[info] - Replace spark.sql.hive.verifyPartitionPath by spark.files.ignoreMissingFiles (7 seconds, 959 milliseconds)
18:01:00.152 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/table_with_timestamp_partition specified for non-external table:table_with_timestamp_partition
[info] - SPARK-21739: Cast expression should initialize timezoneId (2 seconds, 611 milliseconds)
18:01:02.831 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:02.831 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:02.831 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:02.897 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:02.897 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:02.897 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
[info] HiveOrcPartitionDiscoverySuite:
[info] - read partitioned table - normal case (2 seconds, 444 milliseconds)
[info] - read partitioned table - with nulls (2 seconds, 86 milliseconds)
[info] - SPARK-27162: handle pathfilter configuration correctly (1 second, 81 milliseconds)
18:01:08.614 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:08.614 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:08.614 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:08.672 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:08.672 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:08.672 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
[info] PruningSuite:
18:01:08.763 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:08.763 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:08.763 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:08.817 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:08.817 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:08.817 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:08.847 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart specified for non-external table:srcpart
[info] - Column pruning - with partitioned table - pruning test (2 seconds, 985 milliseconds)
18:01:12.122 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart does not exist; Force to delete it.
18:01:12.123 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart
18:01:12.165 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:12.165 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:12.165 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:12.244 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:12.244 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:12.244 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:12.266 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src specified for non-external table:src
18:01:12.889 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart specified for non-external table:srcpart
[info] - Column pruning - with partitioned table - query test (3 seconds, 775 milliseconds)
[info] - Column pruning - with non-partitioned table - pruning test (54 milliseconds)
18:01:15.726 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src does not exist; Force to delete it.
18:01:15.726 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src
18:01:15.862 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart does not exist; Force to delete it.
18:01:15.862 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart
18:01:15.902 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:15.902 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:15.902 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:15.972 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:15.973 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:15.973 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:15.993 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src specified for non-external table:src
[info] - Column pruning - with non-partitioned table - query test (1 second, 222 milliseconds)
[info] - Column pruning - with multiple projects - pruning test (59 milliseconds)
18:01:16.988 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src does not exist; Force to delete it.
18:01:16.988 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src
18:01:17.021 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:17.021 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:17.022 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:17.071 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:17.071 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:17.071 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:17.092 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src specified for non-external table:src
[info] - Column pruning - with multiple projects - query test (718 milliseconds)
[info] - Column pruning - projects alias substituting - pruning test (52 milliseconds)
18:01:17.775 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src does not exist; Force to delete it.
18:01:17.775 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src
18:01:17.815 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:17.815 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:17.815 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:17.862 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:17.862 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:17.862 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:17.880 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src specified for non-external table:src
[info] - Column pruning - projects alias substituting - query test (842 milliseconds)
[info] - Column pruning - filter alias in-lining - pruning test (97 milliseconds)
18:01:18.704 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src does not exist; Force to delete it.
18:01:18.704 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src
18:01:18.739 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:18.739 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:18.739 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:18.798 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:18.798 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:18.799 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:18.820 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src specified for non-external table:src
[info] - Column pruning - filter alias in-lining - query test (734 milliseconds)
[info] - Column pruning - without filters - pruning test (73 milliseconds)
18:01:19.529 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src does not exist; Force to delete it.
18:01:19.529 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src
18:01:19.578 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:19.578 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:19.578 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:19.630 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:19.630 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:19.630 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:19.651 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src specified for non-external table:src
[info] - Column pruning - without filters - query test (852 milliseconds)
[info] - Column pruning - simple top project without aliases - pruning test (65 milliseconds)
18:01:20.439 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src does not exist; Force to delete it.
18:01:20.439 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src
18:01:20.478 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:20.478 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:20.478 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:20.533 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:20.533 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:20.533 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:20.555 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src specified for non-external table:src
[info] - Column pruning - simple top project without aliases - query test (900 milliseconds)
[info] - Column pruning - non-trivial top project with aliases - pruning test (62 milliseconds)
18:01:21.384 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src does not exist; Force to delete it.
18:01:21.385 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src
18:01:21.419 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:21.419 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:21.419 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:21.471 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:21.471 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:21.471 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:21.494 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src specified for non-external table:src
[info] - Column pruning - non-trivial top project with aliases - query test (796 milliseconds)
[info] - Partition pruning - non-partitioned, non-trivial project - pruning test (49 milliseconds)
18:01:22.234 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src does not exist; Force to delete it.
18:01:22.234 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src
18:01:22.271 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:22.271 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:22.271 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:22.328 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:22.328 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:22.328 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:22.354 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src specified for non-external table:src
[info] - Partition pruning - non-partitioned, non-trivial project - query test (851 milliseconds)
[info] - Partition pruning - non-partitioned table - pruning test (49 milliseconds)
18:01:23.138 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src does not exist; Force to delete it.
18:01:23.138 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src
18:01:23.174 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:23.174 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:23.174 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:23.231 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:23.231 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:23.231 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:23.253 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src specified for non-external table:src
[info] - Partition pruning - non-partitioned table - query test (841 milliseconds)
18:01:23.931 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart1 specified for non-external table:srcpart1
[info] - Partition pruning - with filter on string partition key - pruning test (2 seconds, 674 milliseconds)
18:01:26.647 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src does not exist; Force to delete it.
18:01:26.647 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src
18:01:26.776 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart1 does not exist; Force to delete it.
18:01:26.777 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart1
18:01:26.817 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:26.817 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:26.817 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:26.898 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:26.898 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:26.899 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:26.921 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src specified for non-external table:src
18:01:27.480 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart specified for non-external table:srcpart
18:01:29.707 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart1 specified for non-external table:srcpart1
[info] - Partition pruning - with filter on string partition key - query test (5 seconds, 808 milliseconds)
[info] - Partition pruning - with filter on int partition key - pruning test (138 milliseconds)
18:01:32.717 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src does not exist; Force to delete it.
18:01:32.717 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src
18:01:32.849 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart does not exist; Force to delete it.
18:01:32.849 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart
18:01:32.933 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart1 does not exist; Force to delete it.
18:01:32.933 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart1
18:01:32.972 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:32.972 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:32.972 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:33.045 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:33.045 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:33.046 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:33.066 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src specified for non-external table:src
18:01:33.633 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart specified for non-external table:srcpart
18:01:36.366 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart1 specified for non-external table:srcpart1
[info] - Partition pruning - with filter on int partition key - query test (6 seconds, 718 milliseconds)
[info] - Partition pruning - left only 1 partition - pruning test (166 milliseconds)
18:01:39.589 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src does not exist; Force to delete it.
18:01:39.589 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src
18:01:39.735 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart does not exist; Force to delete it.
18:01:39.735 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart
18:01:39.811 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart1 does not exist; Force to delete it.
18:01:39.811 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart1
18:01:39.856 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:39.856 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:39.856 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:39.937 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:39.937 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:39.937 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:39.960 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src specified for non-external table:src
18:01:40.512 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart specified for non-external table:srcpart
18:01:43.122 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart1 specified for non-external table:srcpart1
[info] - Partition pruning - left only 1 partition - query test (6 seconds, 249 milliseconds)
[info] - Partition pruning - all partitions pruned - pruning test (127 milliseconds)
18:01:45.899 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src does not exist; Force to delete it.
18:01:45.900 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src
18:01:46.037 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart does not exist; Force to delete it.
18:01:46.037 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart
18:01:46.111 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart1 does not exist; Force to delete it.
18:01:46.111 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart1
18:01:46.150 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:46.150 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:46.150 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:46.225 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:46.225 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:46.225 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:46.247 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src specified for non-external table:src
18:01:46.859 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart specified for non-external table:srcpart
18:01:49.722 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart1 specified for non-external table:srcpart1
[info] - Partition pruning - all partitions pruned - query test (6 seconds, 786 milliseconds)
[info] - Partition pruning - pruning with both column key and partition key - pruning test (146 milliseconds)
18:01:52.911 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src does not exist; Force to delete it.
18:01:52.911 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src
18:01:53.054 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart does not exist; Force to delete it.
18:01:53.054 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart
18:01:53.129 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart1 does not exist; Force to delete it.
18:01:53.129 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart1
18:01:53.166 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:53.166 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:53.166 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:53.238 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:53.238 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:53.238 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:53.256 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src specified for non-external table:src
18:01:53.824 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart specified for non-external table:srcpart
18:01:56.356 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart1 specified for non-external table:srcpart1
[info] - Partition pruning - pruning with both column key and partition key - query test (6 seconds, 515 milliseconds)
18:01:59.437 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src does not exist; Force to delete it.
18:01:59.437 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src
18:01:59.584 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart does not exist; Force to delete it.
18:01:59.584 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart
18:01:59.652 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart1 does not exist; Force to delete it.
18:01:59.652 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/srcpart1
18:01:59.692 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:59.692 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:59.693 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:01:59.770 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:01:59.770 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:01:59.770 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
[info] HiveHadoopDelegationTokenManagerSuite:
[info] - default configuration (26 milliseconds)
18:01:59.839 WARN org.apache.spark.deploy.security.HadoopDelegationTokenManager: spark.yarn.security.credentials.hive.enabled is deprecated.  Please use spark.security.credentials.hive.enabled instead.
[info] - using deprecated configurations (2 milliseconds)
18:02:00.246 WARN org.apache.spark.util.Utils: Your hostname, amp-jenkins-staging-worker-02 resolves to a loopback address: 127.0.1.1; using 192.168.10.32 instead (on interface eno1)
18:02:00.247 WARN org.apache.spark.util.Utils: Set SPARK_LOCAL_IP if you need to bind to another address
[info] - SPARK-23209: obtain tokens when Hive classes are not available (559 milliseconds)
[info] ObjectHashAggregateSuite:
[info] - typed_count without grouping keys (303 milliseconds)
[info] - typed_count without grouping keys and empty input (173 milliseconds)
[info] - typed_count with grouping keys (228 milliseconds)
[info] - typed_count fallback to sort-based aggregation (486 milliseconds)
[info] - random input data types (9 seconds, 736 milliseconds)
[info] - randomized aggregation test - [typed] - with grouping keys - with empty input (737 milliseconds)
[info] - randomized aggregation test - [typed] - with grouping keys - with non-empty input (1 second, 576 milliseconds)
[info] - randomized aggregation test - [typed] - without grouping keys - with empty input (290 milliseconds)
[info] - randomized aggregation test - [typed] - without grouping keys - with non-empty input (367 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe] - with grouping keys - with empty input (1 second, 17 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe] - with grouping keys - with non-empty input (1 second, 814 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe] - without grouping keys - with empty input (221 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe] - without grouping keys - with non-empty input (265 milliseconds)
[info] - randomized aggregation test - [with partial + safe] - with grouping keys - with empty input (710 milliseconds)
[info] - randomized aggregation test - [with partial + safe] - with grouping keys - with non-empty input (1 second, 994 milliseconds)
[info] - randomized aggregation test - [with partial + safe] - without grouping keys - with empty input (326 milliseconds)
[info] - randomized aggregation test - [with partial + safe] - without grouping keys - with non-empty input (294 milliseconds)
[info] - randomized aggregation test - [with distinct] - with grouping keys - with empty input (1 second, 397 milliseconds)
[info] - randomized aggregation test - [with distinct] - with grouping keys - with non-empty input (2 seconds, 648 milliseconds)
[info] - randomized aggregation test - [with distinct] - without grouping keys - with empty input (666 milliseconds)
[info] - randomized aggregation test - [with distinct] - without grouping keys - with non-empty input (946 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe] - with grouping keys - with empty input (800 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe] - with grouping keys - with non-empty input (1 second, 813 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe] - without grouping keys - with empty input (300 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe] - without grouping keys - with non-empty input (326 milliseconds)
[info] - randomized aggregation test - [typed, with partial + safe] - with grouping keys - with empty input (672 milliseconds)
[info] - randomized aggregation test - [typed, with partial + safe] - with grouping keys - with non-empty input (1 second, 797 milliseconds)
[info] - randomized aggregation test - [typed, with partial + safe] - without grouping keys - with empty input (277 milliseconds)
[info] - randomized aggregation test - [typed, with partial + safe] - without grouping keys - with non-empty input (296 milliseconds)
[info] - randomized aggregation test - [typed, with distinct] - with grouping keys - with empty input (832 milliseconds)
[info] - randomized aggregation test - [typed, with distinct] - with grouping keys - with non-empty input (2 seconds, 465 milliseconds)
[info] - randomized aggregation test - [typed, with distinct] - without grouping keys - with empty input (611 milliseconds)
[info] - randomized aggregation test - [typed, with distinct] - without grouping keys - with non-empty input (950 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with partial + safe] - with grouping keys - with empty input (793 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with partial + safe] - with grouping keys - with non-empty input (1 second, 971 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with partial + safe] - without grouping keys - with empty input (429 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with partial + safe] - without grouping keys - with non-empty input (385 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with distinct] - with grouping keys - with empty input (1 second, 728 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with distinct] - with grouping keys - with non-empty input (2 seconds, 645 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with distinct] - without grouping keys - with empty input (450 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with distinct] - without grouping keys - with non-empty input (854 milliseconds)
[info] - randomized aggregation test - [with partial + safe, with distinct] - with grouping keys - with empty input (849 milliseconds)
[info] - randomized aggregation test - [with partial + safe, with distinct] - with grouping keys - with non-empty input (2 seconds, 772 milliseconds)
[info] - randomized aggregation test - [with partial + safe, with distinct] - without grouping keys - with empty input (666 milliseconds)
[info] - randomized aggregation test - [with partial + safe, with distinct] - without grouping keys - with non-empty input (1 second, 13 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with partial + safe] - with grouping keys - with empty input (691 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with partial + safe] - with grouping keys - with non-empty input (1 second, 884 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with partial + safe] - without grouping keys - with empty input (358 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with partial + safe] - without grouping keys - with non-empty input (278 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with distinct] - with grouping keys - with empty input (818 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with distinct] - with grouping keys - with non-empty input (3 seconds, 84 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with distinct] - without grouping keys - with empty input (685 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with distinct] - without grouping keys - with non-empty input (1 second, 142 milliseconds)
[info] - randomized aggregation test - [typed, with partial + safe, with distinct] - with grouping keys - with empty input (930 milliseconds)
[info] - randomized aggregation test - [typed, with partial + safe, with distinct] - with grouping keys - with non-empty input (3 seconds, 469 milliseconds)
[info] - randomized aggregation test - [typed, with partial + safe, with distinct] - without grouping keys - with empty input (749 milliseconds)
[info] - randomized aggregation test - [typed, with partial + safe, with distinct] - without grouping keys - with non-empty input (1 second, 197 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with partial + safe, with distinct] - with grouping keys - with empty input (988 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with partial + safe, with distinct] - with grouping keys - with non-empty input (2 seconds, 851 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with partial + safe, with distinct] - without grouping keys - with empty input (717 milliseconds)
[info] - randomized aggregation test - [with partial + unsafe, with partial + safe, with distinct] - without grouping keys - with non-empty input (1 second, 241 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with partial + safe, with distinct] - with grouping keys - with empty input (1 second, 24 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with partial + safe, with distinct] - with grouping keys - with non-empty input (3 seconds, 569 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with partial + safe, with distinct] - without grouping keys - with empty input (770 milliseconds)
[info] - randomized aggregation test - [typed, with partial + unsafe, with partial + safe, with distinct] - without grouping keys - with non-empty input (1 second, 315 milliseconds)
[info] - SPARK-18403 Fix unsafe data false sharing issue in ObjectHashAggregateExec (1 second, 371 milliseconds)
18:03:21.642 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:03:21.642 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:03:21.642 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:03:21.693 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:03:21.693 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:03:21.693 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
[info] TestHiveSuite:
18:03:21.719 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src specified for non-external table:src
18:03:22.444 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src does not exist; Force to delete it.
18:03:22.444 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/warehouse-bd84234e-a890-4aed-bfed-9261a826502f/src
18:03:22.476 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:03:22.476 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:03:22.476 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:03:22.524 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:03:22.524 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:03:22.524 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:03:22.594 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:03:22.594 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:03:22.594 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:03:22.637 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:03:22.637 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:03:22.637 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
[info] - load test table based on case sensitivity (939 milliseconds)
[info] - SPARK-15887: hive-site.xml should be loaded (1 millisecond)
18:03:22.696 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:03:22.696 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:03:22.696 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
18:03:22.737 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
18:03:22.737 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
18:03:22.738 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
[info] ClasspathDependenciesSuite:
[info] - shaded Protobuf (4 milliseconds)
[info] - shaded Kryo (0 milliseconds)
[info] - hive-common (1 millisecond)
[info] - hive-exec (0 milliseconds)
[info] - Forbidden Dependencies (4 milliseconds)
[info] - parquet-hadoop-bundle (0 milliseconds)
[info] Test run started
[info] Test org.apache.spark.sql.hive.JavaMetastoreDataSourcesSuite.saveTableAndQueryIt started
18:03:23.502 WARN org.apache.spark.sql.hive.test.TestHiveExternalCatalog: Couldn't find corresponding Hive SerDe for data source provider org.apache.spark.sql.json. Persisting data source table `default`.`javasavedtable` into Hive metastore in Spark SQL specific format, which is NOT compatible with Hive.
[info] Test run finished: 0 failed, 0 ignored, 1 total, 0.912s
[info] Test run started
[info] Test org.apache.spark.sql.hive.JavaDataFrameSuite.testUDAF started
[info] Test org.apache.spark.sql.hive.JavaDataFrameSuite.saveTableAndQueryIt started
[info] Test run finished: 0 failed, 0 ignored, 2 total, 2.283s
[info] ScalaTest
[info] Run completed in 1 hour, 50 minutes, 38 seconds.
[info] Total number of tests run: 3435
[info] Suites: completed 125, aborted 0
[info] Tests: succeeded 3435, failed 0, canceled 0, ignored 595, pending 0
[info] All tests passed.
[info] Passed: Total 3438, Failed 0, Errors 0, Passed 3438, Ignored 595
[error] (sql/test:test) sbt.TestsFailedException: Tests unsuccessful
[error] Total time: 6680 s, completed Nov 23, 2019 6:03:58 PM
[error] running /home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/build/sbt -Phadoop-3.2 -Phive-2.3 -Pmesos -Pkubernetes -Pkinesis-asl -Pspark-ganglia-lgpl -Phadoop-cloud -Phive -Pyarn -Phive-thriftserver test ; received return code 1
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results
Finished: FAILURE