[warn]                               ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala:125: method setRole in class Builder is deprecated: see corresponding Javadoc for more information.
[warn]         Resource.newBuilder().setRole("role2")
[warn]                               ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala:139: method valueOf in Java enum Status is deprecated: see corresponding Javadoc for more information.
[warn]     ).thenReturn(Status.valueOf(1))
[warn]                         ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala:152: method getRole in class Resource is deprecated: see corresponding Javadoc for more information.
[warn]     assert(cpus.exists(_.getRole() == "role2"))
[warn]                          ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala:153: method getRole in class Resource is deprecated: see corresponding Javadoc for more information.
[warn]     assert(cpus.exists(_.getRole() == "*"))
[warn]                          ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala:156: method getRole in class Resource is deprecated: see corresponding Javadoc for more information.
[warn]     assert(mem.exists(_.getRole() == "role2"))
[warn]                         ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala:157: method getRole in class Resource is deprecated: see corresponding Javadoc for more information.
[warn]     assert(mem.exists(_.getRole() == "*"))
[warn]                         ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala:419: method setRole in class Builder is deprecated: see corresponding Javadoc for more information.
[warn]         Resource.newBuilder().setRole("*")
[warn]                               ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala:422: method setRole in class Builder is deprecated: see corresponding Javadoc for more information.
[warn]         Resource.newBuilder().setRole("*")
[warn]                               ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala:280: method valueOf in Java enum Status is deprecated: see corresponding Javadoc for more information.
[warn]     ).thenReturn(Status.valueOf(1))
[warn]                         ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala:281: method valueOf in Java enum Status is deprecated: see corresponding Javadoc for more information.
[warn]     when(driver.declineOffer(mesosOffers.get(1).getId)).thenReturn(Status.valueOf(1))
[warn]                                                                           ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala:282: method valueOf in Java enum Status is deprecated: see corresponding Javadoc for more information.
[warn]     when(driver.declineOffer(mesosOffers.get(2).getId)).thenReturn(Status.valueOf(1))
[warn]                                                                           ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala:308: method valueOf in Java enum Status is deprecated: see corresponding Javadoc for more information.
[warn]     when(driver.declineOffer(mesosOffers2.get(0).getId)).thenReturn(Status.valueOf(1))
[warn]                                                                            ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala:335: method setRole in class Builder is deprecated: see corresponding Javadoc for more information.
[warn]       .setRole("prod")
[warn]        ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala:339: method setRole in class Builder is deprecated: see corresponding Javadoc for more information.
[warn]       .setRole("prod")
[warn]        ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala:344: method setRole in class Builder is deprecated: see corresponding Javadoc for more information.
[warn]       .setRole("dev")
[warn]        ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala:349: method setRole in class Builder is deprecated: see corresponding Javadoc for more information.
[warn]       .setRole("dev")
[warn]        ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala:391: method valueOf in Java enum Status is deprecated: see corresponding Javadoc for more information.
[warn]     ).thenReturn(Status.valueOf(1))
[warn]                         ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala:408: method getRole in class Resource is deprecated: see corresponding Javadoc for more information.
[warn]     assert(cpusDev.getRole.equals("dev"))
[warn]                    ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala:411: method getRole in class Resource is deprecated: see corresponding Javadoc for more information.
[warn]       r.getName.equals("mem") && r.getScalar.getValue.equals(484.0) && r.getRole.equals("prod")
[warn]                                                                          ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala:414: method getRole in class Resource is deprecated: see corresponding Javadoc for more information.
[warn]       r.getName.equals("cpus") && r.getScalar.getValue.equals(1.0) && r.getRole.equals("prod")
[warn]                                                                         ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtilsSuite.scala:54: method setRole in class Builder is deprecated: see corresponding Javadoc for more information.
[warn]     role.foreach { r => builder.setRole(r) }
[warn]                                 ^
[warn] 24 warnings found
[info] Packaging /home/jenkins/workspace/NewSparkPullRequestBuilder/external/kafka-0-10/target/scala-2.12/spark-streaming-kafka-0-10_2.12-3.0.0-SNAPSHOT-tests.jar ...
[info] Done packaging.
[info] Packaging /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/mesos/target/scala-2.12/spark-mesos_2.12-3.0.0-SNAPSHOT-tests.jar ...
[info] Done packaging.
[info] Packaging /home/jenkins/workspace/NewSparkPullRequestBuilder/graphx/target/scala-2.12/spark-graphx_2.12-3.0.0-SNAPSHOT-tests.jar ...
[info] Done packaging.
[info] Done updating.
[warn] Found version conflict(s) in library dependencies; some are suspected to be binary incompatible:
[warn] 
[warn] 	* io.netty:netty:3.9.9.Final is selected over {3.6.2.Final, 3.7.0.Final}
[warn] 	    +- org.apache.spark:spark-core_2.12:3.0.0-SNAPSHOT    (depends on 3.9.9.Final)
[warn] 	    +- org.apache.hadoop:hadoop-hdfs:2.7.4                (depends on 3.6.2.Final)
[warn] 	    +- org.apache.zookeeper:zookeeper:3.4.6               (depends on 3.6.2.Final)
[warn] 
[warn] 	* org.scala-lang.modules:scala-xml_2.12:1.2.0 is selected over 1.0.6
[warn] 	    +- org.apache.spark:spark-core_2.12:3.0.0-SNAPSHOT    (depends on 1.0.6)
[warn] 	    +- org.scala-lang:scala-compiler:2.12.8               (depends on 1.0.6)
[warn] 
[warn] 	* io.netty:netty-all:4.1.30.Final is selected over 4.0.23.Final
[warn] 	    +- org.apache.spark:spark-core_2.12:3.0.0-SNAPSHOT    (depends on 4.0.23.Final)
[warn] 	    +- org.apache.spark:spark-network-common_2.12:3.0.0-SNAPSHOT (depends on 4.0.23.Final)
[warn] 	    +- org.apache.hadoop:hadoop-hdfs:2.7.4                (depends on 4.0.23.Final)
[warn] 
[warn] Run 'evicted' to see detailed eviction warnings
[info] Updating {file:/home/jenkins/workspace/NewSparkPullRequestBuilder/}graph...
[info] Packaging /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/kubernetes/core/target/scala-2.12/spark-kubernetes_2.12-3.0.0-SNAPSHOT-tests.jar ...
[info] Done packaging.
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala:451: unreachable code
[warn]       (rs: ResultSet, row: InternalRow, pos: Int) =>
[warn]                                                   ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala:271: method write in class IOUtils is deprecated: see corresponding Javadoc for more information.
[warn]           IOUtils.write(text, out)
[warn]                   ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/streaming/src/test/scala/org/apache/spark/streaming/StreamingContextSuite.scala:870: method write in class FileUtils is deprecated: see corresponding Javadoc for more information.
[warn]     FileUtils.write(new File(fakeCheckpointFile.toString()), "blablabla")
[warn]               ^
[warn] Multiple main classes detected.  Run 'show discoveredMainClasses' to see the list
[info] Packaging /home/jenkins/workspace/NewSparkPullRequestBuilder/resource-managers/yarn/target/scala-2.12/spark-yarn_2.12-3.0.0-SNAPSHOT-tests.jar ...
[info] Done packaging.
[info] Done updating.
[warn] Found version conflict(s) in library dependencies; some are suspected to be binary incompatible:
[warn] 
[warn] 	* io.netty:netty:3.9.9.Final is selected over {3.6.2.Final, 3.7.0.Final}
[warn] 	    +- org.apache.spark:spark-core_2.12:3.0.0-SNAPSHOT    (depends on 3.9.9.Final)
[warn] 	    +- org.apache.hadoop:hadoop-hdfs:2.7.4                (depends on 3.6.2.Final)
[warn] 	    +- org.apache.zookeeper:zookeeper:3.4.6               (depends on 3.6.2.Final)
[warn] 
[warn] 	* org.scala-lang.modules:scala-xml_2.12:1.2.0 is selected over 1.0.6
[warn] 	    +- org.apache.spark:spark-core_2.12:3.0.0-SNAPSHOT    (depends on 1.0.6)
[warn] 	    +- org.scala-lang:scala-compiler:2.12.8               (depends on 1.0.6)
[warn] 
[warn] 	* io.netty:netty-all:4.1.30.Final is selected over 4.0.23.Final
[warn] 	    +- org.apache.spark:spark-core_2.12:3.0.0-SNAPSHOT    (depends on 4.0.23.Final)
[warn] 	    +- org.apache.spark:spark-network-common_2.12:3.0.0-SNAPSHOT (depends on 4.0.23.Final)
[warn] 	    +- org.apache.hadoop:hadoop-hdfs:2.7.4                (depends on 4.0.23.Final)
[warn] 
[warn] Run 'evicted' to see detailed eviction warnings
[info] Updating {file:/home/jenkins/workspace/NewSparkPullRequestBuilder/}assembly...
[warn] two warnings found
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala:283: class WriteToDataSourceV2 in package v2 is deprecated (since 2.4.0): Use specific logical plans like AppendData instead
[warn]               WriteToDataSourceV2(write, df.logicalPlan)
[warn]               ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala:262: class ParquetInputSplit in package hadoop is deprecated: see corresponding Javadoc for more information.
[warn]         new org.apache.parquet.hadoop.ParquetInputSplit(
[warn]                                       ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala:273: method readFooter in class ParquetFileReader is deprecated: see corresponding Javadoc for more information.
[warn]         ParquetFileReader.readFooter(sharedConf, filePath, SKIP_ROW_GROUPS).getFileMetaData
[warn]                           ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala:447: method readFooter in class ParquetFileReader is deprecated: see corresponding Javadoc for more information.
[warn]           ParquetFileReader.readFooter(
[warn]                             ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetPartitionReaderFactory.scala:121: class ParquetInputSplit in package hadoop is deprecated: see corresponding Javadoc for more information.
[warn]           Option[TimeZone]) => RecordReader[Void, T]): RecordReader[Void, T] = {
[warn]                             ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetPartitionReaderFactory.scala:126: class ParquetInputSplit in package hadoop is deprecated: see corresponding Javadoc for more information.
[warn]       new org.apache.parquet.hadoop.ParquetInputSplit(
[warn]                                     ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetPartitionReaderFactory.scala:135: method readFooter in class ParquetFileReader is deprecated: see corresponding Javadoc for more information.
[warn]       ParquetFileReader.readFooter(conf, filePath, SKIP_ROW_GROUPS).getFileMetaData
[warn]                         ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetPartitionReaderFactory.scala:184: class ParquetInputSplit in package hadoop is deprecated: see corresponding Javadoc for more information.
[warn]       split: ParquetInputSplit,
[warn]              ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetPartitionReaderFactory.scala:213: class ParquetInputSplit in package hadoop is deprecated: see corresponding Javadoc for more information.
[warn]       split: ParquetInputSplit,
[warn]              ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/TriggerExecutor.scala:46: class ProcessingTime in package streaming is deprecated (since 2.2.0): use Trigger.ProcessingTime(intervalMs)
[warn] case class ProcessingTimeExecutor(processingTime: ProcessingTime, clock: Clock = new SystemClock())
[warn]                                                   ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/TriggerExecutor.scala:46: class ProcessingTime in package streaming is deprecated (since 2.2.0): use Trigger.ProcessingTime(intervalMs)
[warn] case class ProcessingTimeExecutor(processingTime: ProcessingTime, clock: Clock = new SystemClock())
[warn]            ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/continuous/ContinuousExecution.scala:96: object ProcessingTime in package streaming is deprecated (since 2.2.0): use Trigger.ProcessingTime(intervalMs)
[warn]     case ContinuousTrigger(t) => ProcessingTimeExecutor(ProcessingTime(t), triggerClock)
[warn]                                                         ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/sources/WriteToMicroBatchDataSource.scala:36: class WriteToDataSourceV2 in package v2 is deprecated (since 2.4.0): Use specific logical plans like AppendData instead
[warn]   def createPlan(batchId: Long): WriteToDataSourceV2 = {
[warn]                                  ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/sources/WriteToMicroBatchDataSource.scala:37: class WriteToDataSourceV2 in package v2 is deprecated (since 2.4.0): Use specific logical plans like AppendData instead
[warn]     WriteToDataSourceV2(new MicroBatchWrite(batchId, write), query)
[warn]     ^
[info] Done updating.
[warn] Found version conflict(s) in library dependencies; some are suspected to be binary incompatible:
[warn] 
[warn] 	* org.apache.thrift:libthrift:0.12.0 is selected over 0.9.3
[warn] 	    +- org.apache.spark:spark-hive_2.12:3.0.0-SNAPSHOT    (depends on 0.9.3)
[warn] 	    +- org.apache.thrift:libfb303:0.9.3                   (depends on 0.9.3)
[warn] 
[warn] 	* io.netty:netty:3.9.9.Final is selected over {3.6.2.Final, 3.7.0.Final}
[warn] 	    +- org.apache.spark:spark-core_2.12:3.0.0-SNAPSHOT    (depends on 3.9.9.Final)
[warn] 	    +- org.apache.hadoop:hadoop-hdfs:2.7.4                (depends on 3.6.2.Final)
[warn] 	    +- org.apache.zookeeper:zookeeper:3.4.6               (depends on 3.6.2.Final)
[warn] 
[warn] 	* org.scala-lang.modules:scala-xml_2.12:1.2.0 is selected over 1.0.6
[warn] 	    +- org.apache.spark:spark-core_2.12:3.0.0-SNAPSHOT    (depends on 1.0.6)
[warn] 	    +- org.scala-lang:scala-compiler:2.12.8               (depends on 1.0.6)
[warn] 
[warn] 	* io.netty:netty-all:4.1.30.Final is selected over 4.0.23.Final
[warn] 	    +- org.apache.spark:spark-core_2.12:3.0.0-SNAPSHOT    (depends on 4.0.23.Final)
[warn] 	    +- org.apache.spark:spark-network-common_2.12:3.0.0-SNAPSHOT (depends on 4.0.23.Final)
[warn] 	    +- org.apache.hadoop:hadoop-hdfs:2.7.4                (depends on 4.0.23.Final)
[warn] 
[warn] Run 'evicted' to see detailed eviction warnings
[info] Packaging /home/jenkins/workspace/NewSparkPullRequestBuilder/streaming/target/scala-2.12/spark-streaming_2.12-3.0.0-SNAPSHOT-tests.jar ...
[info] Compiling 8 Scala sources and 2 Java sources to /home/jenkins/workspace/NewSparkPullRequestBuilder/external/kinesis-asl/target/scala-2.12/test-classes...
[info] Done packaging.
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisInputDStreamBuilderSuite.scala:167: method initialPositionInStream in class Builder is deprecated (since 2.3.0): use initialPosition(initialPosition: KinesisInitialPosition)
[warn]         .initialPositionInStream(InitialPositionInStream.AT_TIMESTAMP)
[warn]          ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisStreamSuite.scala:106: method createStream in object KinesisUtils is deprecated (since 2.2.0): Use KinesisInputDStream.builder instead
[warn]     val kinesisStream1 = KinesisUtils.createStream(ssc, "myAppName", "mySparkStream",
[warn]                                       ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisStreamSuite.scala:109: method createStream in object KinesisUtils is deprecated (since 2.2.0): Use KinesisInputDStream.builder instead
[warn]     val kinesisStream2 = KinesisUtils.createStream(ssc, "myAppName", "mySparkStream",
[warn]                                       ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisStreamSuite.scala:116: method createStream in object KinesisUtils is deprecated (since 2.2.0): Use KinesisInputDStream.builder instead
[warn]     val inputStream = KinesisUtils.createStream(ssc, appName, "dummyStream",
[warn]                                    ^
[warn] four warnings found
[info] Note: Some input files use or override a deprecated API.
[info] Note: Recompile with -Xlint:deprecation for details.
[info] Packaging /home/jenkins/workspace/NewSparkPullRequestBuilder/external/kinesis-asl/target/scala-2.12/spark-streaming-kinesis-asl_2.12-3.0.0-SNAPSHOT-tests.jar ...
[info] Done packaging.
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala:686: match may not be exhaustive.
[warn] It would fail on the following inputs: (Object(), _), (_, Object()), (_, _)
[warn]           after.asInstanceOf[TreeNode[_]].children).foreach {
[warn]                                                             ^
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/xml/UDFXPathUtilSuite.scala:88: method writeStringToFile in class FileUtils is deprecated: see corresponding Javadoc for more information.
[warn]     FileUtils.writeStringToFile(tempFile, secretValue)
[warn]               ^
[warn] 17 warnings found
[info] Note: Some input files use or override a deprecated API.
[info] Note: Recompile with -Xlint:deprecation for details.
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/sources/WriteToMicroBatchDataSource.scala:36: class WriteToDataSourceV2 in package v2 is deprecated (since 2.4.0): Use specific logical plans like AppendData instead
[warn]   def createPlan(batchId: Long): WriteToDataSourceV2 = {
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/sources/WriteToMicroBatchDataSource.scala:37: class WriteToDataSourceV2 in package v2 is deprecated (since 2.4.0): Use specific logical plans like AppendData instead
[warn]     WriteToDataSourceV2(new MicroBatchWrite(batchId, write), query)
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetWriteBuilder.scala:91: value ENABLE_JOB_SUMMARY in class ParquetOutputFormat is deprecated: see corresponding Javadoc for more information.
[warn]       && conf.get(ParquetOutputFormat.ENABLE_JOB_SUMMARY) == null) {
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala:128: value ENABLE_JOB_SUMMARY in class ParquetOutputFormat is deprecated: see corresponding Javadoc for more information.
[warn]       && conf.get(ParquetOutputFormat.ENABLE_JOB_SUMMARY) == null) {
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala:262: class ParquetInputSplit in package hadoop is deprecated: see corresponding Javadoc for more information.
[warn]         new org.apache.parquet.hadoop.ParquetInputSplit(
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala:273: method readFooter in class ParquetFileReader is deprecated: see corresponding Javadoc for more information.
[warn]         ParquetFileReader.readFooter(sharedConf, filePath, SKIP_ROW_GROUPS).getFileMetaData
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala:447: method readFooter in class ParquetFileReader is deprecated: see corresponding Javadoc for more information.
[warn]           ParquetFileReader.readFooter(
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala:283: class WriteToDataSourceV2 in package v2 is deprecated (since 2.4.0): Use specific logical plans like AppendData instead
[warn]               WriteToDataSourceV2(write, df.logicalPlan)
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/TriggerExecutor.scala:46: class ProcessingTime in package streaming is deprecated (since 2.2.0): use Trigger.ProcessingTime(intervalMs)
[warn] case class ProcessingTimeExecutor(processingTime: ProcessingTime, clock: Clock = new SystemClock())
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/TriggerExecutor.scala:46: class ProcessingTime in package streaming is deprecated (since 2.2.0): use Trigger.ProcessingTime(intervalMs)
[warn] case class ProcessingTimeExecutor(processingTime: ProcessingTime, clock: Clock = new SystemClock())
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/TriggerExecutor.scala:46: class ProcessingTime in package streaming is deprecated (since 2.2.0): use Trigger.ProcessingTime(intervalMs)
[warn] case class ProcessingTimeExecutor(processingTime: ProcessingTime, clock: Clock = new SystemClock())
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/TriggerExecutor.scala:46: class ProcessingTime in package streaming is deprecated (since 2.2.0): use Trigger.ProcessingTime(intervalMs)
[warn] case class ProcessingTimeExecutor(processingTime: ProcessingTime, clock: Clock = new SystemClock())
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/TriggerExecutor.scala:46: class ProcessingTime in package streaming is deprecated (since 2.2.0): use Trigger.ProcessingTime(intervalMs)
[warn] case class ProcessingTimeExecutor(processingTime: ProcessingTime, clock: Clock = new SystemClock())
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/TriggerExecutor.scala:46: class ProcessingTime in package streaming is deprecated (since 2.2.0): use Trigger.ProcessingTime(intervalMs)
[warn] case class ProcessingTimeExecutor(processingTime: ProcessingTime, clock: Clock = new SystemClock())
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/TriggerExecutor.scala:46: class ProcessingTime in package streaming is deprecated (since 2.2.0): use Trigger.ProcessingTime(intervalMs)
[warn] case class ProcessingTimeExecutor(processingTime: ProcessingTime, clock: Clock = new SystemClock())
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetPartitionReaderFactory.scala:121: class ParquetInputSplit in package hadoop is deprecated: see corresponding Javadoc for more information.
[warn]           Option[TimeZone]) => RecordReader[Void, T]): RecordReader[Void, T] = {
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetPartitionReaderFactory.scala:126: class ParquetInputSplit in package hadoop is deprecated: see corresponding Javadoc for more information.
[warn]       new org.apache.parquet.hadoop.ParquetInputSplit(
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetPartitionReaderFactory.scala:135: method readFooter in class ParquetFileReader is deprecated: see corresponding Javadoc for more information.
[warn]       ParquetFileReader.readFooter(conf, filePath, SKIP_ROW_GROUPS).getFileMetaData
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetPartitionReaderFactory.scala:184: class ParquetInputSplit in package hadoop is deprecated: see corresponding Javadoc for more information.
[warn]       split: ParquetInputSplit,
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetPartitionReaderFactory.scala:213: class ParquetInputSplit in package hadoop is deprecated: see corresponding Javadoc for more information.
[warn]       split: ParquetInputSplit,
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/continuous/ContinuousExecution.scala:96: object ProcessingTime in package streaming is deprecated (since 2.2.0): use Trigger.ProcessingTime(intervalMs)
[warn]     case ContinuousTrigger(t) => ProcessingTimeExecutor(ProcessingTime(t), triggerClock)
[warn] 
[warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/continuous/ContinuousExecution.scala:96: class ProcessingTime in package streaming is deprecated (since 2.2.0): use Trigger.ProcessingTime(intervalMs)
[warn]     case ContinuousTrigger(t) => ProcessingTimeExecutor(ProcessingTime(t), triggerClock)
[warn] 
[error] [warn] /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala:451: unreachable code
[error] [warn]       (rs: ResultSet, row: InternalRow, pos: Int) =>
[error] [warn] 
[warn] two warnings found
[warn] Multiple main classes detected.  Run 'show discoveredMainClasses' to see the list
[info] Packaging /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/catalyst/target/scala-2.12/spark-catalyst_2.12-3.0.0-SNAPSHOT-tests.jar ...
[info] Done packaging.
java.lang.RuntimeException: 1 fatal warnings
	at scala.sys.package$.error(package.scala:27)
	at SparkBuild$$anonfun$sharedSettings$21.apply(SparkBuild.scala:315)
	at SparkBuild$$anonfun$sharedSettings$21.apply(SparkBuild.scala:284)
	at scala.Function1$$anonfun$compose$1.apply(Function1.scala:47)
	at sbt.$tilde$greater$$anonfun$$u2219$1.apply(TypeFunctions.scala:40)
	at sbt.std.Transform$$anon$4.work(System.scala:63)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:228)
	at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:228)
	at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:17)
	at sbt.Execute.work(Execute.scala:237)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:228)
	at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:228)
	at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:159)
	at sbt.CompletionService$$anon$2.call(CompletionService.scala:28)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
[error] (sql/compile:compile) 1 fatal warnings
[error] Total time: 191 s, completed Jul 8, 2019 12:53:32 PM
[error] running /home/jenkins/workspace/NewSparkPullRequestBuilder/build/sbt -Phadoop-2.7 -Pkubernetes -Phive-thriftserver -Phadoop-cloud -Pkinesis-asl -Pyarn -Pspark-ganglia-lgpl -Phive -Pmesos test:package streaming-kinesis-asl-assembly/assembly ; received return code 1
Attempting to post to Github...
 > Post successful.
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results
ERROR: Step 'Publish JUnit test result report' failed: No test report files were found. Configuration error?
Finished: FAILURE