Test Result

83 failures (+82), 76 skipped (-610)
18,883 tests (-4030)
Took 13 hr.

All Failed Tests

Test Name | Duration | Age
org.apache.spark.scheduler.BarrierTaskContextSuite.support multiple barrier() call within a single task | 23 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceStressForDontFailOnDataLossSuite.stress test for failOnDataLoss=false | 57 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.cannot stop Kafka stream | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.assign from latest offsets (failOnDataLoss: true) | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.assign from earliest offsets (failOnDataLoss: true) | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.assign from specific offsets (failOnDataLoss: true) | 1 min 4 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.assign from specific timestamps (failOnDataLoss: true) | 1 min 3 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.subscribing topic by name from latest offsets (failOnDataLoss: true) | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.subscribing topic by name from earliest offsets (failOnDataLoss: true) | 1 min 11 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.subscribing topic by name from specific offsets (failOnDataLoss: true) | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.subscribing topic by name from specific timestamps (failOnDataLoss: true) | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.subscribing topic by pattern from latest offsets (failOnDataLoss: true) | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.subscribing topic by pattern from earliest offsets (failOnDataLoss: true) | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.subscribing topic by pattern from specific offsets (failOnDataLoss: true) | 1 min 2 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.subscribing topic by pattern from specific timestamps (failOnDataLoss: true) | 1 min 2 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.assign from latest offsets (failOnDataLoss: false) | 1 min 9 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.assign from earliest offsets (failOnDataLoss: false) | 1 min 1 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.assign from specific offsets (failOnDataLoss: false) | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.assign from specific timestamps (failOnDataLoss: false) | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.subscribing topic by name from latest offsets (failOnDataLoss: false) | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.subscribing topic by name from earliest offsets (failOnDataLoss: false) | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.subscribing topic by name from specific offsets (failOnDataLoss: false) | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.subscribing topic by name from specific timestamps (failOnDataLoss: false) | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.subscribing topic by pattern from latest offsets (failOnDataLoss: false) | 1 min 1 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.subscribing topic by pattern from earliest offsets (failOnDataLoss: false) | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.subscribing topic by pattern from specific offsets (failOnDataLoss: false) | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.subscribing topic by pattern from specific timestamps (failOnDataLoss: false) | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.Kafka column types | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.read Kafka transactional messages: read_committed | 1 min 1 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.read Kafka transactional messages: read_uncommitted | 1 min 44 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceSuite.SPARK-27494: read kafka record containing null key/values | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaContinuousSourceTopicDeletionSuite.subscribing topic by pattern with topic deletions | 2 min 46 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV1SourceSuite.subscribing topic by pattern from specific timestamps (failOnDataLoss: true) | 57 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV1SourceSuite.assign from earliest offsets (failOnDataLoss: false) | 40 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV1SourceSuite.delete a topic when a Spark job is running | 14 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV1SourceSuite.read Kafka transactional messages: read_committed | 1 min 1 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV1SourceSuite.read Kafka transactional messages: read_uncommitted | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV2SourceSuite.subscribing topic by pattern with topic deletions | 1 min 20 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV2SourceSuite.0 (SPARK-19517) | 1 min 14 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV2SourceSuite.KafkaSource with watermark | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV2SourceSuite.delete a topic when a Spark job is running | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV2SourceSuite.SPARK-22956: currentPartitionOffsets should be set when no new data comes in | 45 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV2SourceSuite.id prefix | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV2SourceSuite.id override | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV2SourceSuite.ensure stream-stream self-join generates only one offset in log and correct metrics | 1 min 1 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV2SourceSuite.read Kafka transactional messages: read_committed | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV2SourceSuite.read Kafka transactional messages: read_uncommitted | 1 min 24 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV2SourceSuite.reset should reset all fields | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV2SourceSuite.SPARK-27494: read kafka record containing null key/values | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV2SourceSuite.SPARK-30656: minPartitions | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV2SourceSuite.V2 Source is used by default | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV2SourceSuite.minPartitions is supported | 10 sec | 1
org.apache.spark.sql.kafka010.KafkaMicroBatchV2SourceSuite.4 | 1 min 0 sec | 1
org.apache.spark.sql.kafka010.KafkaRelationSuiteV2.reuse same dataframe in query | 16 sec | 1
org.apache.spark.sql.kafka010.KafkaRelationSuiteV2.test late binding start offsets | 44 sec | 1
org.apache.spark.sql.kafka010.KafkaSourceStressSuite.stress test with multiple topics and partitions | 5 min 49 sec | 1
org.apache.spark.sql.kafka010.consumer.KafkaDataConsumerSuite.SPARK-25151 Handles multiple tasks in executor fetching same (topic, partition) pair and same offset (edge-case) - data not in use | 1 min 41 sec | 1
org.apache.spark.streaming.kafka010.KafkaDataConsumerSuite.(It is not a test it is a sbt.testing.SuiteSelector) | 32 sec | 1
org.apache.spark.sql.DataFrameTimeWindowingSuite.(It is not a test it is a sbt.testing.SuiteSelector) | 38 ms | 1
org.apache.spark.sql.DatasetCacheSuite.(It is not a test it is a sbt.testing.SuiteSelector) | 37 ms | 1
org.apache.spark.sql.JoinSuite.(It is not a test it is a sbt.testing.SuiteSelector) | 41 ms | 1
org.apache.spark.sql.connector.V2CommandsCaseSensitivitySuite.(It is not a test it is a sbt.testing.SuiteSelector) | 39 ms | 1
org.apache.spark.sql.execution.AggregatingAccumulatorSuite.(It is not a test it is a sbt.testing.SuiteSelector) | 36 ms | 1
org.apache.spark.sql.execution.OptimizeMetadataOnlyQuerySuite.(It is not a test it is a sbt.testing.SuiteSelector) | 39 ms | 1
org.apache.spark.sql.execution.SQLWindowFunctionSuite.(It is not a test it is a sbt.testing.SuiteSelector) | 41 ms | 1
org.apache.spark.sql.execution.WholeStageCodegenSparkSubmitSuite.Generated code on driver should not embed platform-specific constant | 3 min 0 sec | 1
org.apache.spark.sql.execution.adaptive.AdaptiveQueryExecSuite.(It is not a test it is a sbt.testing.SuiteSelector) | 31 ms | 1
org.apache.spark.sql.execution.streaming.CompactibleFileStreamLogSuite.(It is not a test it is a sbt.testing.SuiteSelector) | 46 ms | 1
org.apache.spark.sql.execution.ui.SQLAppStatusListenerMemoryLeakSuite.no memory leak | 34 ms | 1
org.apache.spark.sql.execution.ui.SQLAppStatusListenerSuite.(It is not a test it is a sbt.testing.SuiteSelector) | 67 ms | 1
org.apache.spark.sql.streaming.StreamingOuterJoinSuite.SPARK-26187 self right outer join should not return outer nulls for already matched rows | 1.5 sec | 1
org.apache.spark.sql.streaming.StreamingOuterJoinSuite.4 | 37 ms | 1
org.apache.spark.sql.streaming.StreamingOuterJoinSuite.SPARK-29438: ensure UNION doesn't lead stream-stream join to use shifted partition IDs | 37 ms | 1
org.apache.spark.sql.streaming.continuous.ContinuousStressSuite.restarts | 15 sec | 1
org.apache.spark.sql.hive.HiveExternalCatalogVersionsSuite.(It is not a test it is a sbt.testing.SuiteSelector) | 10 min | 1
org.apache.spark.sql.hive.HiveSparkSubmitSuite.temporary Hive UDF: define a UDF and use it | 5 min 5 sec | 1
org.apache.spark.sql.hive.HiveSparkSubmitSuite.permanent Hive UDF: define a UDF and use it | 5 min 1 sec | 1
org.apache.spark.sql.hive.HiveSparkSubmitSuite.SPARK-11009 fix wrong result of Window function in cluster mode | 5 min 3 sec | 1
org.apache.spark.sql.hive.HiveSparkSubmitSuite.SPARK-14244 fix window partition size attribute binding failure | 5 min 3 sec | 1
org.apache.spark.sql.hive.HiveSparkSubmitSuite.dir | 5 min 0 sec | 1
org.apache.spark.sql.hive.HiveSparkSubmitSuite.dir | 5 min 9 sec | 1
org.apache.spark.sql.hive.HiveSparkSubmitSuite.ConnectionURL | 5 min 20 sec | 1
org.apache.spark.sql.hive.HiveSparkSubmitSuite.SPARK-18989: DESC TABLE should not fail with format class not found | 5 min 0 sec | 1

All Tests

Package | Duration | Fail (diff) | Skip (diff) | Pass (diff) | Total (diff)
org.apache.spark | 31 min | 0 | 1 | 454 | 455
org.apache.spark.api.java | 0.12 sec | 0 | 0 | 9 | 9
org.apache.spark.api.python | 2.5 sec | 0 | 0 | 8 | 8
org.apache.spark.api.r | 6 ms | 0 | 0 | 3 | 3
org.apache.spark.broadcast | 56 sec | 0 | 0 | 17 | 17
org.apache.spark.deploy | 3 min 1 sec | 0 | 1 | 94 | 95
org.apache.spark.deploy.client | 96 ms | 0 | 0 | 2 | 2
org.apache.spark.deploy.history | 2 min 23 sec | 0 | 0 | 165 | 165
org.apache.spark.deploy.k8s | 0.18 sec | 0 | 0 | 17 | 17
org.apache.spark.deploy.k8s.features | 11 sec | 0 | 0 | 53 | 53
org.apache.spark.deploy.k8s.submit | 1.3 sec | 0 | 0 | 11 | 11
org.apache.spark.deploy.master | 55 sec | 0 | 0 | 30 | 30
org.apache.spark.deploy.master.ui | 0.26 sec | 0 | 0 | 2 | 2
org.apache.spark.deploy.mesos | 0.2 sec | 0 | 0 | 5 | 5
org.apache.spark.deploy.rest | 6.5 sec | 0 | 0 | 27 | 27
org.apache.spark.deploy.rest.mesos | 0.49 sec | 0 | 0 | 3 | 3
org.apache.spark.deploy.security | 1 min 1 sec | 0 | 0 | 5 | 5
org.apache.spark.deploy.worker | 9.3 sec | 0 | 0 | 32 | 32
org.apache.spark.deploy.worker.ui | 0.98 sec | 0 | 0 | 1 | 1
org.apache.spark.deploy.yarn | 26 sec | 0 | 0 | 98 | 98
org.apache.spark.executor | 10 sec | 0 | 0 | 32 | 32
org.apache.spark.graphx | 1 min 44 sec | 0 | 0 | 57 | 57
org.apache.spark.graphx.impl | 2.8 sec | 0 | 0 | 18 | 18
org.apache.spark.graphx.lib | 7 min 36 sec | 0 | 0 | 27 | 27
org.apache.spark.graphx.util | 5.2 sec | 0 | 0 | 6 | 6
org.apache.spark.input | 3.6 sec | 0 | 0 | 3 | 3
org.apache.spark.internal | 1 ms | 0 | 0 | 1 | 1
org.apache.spark.internal.config | 0.1 sec | 0 | 0 | 27 | 27
org.apache.spark.internal.io | 9 ms | 0 | 0 | 6 | 6
org.apache.spark.internal.plugin | 22 sec | 0 | 0 | 5 | 5
org.apache.spark.io | 5.9 sec | 0 | 0 | 40 | 40
org.apache.spark.kafka010 | 4.7 sec | 0 | 0 | 45 | 45
org.apache.spark.launcher | 54 sec | 0 | 0 | 53 | 53
org.apache.spark.memory | 0.74 sec | 0 | 0 | 29 | 29
org.apache.spark.metrics | 4 sec | 0 | 0 | 35 | 35
org.apache.spark.metrics.sink | 1 sec | 0 | 0 | 6 | 6
org.apache.spark.metrics.source | 1.9 sec | 0 | 0 | 7 | 7
org.apache.spark.ml | 13 sec | 0 | 0 | 19 | 19
org.apache.spark.ml.ann | 1.4 sec | 0 | 0 | 3 | 3
org.apache.spark.ml.attribute | 0.13 sec | 0 | 0 | 18 | 18
org.apache.spark.ml.classification | 25 min | 0 | 2 | 196 | 198
org.apache.spark.ml.clustering | 4 min 16 sec | 0 | 0 | 60 | 60
org.apache.spark.ml.evaluation | 31 sec | 0 | 0 | 27 | 27
org.apache.spark.ml.feature | 7 min 29 sec | 0 | 0 | 378 | 378
org.apache.spark.ml.fpm | 23 sec | 0 | 0 | 11 | 11
org.apache.spark.ml.impl | 8 ms | 0 | 0 | 1 | 1
org.apache.spark.ml.linalg | 9.2 sec | 0 | 0 | 91 | 91
org.apache.spark.ml.optim | 13 sec | 0 | 0 | 12 | 12
org.apache.spark.ml.optim.aggregator | 4.8 sec | 0 | 0 | 26 | 26
org.apache.spark.ml.optim.loss | 0.83 sec | 0 | 0 | 4 | 4
org.apache.spark.ml.param | 0.78 sec | 0 | 0 | 10 | 10
org.apache.spark.ml.param.shared | 56 ms | 0 | 0 | 1 | 1
org.apache.spark.ml.python | 7 ms | 0 | 0 | 3 | 3
org.apache.spark.ml.r | 0.55 sec | 0 | 0 | 1 | 1
org.apache.spark.ml.recommendation | 8 min 46 sec | 0 | 0 | 36 | 36
org.apache.spark.ml.regression | 13 min | 0 | 3 | 123 | 126
org.apache.spark.ml.source.image | 3.8 sec | 0 | 0 | 7 | 7
org.apache.spark.ml.source.libsvm | 7.8 sec | 0 | 0 | 12 | 12
org.apache.spark.ml.stat | 1 min 6 sec | 0 | 1 | 88 | 89
org.apache.spark.ml.stat.distribution | 0.84 sec | 0 | 0 | 4 | 4
org.apache.spark.ml.tree.impl | 52 sec | 0 | 0 | 28 | 28
org.apache.spark.ml.tuning | 3 min 35 sec | 0 | 0 | 22 | 22
org.apache.spark.ml.util | 12 sec | 0 | 0 | 22 | 22
org.apache.spark.mllib.api.python | 50 ms | 0 | 0 | 5 | 5
org.apache.spark.mllib.classification | 5 min 4 sec | 0 | 0 | 46 | 46
org.apache.spark.mllib.clustering | 3 min 56 sec | 0 | 0 | 64 | 64
org.apache.spark.mllib.evaluation | 3.7 sec | 0 | 0 | 22 | 22
org.apache.spark.mllib.feature | 12 sec | 0 | 0 | 36 | 36
org.apache.spark.mllib.fpm | 9.5 sec | 0 | 0 | 21 | 21
org.apache.spark.mllib.linalg | 5.5 sec | 0 | 0 | 96 | 96
org.apache.spark.mllib.linalg.distributed | 37 sec | 0 | 0 | 52 | 52
org.apache.spark.mllib.optimization | 54 sec | 0 | 0 | 13 | 13
org.apache.spark.mllib.pmml.export | 0.41 sec | 0 | 0 | 11 | 11
org.apache.spark.mllib.random | 19 sec | 0 | 0 | 24 | 24
org.apache.spark.mllib.rdd | 6.8 sec | 0 | 0 | 3 | 3
org.apache.spark.mllib.recommendation | 2 min 7 sec | 0 | 0 | 26 | 26
org.apache.spark.mllib.regression | 3 min 37 sec | 0 | 0 | 53 | 53
org.apache.spark.mllib.stat | 39 sec | 0 | 1 | 34 | 35
org.apache.spark.mllib.stat.distribution | 0.16 sec | 0 | 0 | 5 | 5
org.apache.spark.mllib.tree | 45 sec | 0 | 0 | 39 | 39
org.apache.spark.mllib.util | 29 sec | 0 | 0 | 25 | 25
org.apache.spark.network | 1 min 0 sec | 0 | 0 | 42 | 42
org.apache.spark.network.crypto | 6.7 sec | 0 | 0 | 17 | 17
org.apache.spark.network.netty | 1.4 sec | 0 | 0 | 14 | 14
org.apache.spark.network.protocol | 0.11 sec | 0 | 0 | 6 | 6
org.apache.spark.network.sasl | 18 sec | 0 | 0 | 16 | 16
org.apache.spark.network.server | 0.21 sec | 0 | 0 | 3 | 3
org.apache.spark.network.shuffle | 17 sec | 0 | 0 | 61 | 61
org.apache.spark.network.util | 33 sec | 0 | 0 | 10 | 10
org.apache.spark.network.yarn | 9.2 sec | 0 | 0 | 13 | 13
org.apache.spark.partial | 41 ms | 0 | 0 | 10 | 10
org.apache.spark.rdd | 3 min 20 sec | 0 | 0 | 241 | 241
org.apache.spark.repl | 3 min 28 sec | 0 | 0 | 40 | 40
org.apache.spark.resource | 33 sec | 0 | 0 | 27 | 27
org.apache.spark.rpc | 5 ms | 0 | 0 | 5 | 5
org.apache.spark.rpc.netty | 2.7 sec | 0 | 0 | 50 | 50
org.apache.spark.scheduler | 11 min | 1 (+1) | 0 | 317 (-1) | 318
org.apache.spark.scheduler.cluster | 7.7 sec | 0 | 0 | 2 | 2
org.apache.spark.scheduler.cluster.k8s | 1.9 sec | 0 | 0 | 23 | 23
org.apache.spark.scheduler.cluster.mesos | 53 sec | 0 | 0 | 99 | 99
org.apache.spark.scheduler.dynalloc | 0.11 sec | 0 | 0 | 12 | 12
org.apache.spark.security | 13 sec | 0 | 0 | 10 | 10
org.apache.spark.serializer | 15 sec | 0 | 0 | 110 | 110
org.apache.spark.shuffle | 25 sec | 0 | 0 | 5 | 5
org.apache.spark.shuffle.sort | 8.6 sec | 0 | 0 | 48 | 48
org.apache.spark.shuffle.sort.io | 1.4 sec | 0 | 0 | 2 | 2
org.apache.spark.sql | 2 hr 6 min | 3 (+3) | 6 | 2706 (-62) | 2715 (-59)
org.apache.spark.sql.api.python | 50 ms | 0 | 0 | 2 | 2
org.apache.spark.sql.api.r | 0.22 sec | 0 | 0 | 1 | 1
org.apache.spark.sql.avro | 3 min 23 sec | 0 | 0 | 194 | 194
org.apache.spark.sql.catalyst | 1.2 sec | 0 | 0 | 49 | 49
org.apache.spark.sql.catalyst.analysis | 31 sec | 0 | 0 | 555 | 555
org.apache.spark.sql.catalyst.catalog | 10 sec | 0 | 0 | 160 | 160
org.apache.spark.sql.catalyst.csv | 1.6 sec | 0 | 0 | 33 | 33
org.apache.spark.sql.catalyst.encoders | 59 sec | 0 | 0 | 380 | 380
org.apache.spark.sql.catalyst.expressions | 23 min | 0 | 0 | 804 | 804
org.apache.spark.sql.catalyst.expressions.aggregate | 20 sec | 0 | 0 | 50 | 50
org.apache.spark.sql.catalyst.expressions.codegen | 1 min 33 sec | 0 | 0 | 67 | 67
org.apache.spark.sql.catalyst.expressions.xml | 3.8 sec | 0 | 0 | 20 | 20
org.apache.spark.sql.catalyst.json | 0.28 sec | 0 | 0 | 15 | 15
org.apache.spark.sql.catalyst.optimizer | 31 sec | 0 | 1 | 615 | 616
org.apache.spark.sql.catalyst.parser | 9.8 sec | 0 | 0 | 263 | 263
org.apache.spark.sql.catalyst.planning | 53 ms | 0 | 0 | 8 | 8
org.apache.spark.sql.catalyst.plans | 1 sec | 0 | 0 | 38 | 38
org.apache.spark.sql.catalyst.plans.logical | 0.1 sec | 0 | 0 | 13 | 13
org.apache.spark.sql.catalyst.statsEstimation | 1.4 sec | 0 | 0 | 114 | 114
org.apache.spark.sql.catalyst.streaming | 10 ms | 0 | 0 | 2 | 2
org.apache.spark.sql.catalyst.trees | 0.36 sec | 0 | 0 | 29 | 29
org.apache.spark.sql.catalyst.util | 9.9 sec | 0 | 2 | 241 | 243
org.apache.spark.sql.connector | 3 min 16 sec | 1 (+1) | 0 | 502 (-14) | 503 (-13)
org.apache.spark.sql.connector.catalog | 2.6 sec | 0 | 0 | 73 | 73
org.apache.spark.sql.connector.expressions | 72 ms | 0 | 0 | 6 | 6
org.apache.spark.sql.execution | 18 min | 4 (+4) | 3 (-1) | 520 (-26) | 527 (-23)
org.apache.spark.sql.execution.adaptive | 31 ms | 1 (+1) | 0 | 0 (-31) | 1 (-30)
org.apache.spark.sql.execution.arrow | 7 sec | 0 | 0 | 35 | 35
org.apache.spark.sql.execution.columnar | 44 sec | 0 | 0 | 187 | 187
org.apache.spark.sql.execution.columnar.compression | 2.9 sec | 0 | 0 | 118 | 118
org.apache.spark.sql.execution.command | 5 min 34 sec | 0 | 0 | 242 | 242
org.apache.spark.sql.execution.datasources | 7 min 32 sec | 0 | 0 | 226 | 226
org.apache.spark.sql.execution.datasources.binaryfile | 2.8 sec | 0 | 0 | 9 | 9
org.apache.spark.sql.execution.datasources.csv | 13 min | 0 | 0 | 354 | 354
org.apache.spark.sql.execution.datasources.jdbc | 29 ms | 0 | 0 | 1 | 1
org.apache.spark.sql.execution.datasources.json | 6 min 25 sec | 0 | 2 | 336 | 338
org.apache.spark.sql.execution.datasources.noop | 4.8 sec | 0 | 0 | 6 | 6
org.apache.spark.sql.execution.datasources.orc | 9 min 59 sec | 0 | 1 | 313 | 314
org.apache.spark.sql.execution.datasources.parquet | 47 min | 0 | 5 | 557 | 562
org.apache.spark.sql.execution.datasources.text | 32 sec | 0 | 0 | 40 | 40
org.apache.spark.sql.execution.datasources.v2 | 1.6 sec | 0 | 0 | 61 | 61
org.apache.spark.sql.execution.debug | 1 sec | 0 | 0 | 7 | 7
org.apache.spark.sql.execution.history | 0.6 sec | 0 | 0 | 2 | 2
org.apache.spark.sql.execution.joins | 1 min 43 sec | 0 | 2 | 181 | 183
org.apache.spark.sql.execution.metric | 36 sec | 0 | 0 | 27 | 27
org.apache.spark.sql.execution.python | 18 sec | 0 | 0 | 25 | 25
org.apache.spark.sql.execution.streaming | 37 sec | 1 (+1) | 1 | 41 (-13) | 43 (-12)
org.apache.spark.sql.execution.streaming.continuous.shuffle | 0.28 sec | 0 | 0 | 16 | 16
org.apache.spark.sql.execution.streaming.sources | 40 sec | 0 | 0 | 42 | 42
org.apache.spark.sql.execution.streaming.state | 30 sec | 0 | 0 | 42 | 42
org.apache.spark.sql.execution.ui | 0.76 sec | 2 (+2) | 0 | 2 (-12) | 4 (-10)
org.apache.spark.sql.execution.vectorized | 1 min 22 sec | 0 | 0 | 62 | 62
org.apache.spark.sql.expressions | 10 sec | 0 | 0 | 8 | 8
org.apache.spark.sql.hive | 1 hr 47 min | 9 (+9) | 2 | 397 (-276) | 408 (-267)
org.apache.spark.sql.hive.client | 13 ms | 0 | 0 (-1) | 11 (-1122) | 11 (-1123)
org.apache.spark.sql.hive.execution | 11 min | 0 | 2 (-589) | 282 (-1253) | 284 (-1842)
org.apache.spark.sql.hive.orc | 8 min 54 sec | 0 | 2 | 133 (-12) | 135 (-12)
org.apache.spark.sql.internal | 43 sec | 0 | 0 | 74 (-1) | 74 (-1)
org.apache.spark.sql.jdbc | 28 sec | 0 | 1 | 115 | 116
org.apache.spark.sql.kafka010 | 2 hr 10 min | 55 (+55) | 0 | 187 (-55) | 242
org.apache.spark.sql.kafka010.consumer | 16 min | 1 (+1) | 0 | 17 (-1) | 18
org.apache.spark.sql.kafka010.producer | 0.5 sec | 0 | 0 | 4 | 4
org.apache.spark.sql.sources | 20 min | 0 | 2 (-2) | 322 (-62) | 324 (-64)
org.apache.spark.sql.streaming | 19 min | 3 (+3) | 0 | 406 (-3) | 409
org.apache.spark.sql.streaming.continuous | 2 min 19 sec | 1 (+1) | 0 | 40 (-1) | 41
org.apache.spark.sql.streaming.sources | 2.8 sec | 0 | 0 | 27 | 27
org.apache.spark.sql.streaming.test | 2 min 27 sec | 0 | 0 | 26 | 26
org.apache.spark.sql.streaming.ui | 12 sec | 0 | 0 | 7 | 7
org.apache.spark.sql.test | 52 sec | 0 | 0 | 60 | 60
org.apache.spark.sql.types | 0.78 sec | 0 | 2 | 191 | 193
org.apache.spark.sql.util | 9.5 sec | 0 | 1 | 44 | 45
org.apache.spark.status | 2.8 sec | 0 | 0 | 31 | 31
org.apache.spark.status.api.v1 | 0.5 sec | 0 | 0 | 1 | 1
org.apache.spark.storage | 54 sec | 0 | 0 | 238 | 238
org.apache.spark.streaming | 10 min | 0 | 1 | 243 | 244
org.apache.spark.streaming.api.java | 30 ms | 0 | 0 | 1 | 1
org.apache.spark.streaming.kafka010 | 2 min 59 sec | 1 (+1) | 0 | 22 (-3) | 23 (-2)
org.apache.spark.streaming.kinesis | 5 sec | 0 | 24 | 37 | 61
org.apache.spark.streaming.rdd | 22 sec | 0 | 0 | 13 | 13
org.apache.spark.streaming.receiver | 0.74 sec | 0 | 0 | 6 | 6
org.apache.spark.streaming.scheduler | 32 sec | 0 | 0 | 23 | 23
org.apache.spark.streaming.scheduler.rate | 0.65 sec | 0 | 0 | 11 | 11
org.apache.spark.streaming.ui | 3.6 sec | 0 | 0 | 8 | 8
org.apache.spark.streaming.util | 12 sec | 0 | 0 | 55 | 55
org.apache.spark.ui | 1 min 23 sec | 0 | 2 | 45 | 47
org.apache.spark.ui.scope | 2 ms | 0 | 0 | 1 | 1
org.apache.spark.ui.storage | 0.68 sec | 0 | 0 | 5 | 5
org.apache.spark.unsafe | 55 ms | 0 | 0 | 8 | 8
org.apache.spark.unsafe.array | 1 ms | 0 | 0 | 1 | 1
org.apache.spark.unsafe.hash | 1.4 sec | 0 | 0 | 6 | 6
org.apache.spark.unsafe.map | 8.1 sec | 0 | 0 | 30 | 30
org.apache.spark.unsafe.types | 0.9 sec | 0 | 1 | 61 | 62
org.apache.spark.util | 1 min 0 sec | 0 | 0 | 174 | 174
org.apache.spark.util.collection | 7 min 43 sec | 0 | 3 | 123 | 126
org.apache.spark.util.collection.unsafe.sort | 7.1 sec | 0 | 0 | 82 | 82
org.apache.spark.util.io | 25 ms | 0 | 0 | 8 | 8
org.apache.spark.util.kvstore | 19 sec | 0 | 1 | 104 | 105
org.apache.spark.util.logging | 1 sec | 0 | 0 | 1 | 1
org.apache.spark.util.random | 6.4 sec | 0 | 0 | 32 | 32
org.apache.spark.util.sketch | 32 sec | 0 | 0 | 29 | 29
test.org.apache.spark | 1 min 43 sec | 0 | 0 | 109 | 109
test.org.apache.spark.streaming | 1 min 20 sec | 0 | 0 | 79 | 79