Test Result

201 failures (+201), 817 skipped (-232)
29,964 tests (-3362)
Took 5 hr 59 min.

All Failed Tests

Test Name | Duration | Age
 org.apache.spark.sql.hive.client.HiveClientUserNameSuite.2.2: username of HiveClient - no UGI5.7 sec1
 org.apache.spark.sql.hive.client.HiveClientUserNameSuite.2.2: username of HiveClient - UGI5.4 sec1
 org.apache.spark.sql.hive.client.HiveClientUserNameSuite.2.2: username of HiveClient - Proxy user5.3 sec1
 org.apache.spark.sql.hive.client.HiveClientUserNameSuite.3.0: username of HiveClient - no UGI7.4 sec1
 org.apache.spark.sql.hive.client.HiveClientUserNameSuite.3.0: username of HiveClient - UGI7.5 sec1
 org.apache.spark.sql.hive.client.HiveClientUserNameSuite.3.0: username of HiveClient - Proxy user7.3 sec1
 org.apache.spark.sql.hive.client.HiveClientUserNameSuite.3.1: username of HiveClient - no UGI7.4 sec1
 org.apache.spark.sql.hive.client.HiveClientUserNameSuite.3.1: username of HiveClient - UGI7.5 sec1
 org.apache.spark.sql.hive.client.HiveClientUserNameSuite.3.1: username of HiveClient - Proxy user7.9 sec1
 org.apache.spark.sql.hive.client.HivePartitionFilteringSuites.(It is not a test it is a sbt.testing.NestedSuiteSelector)7.8 sec1
 org.apache.spark.sql.hive.client.HivePartitionFilteringSuites.(It is not a test it is a sbt.testing.NestedSuiteSelector)7.9 sec1
 org.apache.spark.sql.hive.client.HivePartitionFilteringSuites.(It is not a test it is a sbt.testing.NestedSuiteSelector)7.6 sec1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: create client6.2 sec1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: createDatabase1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: create/get/alter database should pick right user name as owner1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: createDatabase with null description9 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: setCurrentDatabase0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: getDatabase1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: databaseExists1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: listDatabases0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: alterDatabase0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: dropDatabase1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: createTable1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: loadTable0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: tableExists1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: getTable0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: getTableOption1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: getTablesByName0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: getTablesByName when multiple tables1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: getTablesByName when some tables do not exist0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: getTablesByName when contains invalid name0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: getTablesByName when empty1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: alterTable(table: CatalogTable)0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: alterTable - should respect the original catalog table's owner name1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: alterTable(dbName: String, tableName: String, table: CatalogTable)1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: alterTable - rename1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: alterTable - change database1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: alterTable - change database and table names0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: listTables(database)1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: listTables(database, pattern)0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: listTablesByType(database, pattern, tableType)0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: dropTable0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: sql create partitioned table1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: createPartitions1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: getPartitionNames(catalogTable)1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: getPartitions(catalogTable)1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: getPartitionsByFilter1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: getPartition0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: getPartitionOption(db: String, table: String, spec: TablePartitionSpec)1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: getPartitionOption(table: CatalogTable, spec: TablePartitionSpec)0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: getPartitions(db: String, table: String)1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: loadPartition0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: loadDynamicPartitions0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: renamePartitions0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: alterPartitions1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: dropPartitions1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: createPartitions if already exists1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: createFunction1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: functionExists0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: renameFunction1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: alterFunction0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: getFunction0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: getFunctionOption1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: listFunctions0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: dropFunction0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: sql set command1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: sql create index and reset0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: version0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: getConf1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: setOut0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: setInfo1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: setError0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: newSession0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: withHiveState and addJar1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.2.2: reset0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: create client8.8 sec1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: createDatabase1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: create/get/alter database should pick right user name as owner0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: createDatabase with null description9 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: setCurrentDatabase1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: getDatabase0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: databaseExists1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: listDatabases1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: alterDatabase0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: dropDatabase1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: createTable1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: loadTable1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: tableExists1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: getTable1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: getTableOption0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: getTablesByName1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: getTablesByName when multiple tables0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: getTablesByName when some tables do not exist1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: getTablesByName when contains invalid name0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: getTablesByName when empty1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: alterTable(table: CatalogTable)1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: alterTable - should respect the original catalog table's owner name0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: alterTable(dbName: String, tableName: String, table: CatalogTable)0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: alterTable - rename0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: alterTable - change database1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: alterTable - change database and table names0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: listTables(database)0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: listTables(database, pattern)1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: listTablesByType(database, pattern, tableType)0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: dropTable1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: sql create partitioned table1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: createPartitions0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: getPartitionNames(catalogTable)0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: getPartitions(catalogTable)0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: getPartitionsByFilter1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: getPartition1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: getPartitionOption(db: String, table: String, spec: TablePartitionSpec)1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: getPartitionOption(table: CatalogTable, spec: TablePartitionSpec)1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: getPartitions(db: String, table: String)1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: loadPartition1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: loadDynamicPartitions0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: renamePartitions1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: alterPartitions1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: dropPartitions0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: createPartitions if already exists0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: createFunction0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: functionExists1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: renameFunction1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: alterFunction0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: getFunction0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: getFunctionOption0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: listFunctions1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: dropFunction0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: sql set command1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: sql read hive materialized view1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: version1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: getConf1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: setOut1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: setInfo1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: setError1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: newSession0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: withHiveState and addJar0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.0: reset1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: create client8.7 sec1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: createDatabase1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: create/get/alter database should pick right user name as owner1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: createDatabase with null description10 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: setCurrentDatabase1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: getDatabase0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: databaseExists0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: listDatabases0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: alterDatabase1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: dropDatabase0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: createTable1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: loadTable1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: tableExists1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: getTable1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: getTableOption1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: getTablesByName1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: getTablesByName when multiple tables0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: getTablesByName when some tables do not exist1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: getTablesByName when contains invalid name1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: getTablesByName when empty1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: alterTable(table: CatalogTable)1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: alterTable - should respect the original catalog table's owner name1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: alterTable(dbName: String, tableName: String, table: CatalogTable)0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: alterTable - rename1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: alterTable - change database1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: alterTable - change database and table names1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: listTables(database)1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: listTables(database, pattern)1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: listTablesByType(database, pattern, tableType)1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: dropTable1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: sql create partitioned table1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: createPartitions1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: getPartitionNames(catalogTable)0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: getPartitions(catalogTable)1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: getPartitionsByFilter0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: getPartition1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: getPartitionOption(db: String, table: String, spec: TablePartitionSpec)1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: getPartitionOption(table: CatalogTable, spec: TablePartitionSpec)0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: getPartitions(db: String, table: String)1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: loadPartition1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: loadDynamicPartitions0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: renamePartitions0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: alterPartitions1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: dropPartitions1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: createPartitions if already exists1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: createFunction0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: functionExists0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: renameFunction1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: alterFunction0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: getFunction0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: getFunctionOption1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: listFunctions0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: dropFunction0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: sql set command0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: sql read hive materialized view1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: version1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: getConf0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: setOut1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: setInfo1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: setError1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: newSession1 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: withHiveState and addJar0 ms1
 org.apache.spark.sql.hive.client.VersionsSuite.3.1: reset1 ms1

All Tests

Package | Duration | Fail (diff) | Skip (diff) | Pass (diff) | Total (diff)
org.apache.spark14 min02515517
org.apache.spark.api.java0.19 sec0099
org.apache.spark.api.python0.62 sec0088
org.apache.spark.api.r7 ms0033
org.apache.spark.broadcast28 sec001717
org.apache.spark.deploy3 min 5 sec01105106
org.apache.spark.deploy.client61 ms0022
org.apache.spark.deploy.history38 sec00188188
org.apache.spark.deploy.k8s65 ms002424
org.apache.spark.deploy.k8s.features2.8 sec007878
org.apache.spark.deploy.k8s.submit3.9 sec001616
org.apache.spark.deploy.master1 min 8 sec003434
org.apache.spark.deploy.master.ui0.23 sec0044
org.apache.spark.deploy.mesos51 ms0055
org.apache.spark.deploy.rest1 sec002727
org.apache.spark.deploy.rest.mesos88 ms0033
org.apache.spark.deploy.security4.2 sec0055
org.apache.spark.deploy.worker2.5 sec003232
org.apache.spark.deploy.worker.ui0.12 sec0011
org.apache.spark.deploy.yarn10 min00124124
org.apache.spark.executor1.7 sec003636
org.apache.spark.graphx22 sec005757
org.apache.spark.graphx.impl83 ms001818
org.apache.spark.graphx.lib2 min 26 sec002828
org.apache.spark.graphx.util2 sec0066
org.apache.spark.input0.27 sec0033
org.apache.spark.internal1 ms0011
org.apache.spark.internal.config24 ms002727
org.apache.spark.internal.io13 ms001010
org.apache.spark.internal.plugin8.8 sec0077
org.apache.spark.io5.5 sec004040
org.apache.spark.kafka0101.8 sec004444
org.apache.spark.launcher15 sec005454
org.apache.spark.memory0.93 sec003030
org.apache.spark.metrics1.9 sec003535
org.apache.spark.metrics.sink48 ms0088
org.apache.spark.metrics.source0.58 sec0088
org.apache.spark.ml4.2 sec002020
org.apache.spark.ml.ann0.72 sec0033
org.apache.spark.ml.attribute49 ms001818
org.apache.spark.ml.classification8 min 57 sec02208210
org.apache.spark.ml.clustering1 min 14 sec005959
org.apache.spark.ml.evaluation15 sec003535
org.apache.spark.ml.feature2 min 20 sec00396396
org.apache.spark.ml.fpm7.6 sec001212
org.apache.spark.ml.impl9 ms0011
org.apache.spark.ml.linalg1.9 sec009292
org.apache.spark.ml.optim2.9 sec001212
org.apache.spark.ml.optim.aggregator2.4 sec003939
org.apache.spark.ml.optim.loss0.1 sec0044
org.apache.spark.ml.param0.37 sec001010
org.apache.spark.ml.param.shared3 ms0011
org.apache.spark.ml.python4 ms0033
org.apache.spark.ml.r0.24 sec0011
org.apache.spark.ml.recommendation1 min 44 sec003535
org.apache.spark.ml.regression5 min 10 sec03131134
org.apache.spark.ml.source.image1.3 sec0077
org.apache.spark.ml.source.libsvm2.4 sec001515
org.apache.spark.ml.stat16 sec019596
org.apache.spark.ml.stat.distribution0.14 sec0044
org.apache.spark.ml.tree.impl12 sec002828
org.apache.spark.ml.tuning56 sec002424
org.apache.spark.ml.util2.6 sec002222
org.apache.spark.mllib.api.python24 ms0055
org.apache.spark.mllib.classification1 min 25 sec004646
org.apache.spark.mllib.clustering1 min 6 sec006666
org.apache.spark.mllib.evaluation1.9 sec002222
org.apache.spark.mllib.feature4.3 sec003636
org.apache.spark.mllib.fpm4.6 sec002121
org.apache.spark.mllib.linalg0.34 sec009696
org.apache.spark.mllib.linalg.distributed14 sec005353
org.apache.spark.mllib.optimization20 sec001313
org.apache.spark.mllib.pmml.export49 ms001111
org.apache.spark.mllib.random11 sec002424
org.apache.spark.mllib.rdd3.4 sec0033
org.apache.spark.mllib.recommendation32 sec002626
org.apache.spark.mllib.regression53 sec005353
org.apache.spark.mllib.stat9.8 sec013435
org.apache.spark.mllib.stat.distribution32 ms0055
org.apache.spark.mllib.tree22 sec003939
org.apache.spark.mllib.util14 sec002828
org.apache.spark.network33 sec003838
org.apache.spark.network.client2.7 sec0088
org.apache.spark.network.crypto1.6 sec002121
org.apache.spark.network.netty0.76 sec001414
org.apache.spark.network.protocol0.22 sec001010
org.apache.spark.network.sasl1.9 sec001616
org.apache.spark.network.server43 ms0033
org.apache.spark.network.shuffle4 sec00121121
org.apache.spark.network.shuffle.protocol1 ms0022
org.apache.spark.network.util14 sec001313
org.apache.spark.network.yarn0.73 sec001919
org.apache.spark.partial20 ms001010
org.apache.spark.rdd1 min 6 sec00244244
org.apache.spark.repl1 min 30 sec004242
org.apache.spark.resource16 sec004040
org.apache.spark.rpc2 ms0055
org.apache.spark.rpc.netty3.4 sec005252
org.apache.spark.scheduler4 min 41 sec00372372
org.apache.spark.scheduler.cluster1.7 sec0022
org.apache.spark.scheduler.cluster.k8s1.6 sec004242
org.apache.spark.scheduler.cluster.mesos20 sec00105105
org.apache.spark.scheduler.dynalloc30 ms001212
org.apache.spark.security4.5 sec001010
org.apache.spark.serializer7.9 sec00117117
org.apache.spark.shuffle1 min 15 sec002626
org.apache.spark.shuffle.sort2.5 sec006363
org.apache.spark.shuffle.sort.io9 ms0022
org.apache.spark.sql43 min0365354+15390+1
org.apache.spark.sql.api.python12 ms0022
org.apache.spark.sql.api.r57 ms0011
org.apache.spark.sql.avro1 min 12 sec02271273
org.apache.spark.sql.catalyst0.3 sec007575
org.apache.spark.sql.catalyst.analysis22 sec00706706
org.apache.spark.sql.catalyst.catalog4.6 sec00162162
org.apache.spark.sql.catalyst.csv0.29 sec003131
org.apache.spark.sql.catalyst.encoders40 sec00441441
org.apache.spark.sql.catalyst.expressions14 min001202+11202+1
org.apache.spark.sql.catalyst.expressions.aggregate13 sec006161
org.apache.spark.sql.catalyst.expressions.codegen42 sec006969
org.apache.spark.sql.catalyst.expressions.xml1.2 sec002020
org.apache.spark.sql.catalyst.json0.41 sec001818
org.apache.spark.sql.catalyst.optimizer40 sec01908909
org.apache.spark.sql.catalyst.optimizer.joinReorder0.67 sec003131
org.apache.spark.sql.catalyst.parser3.6 sec00294294
org.apache.spark.sql.catalyst.planning7 ms0077
org.apache.spark.sql.catalyst.plans0.25 sec004040
org.apache.spark.sql.catalyst.plans.logical0.17 sec001515
org.apache.spark.sql.catalyst.statsEstimation0.31 sec00132132
org.apache.spark.sql.catalyst.streaming4 ms0022
org.apache.spark.sql.catalyst.trees0.25 sec003030
org.apache.spark.sql.catalyst.util5.9 sec02308310
org.apache.spark.sql.connector1 min 8 sec01608609
org.apache.spark.sql.connector.catalog0.52 sec009494
org.apache.spark.sql.connector.catalog.functions8 ms0033
org.apache.spark.sql.connector.expressions11 ms0066
org.apache.spark.sql.execution6 min 33 sec04844848
org.apache.spark.sql.execution.adaptive2 min 6 sec006969
org.apache.spark.sql.execution.aggregate0.31 sec0011
org.apache.spark.sql.execution.arrow3.2 sec004040
org.apache.spark.sql.execution.bucketing44 ms0088
org.apache.spark.sql.execution.columnar26 sec00189189
org.apache.spark.sql.execution.columnar.compression1.2 sec00119119
org.apache.spark.sql.execution.command24 sec00286286
org.apache.spark.sql.execution.command.v132 sec00119119
org.apache.spark.sql.execution.command.v211 sec009797
org.apache.spark.sql.execution.datasources1 min 32 sec00291291
org.apache.spark.sql.execution.datasources.binaryfile1 sec0099
org.apache.spark.sql.execution.datasources.csv2 min 9 sec00384384
org.apache.spark.sql.execution.datasources.jdbc9 ms0022
org.apache.spark.sql.execution.datasources.jdbc.connection0.97 sec001515
org.apache.spark.sql.execution.datasources.json1 min 33 sec00372372
org.apache.spark.sql.execution.datasources.noop1 sec0066
org.apache.spark.sql.execution.datasources.orc5 min 35 sec01456457
org.apache.spark.sql.execution.datasources.parquet20 min07724731
org.apache.spark.sql.execution.datasources.text10 sec004444
org.apache.spark.sql.execution.datasources.v20.49 sec008282
org.apache.spark.sql.execution.datasources.v2.jdbc2.2 sec001414
org.apache.spark.sql.execution.debug1.2 sec001212
org.apache.spark.sql.execution.exchange0.13 sec0033
org.apache.spark.sql.execution.history21 ms0022
org.apache.spark.sql.execution.joins1 min 7 sec02327329
org.apache.spark.sql.execution.metric12 sec003737
org.apache.spark.sql.execution.python5.8 sec002828
org.apache.spark.sql.execution.streaming13 sec018081
org.apache.spark.sql.execution.streaming.sources15 sec004242
org.apache.spark.sql.execution.streaming.state45 sec00209209
org.apache.spark.sql.execution.ui4.5 sec001919
org.apache.spark.sql.execution.vectorized38 sec006767
org.apache.spark.sql.expressions6.1 sec001212
org.apache.spark.sql.hive17 min02700702
org.apache.spark.sql.hive.client16 min201+20111058-3181260-117
org.apache.spark.sql.hive.execution36 min059715742171
org.apache.spark.sql.hive.execution.command2 min 6 sec00117117
org.apache.spark.sql.hive.orc2 min 26 sec02139141
org.apache.spark.sql.hive.security0.37 sec0033
org.apache.spark.sql.hive.thriftserver32 min019537556
org.apache.spark.sql.hive.thriftserver.ui3 sec0088
org.apache.spark.sql.internal12 sec008181
org.apache.spark.sql.jdbc21 sec054156210
org.apache.spark.sql.jdbc.v2-35 ms036036
org.apache.spark.sql.kafka01033 min00465465
org.apache.spark.sql.kafka010.consumer25 sec001818
org.apache.spark.sql.kafka010.producer0.35 sec0044
org.apache.spark.sql.sources10 min04426430
org.apache.spark.sql.streaming8 min 45 sec00515515
org.apache.spark.sql.streaming.continuous38 sec003535
org.apache.spark.sql.streaming.sources0.93 sec002828
org.apache.spark.sql.streaming.test10 sec005151
org.apache.spark.sql.streaming.ui13 sec001010
org.apache.spark.sql.test26 sec006868
org.apache.spark.sql.types0.29 sec02270272
org.apache.spark.sql.util2.8 sec012728
org.apache.spark.status2 sec0050+1850+18
org.apache.spark.status.api.v159 ms0022
org.apache.spark.status.api.v1.sql3.1 sec0066
org.apache.spark.storage3 min 33 sec00318318
org.apache.spark.streaming4 min 36 sec01243244
org.apache.spark.streaming.api.java15 ms0011
org.apache.spark.streaming.kafka01056 sec002525
org.apache.spark.streaming.kinesis2.2 sec0243761
org.apache.spark.streaming.rdd2.9 sec001313
org.apache.spark.streaming.receiver0.29 sec0066
org.apache.spark.streaming.scheduler12 sec002424
org.apache.spark.streaming.scheduler.rate68 ms001111
org.apache.spark.streaming.ui0.5 sec0088
org.apache.spark.streaming.util7 sec005555
org.apache.spark.ui16 sec025052
org.apache.spark.ui.scope1 ms0011
org.apache.spark.ui.storage21 ms0055
org.apache.spark.unsafe39 ms0088
org.apache.spark.unsafe.array3 ms0011
org.apache.spark.unsafe.hash0.33 sec0066
org.apache.spark.unsafe.map2.9 sec003232
org.apache.spark.unsafe.types0.39 sec016162
org.apache.spark.util32 sec00194194
org.apache.spark.util.collection3 min 6 sec03131134
org.apache.spark.util.collection.unsafe.sort3.7 sec008686
org.apache.spark.util.io1.8 sec0099
org.apache.spark.util.kvstore2 sec01105106
org.apache.spark.util.logging0.12 sec0011
org.apache.spark.util.random7.1 sec003232
org.apache.spark.util.sketch24 sec003434
test.org.apache.spark25 sec00112112
test.org.apache.spark.sql33 sec00121121
test.org.apache.spark.sql.execution.sort3 ms001010
test.org.apache.spark.sql.streaming0.27 sec0022
test.org.apache.spark.streaming22 sec007979