Console Output

Skipping 1,033 KB..
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:290
[warn] * network-shuffle / Test / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1059
[warn] * network-shuffle / scalaStyleOnCompile / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:191
[warn] * network-shuffle / scalaStyleOnTest / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:192
[warn] * network-yarn / Compile / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1058
[warn] * network-yarn / M2r / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:289
[warn] * network-yarn / Sbt / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:290
[warn] * network-yarn / Test / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1059
[warn] * network-yarn / scalaStyleOnCompile / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:191
[warn] * network-yarn / scalaStyleOnTest / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:192
[warn] * repl / Compile / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1058
[warn] * repl / M2r / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:289
[warn] * repl / Sbt / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:290
[warn] * repl / Test / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1059
[warn] * repl / scalaStyleOnCompile / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:191
[warn] * repl / scalaStyleOnTest / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:192
[warn] * sketch / Compile / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1058
[warn] * sketch / M2r / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:289
[warn] * sketch / Sbt / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:290
[warn] * sketch / Test / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1059
[warn] * sketch / scalaStyleOnCompile / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:191
[warn] * sketch / scalaStyleOnTest / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:192
[warn] * spark / Compile / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1058
[warn] * spark / M2r / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:289
[warn] * spark / Sbt / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:290
[warn] * spark / Test / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1059
[warn] * spark / scalaStyleOnCompile / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:191
[warn] * spark / scalaStyleOnTest / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:192
[warn] * sql / Compile / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1058
[warn] * sql / M2r / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:289
[warn] * sql / Sbt / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:290
[warn] * sql / Test / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1059
[warn] * sql / scalaStyleOnCompile / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:191
[warn] * sql / scalaStyleOnTest / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:192
[warn] * sql-kafka-0-10 / Compile / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1058
[warn] * sql-kafka-0-10 / M2r / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:289
[warn] * sql-kafka-0-10 / Sbt / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:290
[warn] * sql-kafka-0-10 / Test / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1059
[warn] * sql-kafka-0-10 / scalaStyleOnCompile / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:191
[warn] * sql-kafka-0-10 / scalaStyleOnTest / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:192
[warn] * streaming / Compile / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1058
[warn] * streaming / M2r / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:289
[warn] * streaming / Sbt / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:290
[warn] * streaming / Test / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1059
[warn] * streaming / scalaStyleOnCompile / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:191
[warn] * streaming / scalaStyleOnTest / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:192
[warn] * streaming-kafka-0-10 / Compile / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1058
[warn] * streaming-kafka-0-10 / M2r / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:289
[warn] * streaming-kafka-0-10 / Sbt / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:290
[warn] * streaming-kafka-0-10 / Test / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1059
[warn] * streaming-kafka-0-10 / scalaStyleOnCompile / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:191
[warn] * streaming-kafka-0-10 / scalaStyleOnTest / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:192
[warn] * streaming-kafka-0-10-assembly / Compile / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1058
[warn] * streaming-kafka-0-10-assembly / M2r / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:289
[warn] * streaming-kafka-0-10-assembly / Sbt / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:290
[warn] * streaming-kafka-0-10-assembly / Test / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1059
[warn] * streaming-kafka-0-10-assembly / scalaStyleOnCompile / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:191
[warn] * streaming-kafka-0-10-assembly / scalaStyleOnTest / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:192
[warn] * streaming-kinesis-asl / Compile / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1058
[warn] * streaming-kinesis-asl / M2r / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:289
[warn] * streaming-kinesis-asl / Sbt / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:290
[warn] * streaming-kinesis-asl / Test / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1059
[warn] * streaming-kinesis-asl / scalaStyleOnCompile / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:191
[warn] * streaming-kinesis-asl / scalaStyleOnTest / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:192
[warn] * streaming-kinesis-asl-assembly / Compile / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1058
[warn] * streaming-kinesis-asl-assembly / M2r / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:289
[warn] * streaming-kinesis-asl-assembly / Sbt / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:290
[warn] * streaming-kinesis-asl-assembly / Test / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1059
[warn] * streaming-kinesis-asl-assembly / scalaStyleOnCompile / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:191
[warn] * streaming-kinesis-asl-assembly / scalaStyleOnTest / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:192
[warn] * tags / Compile / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1058
[warn] * tags / M2r / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:289
[warn] * tags / Sbt / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:290
[warn] * tags / Test / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1059
[warn] * tags / scalaStyleOnCompile / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:191
[warn] * tags / scalaStyleOnTest / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:192
[warn] * token-provider-kafka-0-10 / Compile / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1058
[warn] * token-provider-kafka-0-10 / M2r / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:289
[warn] * token-provider-kafka-0-10 / Sbt / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:290
[warn] * token-provider-kafka-0-10 / Test / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1059
[warn] * token-provider-kafka-0-10 / scalaStyleOnCompile / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:191
[warn] * token-provider-kafka-0-10 / scalaStyleOnTest / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:192
[warn] * tools / Compile / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1058
[warn] * tools / M2r / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:289
[warn] * tools / Sbt / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:290
[warn] * tools / Test / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1059
[warn] * tools / scalaStyleOnCompile / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:191
[warn] * tools / scalaStyleOnTest / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:192
[warn] * unsafe / Compile / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1058
[warn] * unsafe / M2r / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:289
[warn] * unsafe / Sbt / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:290
[warn] * unsafe / Test / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1059
[warn] * unsafe / scalaStyleOnCompile / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:191
[warn] * unsafe / scalaStyleOnTest / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:192
[warn] * yarn / Compile / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1058
[warn] * yarn / M2r / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:289
[warn] * yarn / Sbt / publishMavenStyle
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:290
[warn] * yarn / Test / checkstyle / javaSource
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:1059
[warn] * yarn / scalaStyleOnCompile / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:191
[warn] * yarn / scalaStyleOnTest / logLevel
[warn]   +- /home/jenkins/workspace/SparkPullRequestBuilder/project/SparkBuild.scala:192
[warn]  
[warn] note: a setting might still be used by a command; to exclude a key from this `lintUnused` check
[warn] either append it to `Global / excludeLintKeys` or call .withRank(KeyRanks.Invisible) on the key
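
The note above describes two remedies for these lintUnused warnings; a minimal build.sbt-style sketch, assuming sbt 1.5+ (the key names below are illustrative, not taken from Spark's actual SparkBuild.scala):

    // Option 1: exclude an already-defined key from the lintUnused check.
    // publishMavenStyle is a standard sbt key; excluding it here silences the
    // warning for every scope in which it is set but never read directly.
    Global / excludeLintKeys += publishMavenStyle

    // Option 2: declare a custom key as invisible so lintUnused ignores it.
    // myStyleCheck is a hypothetical key, not one defined by Spark's build.
    val myStyleCheck = taskKey[Unit]("runs a style check on compile")
      .withRank(KeyRanks.Invisible)

Option 1 suits keys owned by sbt or by plugins; Option 2 suits keys the build itself declares.
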
[warn] multiple main classes detected: run 'show discoveredMainClasses' to see the list
[warn] multiple main classes detected: run 'show discoveredMainClasses' to see the list
[warn] multiple main classes detected: run 'show discoveredMainClasses' to see the list
[warn] multiple main classes detected: run 'show discoveredMainClasses' to see the list
[warn] multiple main classes detected: run 'show discoveredMainClasses' to see the list
[warn] multiple main classes detected: run 'show discoveredMainClasses' to see the list
[success] Total time: 37 s, completed Oct 21, 2021 12:07:09 PM

========================================================================
Running Spark unit tests
========================================================================

========================================================================
Running SparkR tests
========================================================================

Attaching package: ‘SparkR’

The following objects are masked from ‘package:testthat’:

    describe, not

The following objects are masked from ‘package:stats’:

    cov, filter, lag, na.omit, predict, sd, var, window

The following objects are masked from ‘package:base’:

    as.data.frame, colnames, colnames<-, drop, endsWith, intersect,
    rank, rbind, sample, startsWith, subset, summary, transform, union

Spark package found in SPARK_HOME: /home/jenkins/workspace/SparkPullRequestBuilder
binary_function: 
binary functions: ...........
binaryFile: 
functions on binary files: ....
broadcast: 
broadcast variables: ..
client: 
functions in client.R: .....
context: 
test functions in sparkR.R: ..............................................
includePackage: 
include R packages: ..
jvm_api: 
JVM API: ..
mllib_classification: 
MLlib classification algorithms, except for tree-based algorithms: ...........................................................................
mllib_clustering: 
MLlib clustering algorithms: ......................................................................
mllib_fpm: 
MLlib frequent pattern mining: ......
mllib_recommendation: 
MLlib recommendation algorithms: ........
mllib_regression: 
MLlib regression algorithms, except for tree-based algorithms: ........................................................................................................................................
mllib_stat: 
MLlib statistics algorithms: ........
mllib_tree: 
MLlib tree-based algorithms: ..............................................................................................
parallelize_collect: 
parallelize() and collect(): .............................
rdd: 
basic RDD functions: ............................................................................................................................................................................................................................................................................................................................................................................................................................................
Serde: 
SerDe functionality: .......................................
shuffle: 
partitionBy, groupByKey, reduceByKey etc.: ....................
sparkR: 
functions in sparkR.R: ....
sparkSQL_arrow: 
SparkSQL Arrow optimization: SSSSSSSSSSS
sparkSQL_eager: 
test show SparkDataFrame when eager execution is enabled.: ......
sparkSQL: 
SparkSQL functions: ......................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................
streaming: 
Structured Streaming: ..........................................
take: 
tests RDD function take(): ................
textFile: 
the textFile() function: ..............
utils: 
functions in utils.R: ..............................................
Windows: 
Windows-specific tests: S

══ Skipped ═════════════════════════════════════════════════════════════════════
1. createDataFrame/collect Arrow optimization (test_sparkSQL_arrow.R:28:3) - Reason: arrow cannot be loaded

2. createDataFrame/collect Arrow optimization - many partitions (partition order test) (test_sparkSQL_arrow.R:45:3) - Reason: arrow cannot be loaded

3. createDataFrame/collect Arrow optimization - type specification (test_sparkSQL_arrow.R:51:3) - Reason: arrow cannot be loaded

4. dapply() Arrow optimization (test_sparkSQL_arrow.R:75:3) - Reason: arrow cannot be loaded

5. dapply() Arrow optimization - type specification (test_sparkSQL_arrow.R:109:3) - Reason: arrow cannot be loaded

6. dapply() Arrow optimization - type specification (date and timestamp) (test_sparkSQL_arrow.R:138:3) - Reason: arrow cannot be loaded

7. gapply() Arrow optimization (test_sparkSQL_arrow.R:147:3) - Reason: arrow cannot be loaded

8. gapply() Arrow optimization - type specification (test_sparkSQL_arrow.R:190:3) - Reason: arrow cannot be loaded

9. gapply() Arrow optimization - type specification (date and timestamp) (test_sparkSQL_arrow.R:222:3) - Reason: arrow cannot be loaded

10. Arrow optimization - unsupported types (test_sparkSQL_arrow.R:233:3) - Reason: arrow cannot be loaded

11. SPARK-32478: gapply() Arrow optimization - error message for schema mismatch (test_sparkSQL_arrow.R:244:3) - Reason: arrow cannot be loaded

12. sparkJars tag in SparkContext (test_Windows.R:22:5) - Reason: This test is only for Windows, skipped

══ DONE ════════════════════════════════════════════════════════════════════════
Using R_SCRIPT_PATH = /usr/bin
++++ dirname /home/jenkins/workspace/SparkPullRequestBuilder/R/install-dev.sh
+++ cd /home/jenkins/workspace/SparkPullRequestBuilder/R
+++ pwd
++ FWDIR=/home/jenkins/workspace/SparkPullRequestBuilder/R
++ LIB_DIR=/home/jenkins/workspace/SparkPullRequestBuilder/R/lib
++ mkdir -p /home/jenkins/workspace/SparkPullRequestBuilder/R/lib
++ pushd /home/jenkins/workspace/SparkPullRequestBuilder/R
++ . /home/jenkins/workspace/SparkPullRequestBuilder/R/find-r.sh
+++ '[' -z /usr/bin ']'
++ . /home/jenkins/workspace/SparkPullRequestBuilder/R/create-rd.sh
+++ set -o pipefail
+++ set -e
+++++ dirname /home/jenkins/workspace/SparkPullRequestBuilder/R/create-rd.sh
++++ cd /home/jenkins/workspace/SparkPullRequestBuilder/R
++++ pwd
+++ FWDIR=/home/jenkins/workspace/SparkPullRequestBuilder/R
+++ pushd /home/jenkins/workspace/SparkPullRequestBuilder/R
+++ . /home/jenkins/workspace/SparkPullRequestBuilder/R/find-r.sh
++++ '[' -z /usr/bin ']'
+++ /usr/bin/Rscript -e ' if(requireNamespace("devtools", quietly=TRUE)) { setwd("/home/jenkins/workspace/SparkPullRequestBuilder/R"); devtools::document(pkg="./pkg", roclets="rd") }'
Updating SparkR documentation
Loading SparkR
Creating a new generic function for ‘as.data.frame’ in package ‘SparkR’
Creating a new generic function for ‘colnames’ in package ‘SparkR’
Creating a new generic function for ‘colnames<-’ in package ‘SparkR’
Creating a new generic function for ‘cov’ in package ‘SparkR’
Creating a new generic function for ‘drop’ in package ‘SparkR’
Creating a new generic function for ‘na.omit’ in package ‘SparkR’
Creating a new generic function for ‘filter’ in package ‘SparkR’
Creating a new generic function for ‘intersect’ in package ‘SparkR’
Creating a new generic function for ‘sample’ in package ‘SparkR’
Creating a new generic function for ‘transform’ in package ‘SparkR’
Creating a new generic function for ‘subset’ in package ‘SparkR’
Creating a new generic function for ‘summary’ in package ‘SparkR’
Creating a new generic function for ‘union’ in package ‘SparkR’
Creating a new generic function for ‘endsWith’ in package ‘SparkR’
Creating a new generic function for ‘startsWith’ in package ‘SparkR’
Creating a new generic function for ‘lag’ in package ‘SparkR’
Creating a new generic function for ‘rank’ in package ‘SparkR’
Creating a new generic function for ‘sd’ in package ‘SparkR’
Creating a new generic function for ‘var’ in package ‘SparkR’
Creating a new generic function for ‘window’ in package ‘SparkR’
Creating a new generic function for ‘predict’ in package ‘SparkR’
Creating a new generic function for ‘rbind’ in package ‘SparkR’
Creating a generic function for ‘substr’ from package ‘base’ in package ‘SparkR’
Creating a generic function for ‘%in%’ from package ‘base’ in package ‘SparkR’
Creating a generic function for ‘lapply’ from package ‘base’ in package ‘SparkR’
Creating a generic function for ‘Filter’ from package ‘base’ in package ‘SparkR’
Creating a generic function for ‘nrow’ from package ‘base’ in package ‘SparkR’
Creating a generic function for ‘ncol’ from package ‘base’ in package ‘SparkR’
Creating a generic function for ‘factorial’ from package ‘base’ in package ‘SparkR’
Creating a generic function for ‘atan2’ from package ‘base’ in package ‘SparkR’
Creating a generic function for ‘ifelse’ from package ‘base’ in package ‘SparkR’
++ /usr/bin/R CMD INSTALL --library=/home/jenkins/workspace/SparkPullRequestBuilder/R/lib /home/jenkins/workspace/SparkPullRequestBuilder/R/pkg/
* installing *source* package ‘SparkR’ ...
** using staged installation
** R
** inst
** byte-compile and prepare package for lazy loading
Creating a new generic function for ‘as.data.frame’ in package ‘SparkR’
Creating a new generic function for ‘colnames’ in package ‘SparkR’
Creating a new generic function for ‘colnames<-’ in package ‘SparkR’
Creating a new generic function for ‘cov’ in package ‘SparkR’
Creating a new generic function for ‘drop’ in package ‘SparkR’
Creating a new generic function for ‘na.omit’ in package ‘SparkR’
Creating a new generic function for ‘filter’ in package ‘SparkR’
Creating a new generic function for ‘intersect’ in package ‘SparkR’
Creating a new generic function for ‘sample’ in package ‘SparkR’
Creating a new generic function for ‘transform’ in package ‘SparkR’
Creating a new generic function for ‘subset’ in package ‘SparkR’
Creating a new generic function for ‘summary’ in package ‘SparkR’
Creating a new generic function for ‘union’ in package ‘SparkR’
Creating a new generic function for ‘endsWith’ in package ‘SparkR’
Creating a new generic function for ‘startsWith’ in package ‘SparkR’
Creating a new generic function for ‘lag’ in package ‘SparkR’
Creating a new generic function for ‘rank’ in package ‘SparkR’
Creating a new generic function for ‘sd’ in package ‘SparkR’
Creating a new generic function for ‘var’ in package ‘SparkR’
Creating a new generic function for ‘window’ in package ‘SparkR’
Creating a new generic function for ‘predict’ in package ‘SparkR’
Creating a new generic function for ‘rbind’ in package ‘SparkR’
Creating a generic function for ‘substr’ from package ‘base’ in package ‘SparkR’
Creating a generic function for ‘%in%’ from package ‘base’ in package ‘SparkR’
Creating a generic function for ‘lapply’ from package ‘base’ in package ‘SparkR’
Creating a generic function for ‘Filter’ from package ‘base’ in package ‘SparkR’
Creating a generic function for ‘nrow’ from package ‘base’ in package ‘SparkR’
Creating a generic function for ‘ncol’ from package ‘base’ in package ‘SparkR’
Creating a generic function for ‘factorial’ from package ‘base’ in package ‘SparkR’
Creating a generic function for ‘atan2’ from package ‘base’ in package ‘SparkR’
Creating a generic function for ‘ifelse’ from package ‘base’ in package ‘SparkR’
** help
*** installing help indices
** building package indices
** installing vignettes
** testing if installed package can be loaded from temporary location
** testing if installed package can be loaded from final location
** testing if installed package keeps a record of temporary installation path
* DONE (SparkR)
++ cd /home/jenkins/workspace/SparkPullRequestBuilder/R/lib
++ jar cfM /home/jenkins/workspace/SparkPullRequestBuilder/R/lib/sparkr.zip SparkR
++ popd
++ cd /home/jenkins/workspace/SparkPullRequestBuilder/R/..
++ pwd
+ SPARK_HOME=/home/jenkins/workspace/SparkPullRequestBuilder
+ . /home/jenkins/workspace/SparkPullRequestBuilder/bin/load-spark-env.sh
++ '[' -z /home/jenkins/workspace/SparkPullRequestBuilder ']'
++ SPARK_ENV_SH=spark-env.sh
++ '[' -z '' ']'
++ export SPARK_ENV_LOADED=1
++ SPARK_ENV_LOADED=1
++ export SPARK_CONF_DIR=/home/jenkins/workspace/SparkPullRequestBuilder/conf
++ SPARK_CONF_DIR=/home/jenkins/workspace/SparkPullRequestBuilder/conf
++ SPARK_ENV_SH=/home/jenkins/workspace/SparkPullRequestBuilder/conf/spark-env.sh
++ [[ -f /home/jenkins/workspace/SparkPullRequestBuilder/conf/spark-env.sh ]]
++ '[' -z '' ']'
++ SCALA_VERSION_1=2.13
++ SCALA_VERSION_2=2.12
++ ASSEMBLY_DIR_1=/home/jenkins/workspace/SparkPullRequestBuilder/assembly/target/scala-2.13
++ ASSEMBLY_DIR_2=/home/jenkins/workspace/SparkPullRequestBuilder/assembly/target/scala-2.12
++ ENV_VARIABLE_DOC=https://spark.apache.org/docs/latest/configuration.html#environment-variables
++ [[ -d /home/jenkins/workspace/SparkPullRequestBuilder/assembly/target/scala-2.13 ]]
++ [[ -d /home/jenkins/workspace/SparkPullRequestBuilder/assembly/target/scala-2.13 ]]
++ export SPARK_SCALA_VERSION=2.12
++ SPARK_SCALA_VERSION=2.12
+ '[' -f /home/jenkins/workspace/SparkPullRequestBuilder/RELEASE ']'
+ SPARK_JARS_DIR=/home/jenkins/workspace/SparkPullRequestBuilder/assembly/target/scala-2.12/jars
+ '[' -d /home/jenkins/workspace/SparkPullRequestBuilder/assembly/target/scala-2.12/jars ']'
+ SPARK_HOME=/home/jenkins/workspace/SparkPullRequestBuilder
+ /usr/bin/R CMD build /home/jenkins/workspace/SparkPullRequestBuilder/R/pkg
* checking for file ‘/home/jenkins/workspace/SparkPullRequestBuilder/R/pkg/DESCRIPTION’ ... OK
* preparing ‘SparkR’:
* checking DESCRIPTION meta-information ... OK
* installing the package to build vignettes
* creating vignettes ... OK
* checking for LF line-endings in source and make files and shell scripts
* checking for empty or unneeded directories
* building ‘SparkR_3.3.0.tar.gz’

+ find pkg/vignettes/. -not -name . -not -name '*.Rmd' -not -name '*.md' -not -name '*.pdf' -not -name '*.html' -delete
++ grep Version /home/jenkins/workspace/SparkPullRequestBuilder/R/pkg/DESCRIPTION
++ awk '{print $NF}'
+ VERSION=3.3.0
+ CRAN_CHECK_OPTIONS=--as-cran
+ '[' -n 1 ']'
+ CRAN_CHECK_OPTIONS='--as-cran --no-tests'
+ '[' -n 1 ']'
+ CRAN_CHECK_OPTIONS='--as-cran --no-tests --no-manual --no-vignettes'
+ echo 'Running CRAN check with --as-cran --no-tests --no-manual --no-vignettes options'
Running CRAN check with --as-cran --no-tests --no-manual --no-vignettes options
+ export _R_CHECK_FORCE_SUGGESTS_=FALSE
+ _R_CHECK_FORCE_SUGGESTS_=FALSE
+ '[' -n 1 ']'
+ '[' -n 1 ']'
+ /usr/bin/R CMD check --as-cran --no-tests --no-manual --no-vignettes SparkR_3.3.0.tar.gz
* using log directory ‘/home/jenkins/workspace/SparkPullRequestBuilder/R/SparkR.Rcheck’
* using R version 3.6.3 (2020-02-29)
* using platform: x86_64-pc-linux-gnu (64-bit)
* using session charset: UTF-8
* using options ‘--no-tests --no-manual --no-vignettes --as-cran’
* checking for file ‘SparkR/DESCRIPTION’ ... OK
* checking extension type ... Package
* this is package ‘SparkR’ version ‘3.3.0’
* package encoding: UTF-8
* checking CRAN incoming feasibility ... NOTE
Maintainer: ‘Felix Cheung <felixcheung@apache.org>’

New submission

Package was archived on CRAN

CRAN repository db overrides:
  X-CRAN-Comment: Archived on 2021-06-28 as issues were not corrected
    in time.

  Should use tools::R_user_dir().
* checking package namespace information ... OK
* checking package dependencies ... NOTE
Package suggested but not available for checking: ‘arrow’
* checking if this is a source package ... OK
* checking if there is a namespace ... OK
* checking for executable files ... OK
* checking for hidden files and directories ... OK
* checking for portable file names ... OK
* checking for sufficient/correct file permissions ... OK
* checking whether package ‘SparkR’ can be installed ... OK
* checking installed package size ... OK
* checking package directory ... OK
* checking for future file timestamps ... OK
* checking ‘build’ directory ... OK
* checking DESCRIPTION meta-information ... OK
* checking top-level files ... OK
* checking for left-over files ... OK
* checking index information ... OK
* checking package subdirectories ... OK
* checking R files for non-ASCII characters ... OK
* checking R files for syntax errors ... OK
* checking whether the package can be loaded ... OK
* checking whether the package can be loaded with stated dependencies ... OK
* checking whether the package can be unloaded cleanly ... OK
* checking whether the namespace can be loaded with stated dependencies ... OK
* checking whether the namespace can be unloaded cleanly ... OK
* checking loading without being on the library search path ... OK
* checking use of S3 registration ... OK
* checking dependencies in R code ... OK
* checking S3 generic/method consistency ... OK
* checking replacement functions ... OK
* checking foreign function calls ... OK
* checking R code for possible problems ... OK
* checking Rd files ... OK
* checking Rd metadata ... OK
* checking Rd line widths ... OK
* checking Rd cross-references ... OK
* checking for missing documentation entries ... OK
* checking for code/documentation mismatches ... OK
* checking Rd \usage sections ... OK
* checking Rd contents ... OK
* checking for unstated dependencies in examples ... OK
* checking installed files from ‘inst/doc’ ... OK
* checking files in ‘vignettes’ ... OK
* checking examples ... OK
* checking for unstated dependencies in ‘tests’ ... OK
* checking tests ... SKIPPED
* checking for unstated dependencies in vignettes ... OK
* checking package vignettes in ‘inst/doc’ ... OK
* checking running R code from vignettes ... SKIPPED
* checking re-building of vignette outputs ... SKIPPED
* checking for detritus in the temp directory ... OK
* DONE

Status: 2 NOTEs
See
  ‘/home/jenkins/workspace/SparkPullRequestBuilder/R/SparkR.Rcheck/00check.log’
for details.


+ popd
Tests passed.
Attempting to post to GitHub...
 > Post successful.
+ ./build/sbt unsafe/test
Archiving artifacts
Recording test results
[Checks API] No suitable checks publisher found.
Setting status of 7bde2a66bc463565d5ae1d09dc1b8ddf32cf9b4f to SUCCESS with url https://amplab.cs.berkeley.edu/jenkins/job/SparkPullRequestBuilder/144514/ and message: 'Build finished. '
FileNotFoundException means that the credentials Jenkins is using are probably wrong, or that the user account does not have write access to the repo.
org.kohsuke.github.GHFileNotFoundException: https://api.github.com/repos/apache/spark/statuses/7bde2a66bc463565d5ae1d09dc1b8ddf32cf9b4f {"message":"Not Found","documentation_url":"https://docs.github.com/rest/reference/repos#create-a-commit-status"}
	at org.kohsuke.github.GitHubClient.interpretApiError(GitHubClient.java:486)
	at org.kohsuke.github.GitHubClient.sendRequest(GitHubClient.java:414)
	at org.kohsuke.github.GitHubClient.sendRequest(GitHubClient.java:358)
	at org.kohsuke.github.Requester.fetch(Requester.java:76)
	at org.kohsuke.github.GHRepository.createCommitStatus(GHRepository.java:1979)
	at org.jenkinsci.plugins.ghprb.extensions.status.GhprbSimpleStatus.createCommitStatus(GhprbSimpleStatus.java:283)
	at org.jenkinsci.plugins.ghprb.extensions.status.GhprbSimpleStatus.onBuildComplete(GhprbSimpleStatus.java:241)
	at org.jenkinsci.plugins.ghprb.GhprbBuilds.onCompleted(GhprbBuilds.java:205)
	at org.jenkinsci.plugins.ghprb.GhprbBuildListener.onCompleted(GhprbBuildListener.java:28)
	at hudson.model.listeners.RunListener.fireCompleted(RunListener.java:208)
	at hudson.model.Run.execute(Run.java:1933)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:99)
	at hudson.model.Executor.run(Executor.java:431)
Caused by: java.io.FileNotFoundException: https://api.github.com/repos/apache/spark/statuses/7bde2a66bc463565d5ae1d09dc1b8ddf32cf9b4f
	at java.base/jdk.internal.reflect.GeneratedConstructorAccessor206.newInstance(Unknown Source)
	at java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.base/java.lang.reflect.Constructor.newInstance(Constructor.java:490)
	at java.base/sun.net.www.protocol.http.HttpURLConnection$10.run(HttpURLConnection.java:1974)
	at java.base/sun.net.www.protocol.http.HttpURLConnection$10.run(HttpURLConnection.java:1969)
	at java.base/java.security.AccessController.doPrivileged(Native Method)
	at java.base/sun.net.www.protocol.http.HttpURLConnection.getChainedException(HttpURLConnection.java:1968)
	at java.base/sun.net.www.protocol.http.HttpURLConnection.getInputStream0(HttpURLConnection.java:1536)
	at java.base/sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1520)
	at java.base/sun.net.www.protocol.https.HttpsURLConnectionImpl.getInputStream(HttpsURLConnectionImpl.java:250)
	at org.kohsuke.github.GitHubHttpUrlConnectionClient$HttpURLConnectionResponseInfo.bodyStream(GitHubHttpUrlConnectionClient.java:196)
	at org.kohsuke.github.GitHubResponse$ResponseInfo.getBodyAsString(GitHubResponse.java:314)
	at org.kohsuke.github.GitHubResponse.parseBody(GitHubResponse.java:92)
	at org.kohsuke.github.Requester.lambda$fetch$1(Requester.java:76)
	at org.kohsuke.github.GitHubClient.createResponse(GitHubClient.java:455)
	at org.kohsuke.github.GitHubClient.sendRequest(GitHubClient.java:406)
	... 12 more
Caused by: java.io.FileNotFoundException: https://api.github.com/repos/apache/spark/statuses/7bde2a66bc463565d5ae1d09dc1b8ddf32cf9b4f
	at java.base/sun.net.www.protocol.http.HttpURLConnection.getInputStream0(HttpURLConnection.java:1920)
	at java.base/sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1520)
	at java.base/java.net.HttpURLConnection.getResponseCode(HttpURLConnection.java:527)
	at java.base/sun.net.www.protocol.https.HttpsURLConnectionImpl.getResponseCode(HttpsURLConnectionImpl.java:334)
	at org.kohsuke.github.GitHubHttpUrlConnectionClient.getResponseInfo(GitHubHttpUrlConnectionClient.java:64)
	at org.kohsuke.github.GitHubClient.sendRequest(GitHubClient.java:394)
	... 12 more
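
The call failing in the trace above is GHRepository.createCommitStatus from the org.kohsuke.github client. A minimal standalone Scala sketch of the same request, assuming that library is on the classpath and a token is supplied via a hypothetical GITHUB_TOKEN environment variable; a 404 ("Not Found") from this endpoint usually means the token is invalid or lacks write access to the repository, not that the commit is missing, which matches the diagnosis printed before the trace:

    import org.kohsuke.github.{GHCommitState, GitHub}

    object PostCommitStatus {
      def main(args: Array[String]): Unit = {
        // Authenticate with a personal access token (hypothetical env var).
        val gh   = GitHub.connectUsingOAuth(sys.env("GITHUB_TOKEN"))
        val repo = gh.getRepository("apache/spark")
        // Mirrors what GhprbSimpleStatus does when the build completes.
        repo.createCommitStatus(
          "7bde2a66bc463565d5ae1d09dc1b8ddf32cf9b4f",   // commit SHA from this build
          GHCommitState.SUCCESS,                        // reported state
          "https://amplab.cs.berkeley.edu/jenkins/job/SparkPullRequestBuilder/144514/",
          "Build finished.",                            // description
          "SparkPullRequestBuilder"                     // status context (illustrative)
        )
      }
    }
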

Refer to this link for build results (access rights to CI server needed): 
https://amplab.cs.berkeley.edu/jenkins//job/SparkPullRequestBuilder/144514/

Finished: SUCCESS