Success
Console Output

[Skipping 1,304 KB of log]
[WARNING] Entry: adam-distribution-spark2_2.11-0.33.0-SNAPSHOT/scaladocs/org/bdgenomics/adam/cli/CountReadKmersArgs.html longer than 100 characters.
[WARNING] Entry: adam-distribution-spark2_2.11-0.33.0-SNAPSHOT/scaladocs/org/bdgenomics/adam/cli/CountSliceKmers.html longer than 100 characters.
[WARNING] Entry: adam-distribution-spark2_2.11-0.33.0-SNAPSHOT/scaladocs/org/bdgenomics/adam/cli/TransformAlignmentsArgs.html longer than 100 characters.
[WARNING] Entry: adam-distribution-spark2_2.11-0.33.0-SNAPSHOT/scaladocs/org/bdgenomics/adam/cli/TransformSequences$.html longer than 100 characters.
[WARNING] Entry: adam-distribution-spark2_2.11-0.33.0-SNAPSHOT/scaladocs/org/bdgenomics/adam/cli/TransformGenotypesArgs.html longer than 100 characters.
[WARNING] Entry: adam-distribution-spark2_2.11-0.33.0-SNAPSHOT/scaladocs/org/bdgenomics/adam/cli/CountSliceKmersArgs.html longer than 100 characters.
[WARNING] Entry: adam-distribution-spark2_2.11-0.33.0-SNAPSHOT/scaladocs/org/bdgenomics/adam/cli/TransformAlignments$.html longer than 100 characters.
[WARNING] Entry: adam-distribution-spark2_2.11-0.33.0-SNAPSHOT/scaladocs/org/bdgenomics/adam/cli/TransformSequences.html longer than 100 characters.
[WARNING] Entry: adam-distribution-spark2_2.11-0.33.0-SNAPSHOT/repo/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT-sources.jar longer than 100 characters.
[WARNING] Entry: adam-distribution-spark2_2.11-0.33.0-SNAPSHOT/repo/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT-javadoc.jar longer than 100 characters.
[INFO] Building zip: /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-distribution/target/adam-distribution-spark2_2.11-0.33.0-SNAPSHOT-bin.zip
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary for ADAM_2.11 0.33.0-SNAPSHOT:
[INFO] 
[INFO] ADAM_2.11 .......................................... SUCCESS [  9.671 s]
[INFO] ADAM_2.11: Shader workaround ....................... SUCCESS [  5.511 s]
[INFO] ADAM_2.11: Avro-to-Dataset codegen utils ........... SUCCESS [  7.427 s]
[INFO] ADAM_2.11: Core .................................... SUCCESS [01:51 min]
[INFO] ADAM_2.11: APIs for Java, Python ................... SUCCESS [ 20.153 s]
[INFO] ADAM_2.11: CLI ..................................... SUCCESS [ 25.965 s]
[INFO] ADAM_2.11: Assembly ................................ SUCCESS [ 13.529 s]
[INFO] ADAM_2.11: Python APIs ............................. SUCCESS [01:39 min]
[INFO] ADAM_2.11: Distribution ............................ SUCCESS [ 39.156 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD SUCCESS
[INFO] ------------------------------------------------------------------------
[INFO] Total time:  05:32 min
[INFO] Finished at: 2020-08-18T09:28:54-07:00
[INFO] ------------------------------------------------------------------------
+ grep bdgenomics.adam
+ grep egg
+ tar tzvf adam-distribution/target/adam-distribution-spark2_2.11-0.33.0-SNAPSHOT-bin.tar.gz
drwxrwxr-x jenkins/jenkins        0 2020-08-18 09:26 adam-distribution-spark2_2.11-0.33.0-SNAPSHOT/r/bdgenomics.adam.egg-info/
-rw-r--r-- jenkins/jenkins 38674113 2020-08-18 09:26 adam-distribution-spark2_2.11-0.33.0-SNAPSHOT/repo/bdgenomics.adam-0.32.0a0-py3.6.egg
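(An xtrace quirk: bash prints the stages of a pipeline as each one starts, so the greps appear above before the tar they filter. A sketch of the same check written out in execution order:)

    # Verify the Python egg ships inside the distribution tarball
    tar tzvf adam-distribution/target/adam-distribution-spark2_2.11-0.33.0-SNAPSHOT-bin.tar.gz \
      | grep egg \
      | grep bdgenomics.adam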
+ ./bin/pyadam
Using PYSPARK=/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/spark-2.4.6-bin-hadoop2.7/bin/pyspark
2020-08-18 09:28:57 WARN  Utils:66 - Your hostname, research-jenkins-worker-07 resolves to a loopback address: 127.0.1.1; using 192.168.10.27 instead (on interface eth0)
2020-08-18 09:28:57 WARN  Utils:66 - Set SPARK_LOCAL_IP if you need to bind to another address
2020-08-18 09:28:57 WARN  NativeCodeLoader:62 - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
2020-08-18 09:29:02 WARN  Utils:66 - Truncated the string representation of a plan since it was too large. This behavior can be adjusted by setting 'spark.debug.maxToStringFields' in SparkEnv.conf.

[Stage 0:>                                                          (0 + 1) / 1]
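(The WARNs above are harmless for this smoke test. For reference, a minimal sketch of addressing the two actionable ones on a development box; SPARK_LOCAL_IP and sc.setLogLevel come straight from the messages, while the address value is an assumption for your own host:)

    # Bind Spark to a concrete interface instead of the loopback fallback (assumed address)
    export SPARK_LOCAL_IP=192.168.10.27
    ./bin/pyadam
    # ...then, inside the PySpark shell, as the banner suggests:
    #   sc.setLogLevel("ERROR")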
+ source deactivate
#!/bin/bash

# Determine the directory containing this script
if [[ -n $BASH_VERSION ]]; then
    _SCRIPT_LOCATION=${BASH_SOURCE[0]}
    _SHELL="bash"
elif [[ -n $ZSH_VERSION ]]; then
    _SCRIPT_LOCATION=${funcstack[1]}
    _SHELL="zsh"
else
    echo "Only bash and zsh are supported"
    return 1
fi
++ [[ -n 4.3.48(1)-release ]]
++ _SCRIPT_LOCATION=/home/jenkins/anaconda2/envs/adam-build-9d432ffc-ca6f-4bc4-b5aa-2fd5aec85a97/bin/deactivate
++ _SHELL=bash
_CONDA_DIR=$(dirname "$_SCRIPT_LOCATION")
dirname "$_SCRIPT_LOCATION"
+++ dirname /home/jenkins/anaconda2/envs/adam-build-9d432ffc-ca6f-4bc4-b5aa-2fd5aec85a97/bin/deactivate
++ _CONDA_DIR=/home/jenkins/anaconda2/envs/adam-build-9d432ffc-ca6f-4bc4-b5aa-2fd5aec85a97/bin

case "$(uname -s)" in
    CYGWIN*|MINGW*|MSYS*)
        EXT=".exe"
        export MSYS2_ENV_CONV_EXCL=CONDA_PATH
        ;;
    *)
        EXT=""
        ;;
esac
++ case "$(uname -s)" in
uname -s
+++ uname -s
++ EXT=

# shift over all args.  We don't accept any, so it's OK that we ignore them all here.
while [[ $# > 0 ]]
do
    key="$1"
    case $key in
        -h|--help)
            "$_CONDA_DIR/conda" ..deactivate $_SHELL$EXT -h
            if [[ -n $BASH_VERSION ]] && [[ "$(basename "$0" 2> /dev/null)" == "deactivate" ]]; then
                exit 0
            else
                return 0
            fi
            ;;
    esac
    shift # past argument or value
done
++ [[ 0 > 0 ]]
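(One nit in the vendored script: inside [[ ]], > is a lexicographic string comparison, so [[ $# > 0 ]] only works here because the argument count stays a single digit. A one-line sketch of the conventional numeric spelling, not a patch to the script above:)

    # Arithmetic comparison; robust for any argument count
    while (( $# > 0 )); do
        shift
    done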

# Ensure that this script is sourced, not executed
# Note that if the script was executed, we're running inside bash!
# Also note that errors are ignored as `activate foo` doesn't generate a bad
# value for $0 which would cause errors.
if [[ -n $BASH_VERSION ]] && [[ "$(basename "$0" 2> /dev/null)" == "deactivate" ]]; then
    (>&2 echo "Error: deactivate must be sourced. Run 'source deactivate'
instead of 'deactivate'.
")
    "$_CONDA_DIR/conda" ..deactivate $_SHELL$EXT -h
    exit 1
fi
++ [[ -n 4.3.48(1)-release ]]
basename "$0" 2> /dev/null
+++ basename /home/jenkins/workspace/ADAM-prb/HADOOP_VERSION/2.7.5/SCALA_VERSION/2.11/SPARK_VERSION/2.4.6/label/ubuntu/scripts/jenkins-test
++ [[ jenkins-test == \d\e\a\c\t\i\v\a\t\e ]]

if [[ -z "$CONDA_PATH_BACKUP" ]]; then
    if [[ -n $BASH_VERSION ]] && [[ "$(basename "$0" 2> /dev/null)" == "deactivate" ]]; then
        exit 0
    else
        return 0
    fi
fi
++ [[ -z /usr/lib/jvm/java-8-oracle/bin/:/usr/lib/jvm/java-8-oracle/bin/:/home/anaconda/bin/:/home/jenkins/tools/hudson.tasks.Maven_MavenInstallation/Maven_3.6.3/bin/:/home/jenkins/gems/bin:/usr/local/go/bin:/home/jenkins/go-projects/bin:/home/jenkins/anaconda2/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games ]]

if (( $? == 0 )); then
    # Inverse of activation: run deactivate scripts prior to deactivating env
    _CONDA_D="${CONDA_PREFIX}/etc/conda/deactivate.d"
    if [[ -d $_CONDA_D ]]; then
        eval $(find "$_CONDA_D" -iname "*.sh" -exec echo source \'{}\'';' \;)
    fi

#    # get the activation path that would have been provided for this prefix
#    _LAST_ACTIVATE_PATH=$("$_CONDA_DIR/conda" ..activate $_SHELL$EXT "$CONDA_PREFIX")
#
#    # in activate, we replace a placeholder so that conda keeps its place in the PATH order
#    # The activate script sets _CONDA_HOLD here to activate that behavior.
#    #   Otherwise, PATH is simply removed.
#    if [ -n "$_CONDA_HOLD" ]; then
#        export PATH="$($_CONDA_PYTHON2 -c "import re; print(re.sub(r'$_LAST_ACTIVATE_PATH(:?)', r'CONDA_PATH_PLACEHOLDER\1', '$PATH', 1))")"
#    else
#        export PATH="$($_CONDA_PYTHON2 -c "import re; print(re.sub(r'$_LAST_ACTIVATE_PATH(:?)', r'', '$PATH', 1))")"
#    fi
#
#    unset _LAST_ACTIVATE_PATH

    export PATH=$("$_CONDA_DIR/conda" ..deactivate.path $_SHELL$EXT "$CONDA_PREFIX")

    unset CONDA_DEFAULT_ENV
    unset CONDA_PREFIX
    unset CONDA_PATH_BACKUP
    export PS1="$CONDA_PS1_BACKUP"
    unset CONDA_PS1_BACKUP
    unset _CONDA_PYTHON2
else
    unset _CONDA_PYTHON2
    return $?
fi
++ ((  0 == 0  ))
++ _CONDA_D=/home/jenkins/anaconda2/envs/adam-build-9d432ffc-ca6f-4bc4-b5aa-2fd5aec85a97/etc/conda/deactivate.d
++ [[ -d /home/jenkins/anaconda2/envs/adam-build-9d432ffc-ca6f-4bc4-b5aa-2fd5aec85a97/etc/conda/deactivate.d ]]
"$_CONDA_DIR/conda" ..deactivate.path $_SHELL$EXT "$CONDA_PREFIX"
+++ /home/jenkins/anaconda2/envs/adam-build-9d432ffc-ca6f-4bc4-b5aa-2fd5aec85a97/bin/conda ..deactivate.path bash /home/jenkins/anaconda2/envs/adam-build-9d432ffc-ca6f-4bc4-b5aa-2fd5aec85a97
++ export PATH=/usr/lib/jvm/java-8-oracle/bin/:/usr/lib/jvm/java-8-oracle/bin/:/home/anaconda/bin/:/home/jenkins/tools/hudson.tasks.Maven_MavenInstallation/Maven_3.6.3/bin/:/home/jenkins/gems/bin:/usr/local/go/bin:/home/jenkins/go-projects/bin:/home/jenkins/anaconda2/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games
++ PATH=/usr/lib/jvm/java-8-oracle/bin/:/usr/lib/jvm/java-8-oracle/bin/:/home/anaconda/bin/:/home/jenkins/tools/hudson.tasks.Maven_MavenInstallation/Maven_3.6.3/bin/:/home/jenkins/gems/bin:/usr/local/go/bin:/home/jenkins/go-projects/bin:/home/jenkins/anaconda2/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games
++ unset CONDA_DEFAULT_ENV
++ unset CONDA_PREFIX
++ unset CONDA_PATH_BACKUP
++ export PS1=
++ PS1=
++ unset CONDA_PS1_BACKUP
++ unset _CONDA_PYTHON2

if [[ -n $BASH_VERSION ]]; then
    hash -r
elif [[ -n $ZSH_VERSION ]]; then
    rehash
fi
++ [[ -n 4.3.48(1)-release ]]
++ hash -r
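(The densest line in the script above is the eval/find one-liner that sources every deactivate.d hook. A plainer sketch of roughly what it expands to; not byte-for-byte equivalent, since find recurses and matches *.sh case-insensitively while a glob does neither:)

    # Source each deactivation hook registered under the environment (sketch)
    for hook in "$_CONDA_D"/*.sh; do
        [ -e "$hook" ] && source "$hook"
    done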
+ conda remove -y -n adam-build-9d432ffc-ca6f-4bc4-b5aa-2fd5aec85a97 --all

Package plan for package removal in environment /home/jenkins/anaconda2/envs/adam-build-9d432ffc-ca6f-4bc4-b5aa-2fd5aec85a97:

The following packages will be REMOVED:

    _libgcc_mutex:    0.1-main               
    ca-certificates:  2020.6.24-0            
    certifi:          2020.6.20-py36_0       
    ld_impl_linux-64: 2.33.1-h53a641e_7      
    libedit:          3.1.20191231-h14c3975_1
    libffi:           3.3-he6710b0_2         
    libgcc-ng:        9.1.0-hdf63c60_0       
    libstdcxx-ng:     9.1.0-hdf63c60_0       
    ncurses:          6.2-he6710b0_1         
    openssl:          1.1.1g-h7b6447c_0      
    pip:              20.2.2-py36_0          
    python:           3.6.10-h7579374_2      
    readline:         8.0-h7b6447c_0         
    setuptools:       49.6.0-py36_0          
    sqlite:           3.32.3-h62c20be_0      
    tk:               8.6.10-hbc83047_0      
    wheel:            0.34.2-py36_0          
    xz:               5.2.5-h7b6447c_0       
    zlib:             1.2.11-h7b6447c_3      
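(For context, the UUID-suffixed environment torn down here is the throwaway build env the test script created at the start of the run; the listed packages are just that env's base install. A sketch of the matching lifecycle, with the create step assumed from the removal:)

    ENV=adam-build-9d432ffc-ca6f-4bc4-b5aa-2fd5aec85a97
    conda create -y -n "$ENV" python=3.6   # assumed; matches the py36 packages listed above
    source activate "$ENV"
    # ... run the build and tests ...
    source deactivate
    conda remove -y -n "$ENV" --all        # the command traced below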

+ cp -r adam-python/target /home/jenkins/workspace/ADAM-prb/HADOOP_VERSION/2.7.5/SCALA_VERSION/2.11/SPARK_VERSION/2.4.6/label/ubuntu/scripts/../adam-python/
+ pushd adam-python
/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-python /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded ~/workspace/ADAM-prb/HADOOP_VERSION/2.7.5/SCALA_VERSION/2.11/SPARK_VERSION/2.4.6/label/ubuntu
+ make clean
pip uninstall -y adam
Skipping adam as it is not installed.
rm -rf bdgenomics/*.egg*
rm -rf build/
rm -rf dist/
+ make clean_sdist
rm -rf dist
+ popd
/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded ~/workspace/ADAM-prb/HADOOP_VERSION/2.7.5/SCALA_VERSION/2.11/SPARK_VERSION/2.4.6/label/ubuntu

if [ ${SPARK_VERSION} == 3.0.0 ]
then
    echo "Unable to build R support for Spark 3.0.0, SparkR is not available"
else
    # make a directory to install SparkR into, and set the R user libs path
    export R_LIBS_USER=${SPARK_HOME}/local_R_libs
    mkdir -p ${R_LIBS_USER}
    R CMD INSTALL \
      -l ${R_LIBS_USER} \
      ${SPARK_HOME}/R/lib/SparkR/

    export SPARKR_SUBMIT_ARGS="--jars ${ASSEMBLY_DIR}/${ASSEMBLY_JAR} --driver-class-path ${ASSEMBLY_DIR}/${ASSEMBLY_JAR} sparkr-shell"

    mvn -U \
    	-P r \
    	package \
    	-Dsuites=select.no.suites\* \
    	-Dhadoop.version=${HADOOP_VERSION}
fi
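(Two details in this block deserve a note: SPARKR_SUBMIT_ARGS is the environment variable SparkR reads for extra spark-submit arguments, here used to put the ADAM assembly jar on the driver classpath, and -Dsuites=select.no.suites\* makes scalatest-maven-plugin match no suite, so `mvn package` runs without re-executing the Scala tests. A sketch of reusing the same jar in an interactive SparkR session, with the variables as defined in the script; the sparkR launch itself is an assumption about your Spark layout:)

    export SPARKR_SUBMIT_ARGS="--jars ${ASSEMBLY_DIR}/${ASSEMBLY_JAR} --driver-class-path ${ASSEMBLY_DIR}/${ASSEMBLY_JAR} sparkr-shell"
    ${SPARK_HOME}/bin/sparkR   # SparkR reads SPARKR_SUBMIT_ARGS when it launches its JVM backend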
+ '[' 2.4.6 == 3.0.0 ']'
+ export R_LIBS_USER=/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/spark-2.4.6-bin-hadoop2.7/local_R_libs
+ R_LIBS_USER=/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/spark-2.4.6-bin-hadoop2.7/local_R_libs
+ mkdir -p /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/spark-2.4.6-bin-hadoop2.7/local_R_libs
+ R CMD INSTALL -l /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/spark-2.4.6-bin-hadoop2.7/local_R_libs /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/spark-2.4.6-bin-hadoop2.7/R/lib/SparkR/
* installing *binary* package ‘SparkR’ ...
* DONE (SparkR)
+ export 'SPARKR_SUBMIT_ARGS=--jars /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-assembly/target/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar --driver-class-path /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-assembly/target/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar sparkr-shell'
+ SPARKR_SUBMIT_ARGS='--jars /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-assembly/target/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar --driver-class-path /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-assembly/target/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar sparkr-shell'
+ mvn -U -P r package '-Dsuites=select.no.suites*' -Dhadoop.version=2.7.5
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=1g; support was removed in 8.0
[INFO] Scanning for projects...
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Build Order:
[INFO] 
[INFO] ADAM_2.11                                                          [pom]
[INFO] ADAM_2.11: Shader workaround                                       [jar]
[INFO] ADAM_2.11: Avro-to-Dataset codegen utils                           [jar]
[INFO] ADAM_2.11: Core                                                    [jar]
[INFO] ADAM_2.11: APIs for Java, Python                                   [jar]
[INFO] ADAM_2.11: CLI                                                     [jar]
[INFO] ADAM_2.11: Assembly                                                [jar]
[INFO] ADAM_2.11: R APIs                                                  [jar]
[INFO] 
[INFO] ------------< org.bdgenomics.adam:adam-parent-spark2_2.11 >-------------
[INFO] Building ADAM_2.11 0.33.0-SNAPSHOT                                 [1/8]
[INFO] --------------------------------[ pom ]---------------------------------
[INFO] 
[INFO] --- maven-enforcer-plugin:1.0:enforce (enforce-versions) @ adam-parent-spark2_2.11 ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.0:enforce (enforce-maven) @ adam-parent-spark2_2.11 ---
[INFO] 
[INFO] --- scalariform-maven-plugin:0.1.4:format (default-cli) @ adam-parent-spark2_2.11 ---
[INFO] Modified 2 of 435 .scala files
[INFO] 
[INFO] --- scala-maven-plugin:3.2.2:compile (scala-compile-first) @ adam-parent-spark2_2.11 ---
[INFO] No sources to compile
[INFO] 
[INFO] --- scala-maven-plugin:3.2.2:testCompile (scala-test-compile-first) @ adam-parent-spark2_2.11 ---
[INFO] No sources to compile
[INFO] 
[INFO] -------------< org.bdgenomics.adam:adam-shade-spark2_2.11 >-------------
[INFO] Building ADAM_2.11: Shader workaround 0.33.0-SNAPSHOT              [2/8]
[INFO] --------------------------------[ jar ]---------------------------------
[INFO] 
[INFO] --- maven-enforcer-plugin:1.0:enforce (enforce-versions) @ adam-shade-spark2_2.11 ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.0:enforce (enforce-maven) @ adam-shade-spark2_2.11 ---
[INFO] 
[INFO] --- scalariform-maven-plugin:0.1.4:format (default-cli) @ adam-shade-spark2_2.11 ---
[INFO] Modified 0 of 0 .scala files
[INFO] 
[INFO] --- maven-resources-plugin:3.1.0:resources (default-resources) @ adam-shade-spark2_2.11 ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] skip non existing resourceDirectory /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-shade/src/main/resources
[INFO] 
[INFO] --- scala-maven-plugin:3.2.2:compile (scala-compile-first) @ adam-shade-spark2_2.11 ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- maven-compiler-plugin:3.8.0:compile (default-compile) @ adam-shade-spark2_2.11 ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- plexus-component-metadata:1.5.5:generate-metadata (default) @ adam-shade-spark2_2.11 ---
[INFO] Discovered 1 component descriptors(s)
[INFO] 
[INFO] --- maven-resources-plugin:3.1.0:testResources (default-testResources) @ adam-shade-spark2_2.11 ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] skip non existing resourceDirectory /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-shade/src/test/resources
[INFO] 
[INFO] --- scala-maven-plugin:3.2.2:testCompile (scala-test-compile-first) @ adam-shade-spark2_2.11 ---
[INFO] No sources to compile
[INFO] 
[INFO] --- maven-compiler-plugin:3.8.0:testCompile (default-testCompile) @ adam-shade-spark2_2.11 ---
[INFO] No sources to compile
[INFO] 
[INFO] --- maven-surefire-plugin:3.0.0-M3:test (default-test) @ adam-shade-spark2_2.11 ---
[INFO] No tests to run.
[INFO] 
[INFO] --- maven-jar-plugin:3.2.0:jar (default-jar) @ adam-shade-spark2_2.11 ---
[INFO] Building jar: /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-shade/target/adam-shade-spark2_2.11-0.33.0-SNAPSHOT.jar
[INFO] 
[INFO] ------------< org.bdgenomics.adam:adam-codegen-spark2_2.11 >------------
[INFO] Building ADAM_2.11: Avro-to-Dataset codegen utils 0.33.0-SNAPSHOT  [3/8]
[INFO] --------------------------------[ jar ]---------------------------------
[INFO] 
[INFO] --- maven-enforcer-plugin:1.0:enforce (enforce-versions) @ adam-codegen-spark2_2.11 ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.0:enforce (enforce-maven) @ adam-codegen-spark2_2.11 ---
[INFO] 
[INFO] --- build-helper-maven-plugin:3.0.0:add-source (add-source) @ adam-codegen-spark2_2.11 ---
[INFO] Source directory: /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-codegen/src/main/scala added.
[INFO] 
[INFO] --- scalariform-maven-plugin:0.1.4:format (default-cli) @ adam-codegen-spark2_2.11 ---
[INFO] Modified 0 of 4 .scala files
[INFO] 
[INFO] --- maven-resources-plugin:3.1.0:resources (default-resources) @ adam-codegen-spark2_2.11 ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] skip non existing resourceDirectory /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-codegen/src/main/resources
[INFO] 
[INFO] --- scala-maven-plugin:3.2.2:compile (scala-compile-first) @ adam-codegen-spark2_2.11 ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- maven-compiler-plugin:3.8.0:compile (default-compile) @ adam-codegen-spark2_2.11 ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- build-helper-maven-plugin:3.0.0:add-test-source (add-test-source) @ adam-codegen-spark2_2.11 ---
[INFO] Test Source directory: /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-codegen/src/test/scala added.
[INFO] 
[INFO] --- maven-resources-plugin:3.1.0:testResources (default-testResources) @ adam-codegen-spark2_2.11 ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] skip non existing resourceDirectory /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-codegen/src/test/resources
[INFO] 
[INFO] --- scala-maven-plugin:3.2.2:testCompile (scala-test-compile-first) @ adam-codegen-spark2_2.11 ---
[INFO] No sources to compile
[INFO] 
[INFO] --- maven-compiler-plugin:3.8.0:testCompile (default-testCompile) @ adam-codegen-spark2_2.11 ---
[INFO] No sources to compile
[INFO] 
[INFO] --- maven-surefire-plugin:3.0.0-M3:test (default-test) @ adam-codegen-spark2_2.11 ---
[INFO] Tests are skipped.
[INFO] 
[INFO] --- scalatest-maven-plugin:2.0.0:test (test) @ adam-codegen-spark2_2.11 ---
Discovery starting.
Discovery completed in 41 milliseconds.
Run starting. Expected test count is: 0
Run completed in 44 milliseconds.
Total number of tests run: 0
Suites: completed 0, aborted 0
Tests: succeeded 0, failed 0, canceled 0, ignored 0, pending 0
No tests were executed.
[INFO] 
[INFO] --- maven-jar-plugin:3.2.0:jar (default-jar) @ adam-codegen-spark2_2.11 ---
[INFO] 
[INFO] -------------< org.bdgenomics.adam:adam-core-spark2_2.11 >--------------
[INFO] Building ADAM_2.11: Core 0.33.0-SNAPSHOT                           [4/8]
[INFO] --------------------------------[ jar ]---------------------------------
[INFO] 
[INFO] --- maven-enforcer-plugin:1.0:enforce (enforce-versions) @ adam-core-spark2_2.11 ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.0:enforce (enforce-maven) @ adam-core-spark2_2.11 ---
[INFO] 
[INFO] --- build-helper-maven-plugin:3.0.0:add-source (add-source) @ adam-core-spark2_2.11 ---
[INFO] Source directory: /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-core/src/main/scala added.
[INFO] Source directory: /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-core/target/generated-sources/src/main/scala added.
[INFO] 
[INFO] --- exec-maven-plugin:1.5.0:java (generate-scala-products) @ adam-core-spark2_2.11 ---
SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
SLF4J: Defaulting to no-operation (NOP) logger implementation
SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details.
[INFO] 
[INFO] --- exec-maven-plugin:1.5.0:java (generate-scala-projection-fields) @ adam-core-spark2_2.11 ---
SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
SLF4J: Defaulting to no-operation (NOP) logger implementation
SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details.
[INFO] 
[INFO] --- scalariform-maven-plugin:0.1.4:format (default-cli) @ adam-core-spark2_2.11 ---
[INFO] Modified 2 of 204 .scala files
[INFO] 
[INFO] --- maven-resources-plugin:3.1.0:resources (default-resources) @ adam-core-spark2_2.11 ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] skip non existing resourceDirectory /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-core/src/main/resources
[INFO] 
[INFO] --- scala-maven-plugin:3.2.2:compile (scala-compile-first) @ adam-core-spark2_2.11 ---
[INFO] /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-core/src/main/java:-1: info: compiling
[INFO] /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-core/src/main/scala:-1: info: compiling
[INFO] /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-core/target/generated-sources/src/main/scala:-1: info: compiling
[INFO] Compiling 140 source files to /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-core/target/2.11.12/classes at 1597768165705
[WARNING] /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-core/src/main/scala/org/bdgenomics/adam/rdd/GenomicDataset.scala:3091: warning: no valid targets for annotation on value uTag - it is discarded unused. You may specify targets with meta-annotations, e.g. @(transient @getter)
[WARNING]   @transient val uTag: TypeTag[U]
[WARNING]    ^
[WARNING] warning: there were 53 deprecation warnings; re-run with -deprecation for details
[WARNING] warning: there were 5 feature warnings; re-run with -feature for details
[WARNING] three warnings found
[INFO] prepare-compile in 0 s
[INFO] compile in 31 s
[INFO] 
[INFO] --- maven-compiler-plugin:3.8.0:compile (default-compile) @ adam-core-spark2_2.11 ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- build-helper-maven-plugin:3.0.0:add-test-source (add-test-source) @ adam-core-spark2_2.11 ---
[INFO] Test Source directory: /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-core/src/test/scala added.
[INFO] 
[INFO] --- maven-resources-plugin:3.1.0:testResources (default-testResources) @ adam-core-spark2_2.11 ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] Copying 152 resources
[INFO] 
[INFO] --- scala-maven-plugin:3.2.2:testCompile (scala-test-compile-first) @ adam-core-spark2_2.11 ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- maven-compiler-plugin:3.8.0:testCompile (default-testCompile) @ adam-core-spark2_2.11 ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- maven-surefire-plugin:3.0.0-M3:test (default-test) @ adam-core-spark2_2.11 ---
[INFO] Tests are skipped.
[INFO] 
[INFO] --- scalatest-maven-plugin:2.0.0:test (test) @ adam-core-spark2_2.11 ---
Discovery starting.
Discovery completed in 1 second, 195 milliseconds.
Run starting. Expected test count is: 0
Run completed in 1 second, 199 milliseconds.
Total number of tests run: 0
Suites: completed 0, aborted 0
Tests: succeeded 0, failed 0, canceled 0, ignored 0, pending 0
No tests were executed.
[INFO] 
[INFO] --- maven-jar-plugin:3.2.0:jar (default-jar) @ adam-core-spark2_2.11 ---
[INFO] Building jar: /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-core/target/adam-core-spark2_2.11-0.33.0-SNAPSHOT.jar
[INFO] 
[INFO] --- maven-jar-plugin:3.2.0:test-jar (default) @ adam-core-spark2_2.11 ---
[INFO] 
[INFO] -------------< org.bdgenomics.adam:adam-apis-spark2_2.11 >--------------
[INFO] Building ADAM_2.11: APIs for Java, Python 0.33.0-SNAPSHOT          [5/8]
[INFO] --------------------------------[ jar ]---------------------------------
[INFO] 
[INFO] --- maven-enforcer-plugin:1.0:enforce (enforce-versions) @ adam-apis-spark2_2.11 ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.0:enforce (enforce-maven) @ adam-apis-spark2_2.11 ---
[INFO] 
[INFO] --- build-helper-maven-plugin:3.0.0:add-source (add-source) @ adam-apis-spark2_2.11 ---
[INFO] Source directory: /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-apis/src/main/scala added.
[INFO] 
[INFO] --- scalariform-maven-plugin:0.1.4:format (default-cli) @ adam-apis-spark2_2.11 ---
[INFO] Modified 0 of 5 .scala files
[INFO] 
[INFO] --- maven-resources-plugin:3.1.0:resources (default-resources) @ adam-apis-spark2_2.11 ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] skip non existing resourceDirectory /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-apis/src/main/resources
[INFO] 
[INFO] --- scala-maven-plugin:3.2.2:compile (scala-compile-first) @ adam-apis-spark2_2.11 ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- maven-compiler-plugin:3.8.0:compile (default-compile) @ adam-apis-spark2_2.11 ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- build-helper-maven-plugin:3.0.0:add-test-source (add-test-source) @ adam-apis-spark2_2.11 ---
[INFO] Test Source directory: /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-apis/src/test/scala added.
[INFO] 
[INFO] --- maven-resources-plugin:3.1.0:testResources (default-testResources) @ adam-apis-spark2_2.11 ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] Copying 2 resources
[INFO] 
[INFO] --- scala-maven-plugin:3.2.2:testCompile (scala-test-compile-first) @ adam-apis-spark2_2.11 ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- maven-compiler-plugin:3.8.0:testCompile (default-testCompile) @ adam-apis-spark2_2.11 ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- maven-surefire-plugin:3.0.0-M3:test (default-test) @ adam-apis-spark2_2.11 ---
[INFO] Tests are skipped.
[INFO] 
[INFO] --- scalatest-maven-plugin:2.0.0:test (test) @ adam-apis-spark2_2.11 ---
Discovery starting.
Discovery completed in 131 milliseconds.
Run starting. Expected test count is: 0
Run completed in 136 milliseconds.
Total number of tests run: 0
Suites: completed 0, aborted 0
Tests: succeeded 0, failed 0, canceled 0, ignored 0, pending 0
No tests were executed.
[INFO] 
[INFO] --- maven-jar-plugin:3.2.0:jar (default-jar) @ adam-apis-spark2_2.11 ---
[INFO] 
[INFO] --- maven-jar-plugin:3.2.0:test-jar (default) @ adam-apis-spark2_2.11 ---
[INFO] 
[INFO] --------------< org.bdgenomics.adam:adam-cli-spark2_2.11 >--------------
[INFO] Building ADAM_2.11: CLI 0.33.0-SNAPSHOT                            [6/8]
[INFO] --------------------------------[ jar ]---------------------------------
[INFO] 
[INFO] --- maven-enforcer-plugin:1.0:enforce (enforce-versions) @ adam-cli-spark2_2.11 ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.0:enforce (enforce-maven) @ adam-cli-spark2_2.11 ---
[INFO] 
[INFO] --- build-helper-maven-plugin:3.0.0:timestamp-property (timestamp-property) @ adam-cli-spark2_2.11 ---
[INFO] 
[INFO] --- git-commit-id-plugin:2.2.2:revision (default) @ adam-cli-spark2_2.11 ---
[INFO] 
[INFO] --- templating-maven-plugin:1.0.0:filter-sources (filter-src) @ adam-cli-spark2_2.11 ---
[INFO] Coping files with filtering to temporary directory.
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] Copying 1 resource
[INFO] No files needs to be copied to output directory. Up to date: /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-cli/target/generated-sources/java-templates
[INFO] Source directory: /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-cli/target/generated-sources/java-templates added.
[INFO] 
[INFO] --- build-helper-maven-plugin:3.0.0:add-source (add-source) @ adam-cli-spark2_2.11 ---
[INFO] Source directory: /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-cli/src/main/scala added.
[INFO] 
[INFO] --- scalariform-maven-plugin:0.1.4:format (default-cli) @ adam-cli-spark2_2.11 ---
[INFO] Modified 0 of 29 .scala files
[INFO] 
[INFO] --- maven-resources-plugin:3.1.0:resources (default-resources) @ adam-cli-spark2_2.11 ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] skip non existing resourceDirectory /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-cli/src/main/resources
[INFO] 
[INFO] --- scala-maven-plugin:3.2.2:compile (scala-compile-first) @ adam-cli-spark2_2.11 ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- maven-compiler-plugin:3.8.0:compile (default-compile) @ adam-cli-spark2_2.11 ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- build-helper-maven-plugin:3.0.0:add-test-source (add-test-source) @ adam-cli-spark2_2.11 ---
[INFO] Test Source directory: /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-cli/src/test/scala added.
[INFO] 
[INFO] --- maven-resources-plugin:3.1.0:testResources (default-testResources) @ adam-cli-spark2_2.11 ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] Copying 15 resources
[INFO] 
[INFO] --- scala-maven-plugin:3.2.2:testCompile (scala-test-compile-first) @ adam-cli-spark2_2.11 ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- maven-compiler-plugin:3.8.0:testCompile (default-testCompile) @ adam-cli-spark2_2.11 ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- maven-surefire-plugin:3.0.0-M3:test (default-test) @ adam-cli-spark2_2.11 ---
[INFO] Tests are skipped.
[INFO] 
[INFO] --- scalatest-maven-plugin:2.0.0:test (test) @ adam-cli-spark2_2.11 ---
Discovery starting.
Discovery completed in 140 milliseconds.
Run starting. Expected test count is: 0
Run completed in 145 milliseconds.
Total number of tests run: 0
Suites: completed 0, aborted 0
Tests: succeeded 0, failed 0, canceled 0, ignored 0, pending 0
No tests were executed.
[INFO] 
[INFO] --- maven-jar-plugin:3.2.0:jar (default-jar) @ adam-cli-spark2_2.11 ---
[INFO] 
[INFO] -----------< org.bdgenomics.adam:adam-assembly-spark2_2.11 >------------
[INFO] Building ADAM_2.11: Assembly 0.33.0-SNAPSHOT                       [7/8]
[INFO] --------------------------------[ jar ]---------------------------------
[INFO] 
[INFO] --- maven-enforcer-plugin:1.0:enforce (enforce-versions) @ adam-assembly-spark2_2.11 ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.0:enforce (enforce-maven) @ adam-assembly-spark2_2.11 ---
[INFO] 
[INFO] --- git-commit-id-plugin:2.2.2:revision (default) @ adam-assembly-spark2_2.11 ---
[INFO] 
[INFO] --- templating-maven-plugin:1.0.0:filter-sources (filter-src) @ adam-assembly-spark2_2.11 ---
[INFO] Request to add '/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-assembly/src/main/java-templates' folder. Not added since it does not exist.
[INFO] 
[INFO] --- build-helper-maven-plugin:3.0.0:add-source (add-source) @ adam-assembly-spark2_2.11 ---
[INFO] Source directory: /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-assembly/src/main/scala added.
[INFO] 
[INFO] --- scalariform-maven-plugin:0.1.4:format (default-cli) @ adam-assembly-spark2_2.11 ---
[INFO] Modified 0 of 1 .scala files
[INFO] 
[INFO] --- maven-resources-plugin:3.1.0:resources (default-resources) @ adam-assembly-spark2_2.11 ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] skip non existing resourceDirectory /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-assembly/src/main/resources
[INFO] 
[INFO] --- scala-maven-plugin:3.2.2:compile (scala-compile-first) @ adam-assembly-spark2_2.11 ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- maven-compiler-plugin:3.8.0:compile (default-compile) @ adam-assembly-spark2_2.11 ---
[INFO] Nothing to compile - all classes are up to date
[INFO] 
[INFO] --- build-helper-maven-plugin:3.0.0:add-test-source (add-test-source) @ adam-assembly-spark2_2.11 ---
[INFO] Test Source directory: /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-assembly/src/test/scala added.
[INFO] 
[INFO] --- maven-resources-plugin:3.1.0:testResources (default-testResources) @ adam-assembly-spark2_2.11 ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] skip non existing resourceDirectory /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-assembly/src/test/resources
[INFO] 
[INFO] --- scala-maven-plugin:3.2.2:testCompile (scala-test-compile-first) @ adam-assembly-spark2_2.11 ---
[INFO] No sources to compile
[INFO] 
[INFO] --- maven-compiler-plugin:3.8.0:testCompile (default-testCompile) @ adam-assembly-spark2_2.11 ---
[INFO] No sources to compile
[INFO] 
[INFO] --- maven-surefire-plugin:3.0.0-M3:test (default-test) @ adam-assembly-spark2_2.11 ---
[INFO] Tests are skipped.
[INFO] 
[INFO] --- maven-jar-plugin:3.2.0:jar (default-jar) @ adam-assembly-spark2_2.11 ---
[INFO] 
[INFO] --- maven-shade-plugin:3.2.0:shade (default) @ adam-assembly-spark2_2.11 ---
[INFO] Including org.bdgenomics.adam:adam-cli-spark2_2.11:jar:0.33.0-SNAPSHOT in the shaded jar.
[INFO] Including org.bdgenomics.utils:utils-misc-spark2_2.11:jar:0.3.0 in the shaded jar.
[INFO] Including org.bdgenomics.utils:utils-io-spark2_2.11:jar:0.3.0 in the shaded jar.
[INFO] Including org.apache.httpcomponents:httpclient:jar:4.5.7 in the shaded jar.
[INFO] Including org.apache.httpcomponents:httpcore:jar:4.4.11 in the shaded jar.
[INFO] Including commons-logging:commons-logging:jar:1.2 in the shaded jar.
[INFO] Including commons-codec:commons-codec:jar:1.11 in the shaded jar.
[INFO] Including org.bdgenomics.utils:utils-cli-spark2_2.11:jar:0.3.0 in the shaded jar.
[INFO] Including org.clapper:grizzled-slf4j_2.11:jar:1.3.4 in the shaded jar.
[INFO] Including org.slf4j:slf4j-api:jar:1.7.30 in the shaded jar.
[INFO] Including org.apache.parquet:parquet-avro:jar:1.10.1 in the shaded jar.
[INFO] Including org.apache.parquet:parquet-column:jar:1.10.1 in the shaded jar.
[INFO] Including org.apache.parquet:parquet-common:jar:1.10.1 in the shaded jar.
[INFO] Including org.apache.parquet:parquet-encoding:jar:1.10.1 in the shaded jar.
[INFO] Including org.apache.parquet:parquet-hadoop:jar:1.10.1 in the shaded jar.
[INFO] Including org.apache.parquet:parquet-jackson:jar:1.10.1 in the shaded jar.
[INFO] Including commons-pool:commons-pool:jar:1.6 in the shaded jar.
[INFO] Including org.apache.parquet:parquet-format:jar:2.4.0 in the shaded jar.
[INFO] Including org.bdgenomics.bdg-formats:bdg-formats:jar:0.15.0 in the shaded jar.
[INFO] Including org.apache.avro:avro:jar:1.8.2 in the shaded jar.
[INFO] Including org.codehaus.jackson:jackson-core-asl:jar:1.9.13 in the shaded jar.
[INFO] Including org.codehaus.jackson:jackson-mapper-asl:jar:1.9.13 in the shaded jar.
[INFO] Including com.thoughtworks.paranamer:paranamer:jar:2.8 in the shaded jar.
[INFO] Including org.xerial.snappy:snappy-java:jar:1.1.1.3 in the shaded jar.
[INFO] Including org.apache.commons:commons-compress:jar:1.8.1 in the shaded jar.
[INFO] Including org.tukaani:xz:jar:1.5 in the shaded jar.
[INFO] Including org.bdgenomics.adam:adam-core-spark2_2.11:jar:0.33.0-SNAPSHOT in the shaded jar.
[INFO] Including org.bdgenomics.utils:utils-intervalrdd-spark2_2.11:jar:0.3.0 in the shaded jar.
[INFO] Including com.esotericsoftware.kryo:kryo:jar:2.24.0 in the shaded jar.
[INFO] Including com.esotericsoftware.minlog:minlog:jar:1.2 in the shaded jar.
[INFO] Including org.objenesis:objenesis:jar:2.1 in the shaded jar.
[INFO] Including commons-io:commons-io:jar:2.6 in the shaded jar.
[INFO] Including it.unimi.dsi:fastutil:jar:6.6.5 in the shaded jar.
[INFO] Including org.seqdoop:hadoop-bam:jar:7.9.2 in the shaded jar.
[INFO] Including com.github.jsr203hadoop:jsr203hadoop:jar:1.0.3 in the shaded jar.
[INFO] Including com.github.samtools:htsjdk:jar:2.19.0 in the shaded jar.
[INFO] Including org.apache.commons:commons-jexl:jar:2.1.1 in the shaded jar.
[INFO] Including gov.nih.nlm.ncbi:ngs-java:jar:2.9.0 in the shaded jar.
[INFO] Including com.google.guava:guava:jar:27.0-jre in the shaded jar.
[INFO] Including com.google.guava:failureaccess:jar:1.0 in the shaded jar.
[INFO] Including com.google.guava:listenablefuture:jar:9999.0-empty-to-avoid-conflict-with-guava in the shaded jar.
[INFO] Including org.checkerframework:checker-qual:jar:2.5.2 in the shaded jar.
[INFO] Including com.google.errorprone:error_prone_annotations:jar:2.2.0 in the shaded jar.
[INFO] Including com.google.j2objc:j2objc-annotations:jar:1.1 in the shaded jar.
[INFO] Including org.codehaus.mojo:animal-sniffer-annotations:jar:1.17 in the shaded jar.
[INFO] Including org.bdgenomics.adam:adam-codegen-spark2_2.11:jar:0.33.0-SNAPSHOT in the shaded jar.
[INFO] Including org.bdgenomics.adam:adam-apis-spark2_2.11:jar:0.33.0-SNAPSHOT in the shaded jar.
[INFO] Including args4j:args4j:jar:2.33 in the shaded jar.
[INFO] Including net.codingwell:scala-guice_2.11:jar:4.2.1 in the shaded jar.
[INFO] Including com.google.inject:guice:jar:4.2.0 in the shaded jar.
[INFO] Including javax.inject:javax.inject:jar:1 in the shaded jar.
[INFO] Including aopalliance:aopalliance:jar:1.0 in the shaded jar.
[INFO] Including org.scala-lang:scala-reflect:jar:2.11.12 in the shaded jar.
[INFO] Including com.google.code.findbugs:jsr305:jar:1.3.9 in the shaded jar.
[WARNING] WORKAROUND:  refusing to add class org/apache/parquet/avro/AvroSchemaConverter$2.class from jar /home/jenkins/.m2/repository/org/apache/parquet/parquet-avro/1.10.1/parquet-avro-1.10.1.jar
[WARNING] WORKAROUND:  refusing to add class org/apache/parquet/avro/AvroSchemaConverter.class from jar /home/jenkins/.m2/repository/org/apache/parquet/parquet-avro/1.10.1/parquet-avro-1.10.1.jar
[WARNING] WORKAROUND:  refusing to add class org/apache/parquet/avro/AvroSchemaConverter$1.class from jar /home/jenkins/.m2/repository/org/apache/parquet/parquet-avro/1.10.1/parquet-avro-1.10.1.jar
[WARNING] jsr305-1.3.9.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 35 overlapping classes: 
[WARNING]   - javax.annotation.RegEx
[WARNING]   - javax.annotation.concurrent.Immutable
[WARNING]   - javax.annotation.meta.TypeQualifierDefault
[WARNING]   - javax.annotation.meta.TypeQualifier
[WARNING]   - javax.annotation.Syntax
[WARNING]   - javax.annotation.CheckForNull
[WARNING]   - javax.annotation.Nonnull
[WARNING]   - javax.annotation.CheckReturnValue
[WARNING]   - javax.annotation.meta.TypeQualifierNickname
[WARNING]   - javax.annotation.MatchesPattern
[WARNING]   - 25 more...
[WARNING] jackson-mapper-asl-1.9.13.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 502 overlapping classes: 
[WARNING]   - org.codehaus.jackson.map.ext.DOMSerializer
[WARNING]   - org.codehaus.jackson.node.POJONode
[WARNING]   - org.codehaus.jackson.map.ser.StdSerializers$UtilDateSerializer
[WARNING]   - org.codehaus.jackson.map.deser.std.JsonNodeDeserializer$ArrayDeserializer
[WARNING]   - org.codehaus.jackson.map.ext.JodaDeserializers$LocalDateDeserializer
[WARNING]   - org.codehaus.jackson.map.deser.std.PrimitiveArrayDeserializers$StringDeser
[WARNING]   - org.codehaus.jackson.map.util.Comparators$1
[WARNING]   - org.codehaus.jackson.map.util.StdDateFormat
[WARNING]   - org.codehaus.jackson.map.KeyDeserializer
[WARNING]   - org.codehaus.jackson.map.MapperConfig$Impl
[WARNING]   - 492 more...
[WARNING] ngs-java-2.9.0.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 73 overlapping classes: 
[WARNING]   - ngs.itf.ReadItf
[WARNING]   - gov.nih.nlm.ncbi.ngs.error.cause.JvmErrorCause
[WARNING]   - ngs.ReferenceIterator
[WARNING]   - gov.nih.nlm.ncbi.ngs.Manager$1
[WARNING]   - gov.nih.nlm.ncbi.ngs.LMProperties
[WARNING]   - gov.nih.nlm.ncbi.ngs.error.LibraryLoadError
[WARNING]   - gov.nih.nlm.ncbi.ngs.LibDependencies
[WARNING]   - gov.nih.nlm.ncbi.ngs.error.cause.ConnectionProblemCause
[WARNING]   - ngs.itf.PileupEventItf
[WARNING]   - gov.nih.nlm.ncbi.ngs.LibManager$Location
[WARNING]   - 63 more...
[WARNING] adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar, utils-io-spark2_2.11-0.3.0.jar define 22 overlapping classes: 
[WARNING]   - org.bdgenomics.utils.io.HTTPRangedByteAccess$$anonfun$3
[WARNING]   - org.bdgenomics.utils.io.FileLocator
[WARNING]   - org.bdgenomics.utils.io.ByteAccess
[WARNING]   - org.bdgenomics.utils.io.HTTPRangedByteAccess
[WARNING]   - org.bdgenomics.utils.io.HTTPRangedByteAccess$$anonfun$readByteStream$1
[WARNING]   - org.bdgenomics.utils.io.HTTPRangedByteAccess$$anonfun$1
[WARNING]   - org.bdgenomics.utils.io.ByteAccess$$anonfun$readFully$2
[WARNING]   - org.bdgenomics.utils.io.HTTPRangedByteAccess$$anonfun$2
[WARNING]   - org.bdgenomics.utils.io.LocalFileByteAccess
[WARNING]   - org.bdgenomics.utils.io.HTTPFileLocator$
[WARNING]   - 12 more...
[WARNING] javax.inject-1.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 6 overlapping classes: 
[WARNING]   - javax.inject.Inject
[WARNING]   - javax.inject.Singleton
[WARNING]   - javax.inject.Scope
[WARNING]   - javax.inject.Named
[WARNING]   - javax.inject.Provider
[WARNING]   - javax.inject.Qualifier
[WARNING] error_prone_annotations-2.2.0.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 22 overlapping classes: 
[WARNING]   - com.google.errorprone.annotations.NoAllocation
[WARNING]   - com.google.errorprone.annotations.Var
[WARNING]   - com.google.errorprone.annotations.IncompatibleModifiers
[WARNING]   - com.google.errorprone.annotations.CompatibleWith
[WARNING]   - com.google.errorprone.annotations.concurrent.LockMethod
[WARNING]   - com.google.errorprone.annotations.FormatString
[WARNING]   - com.google.errorprone.annotations.DoNotCall
[WARNING]   - com.google.errorprone.annotations.Immutable
[WARNING]   - com.google.errorprone.annotations.RestrictedApi
[WARNING]   - com.google.errorprone.annotations.ForOverride
[WARNING]   - 12 more...
[WARNING] commons-pool-1.6.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 55 overlapping classes: 
[WARNING]   - org.apache.commons.pool.PoolUtils$PoolableObjectFactoryAdaptor
[WARNING]   - org.apache.commons.pool.impl.GenericObjectPool$1
[WARNING]   - org.apache.commons.pool.impl.GenericObjectPool$Latch
[WARNING]   - org.apache.commons.pool.PoolUtils$ErodingFactor
[WARNING]   - org.apache.commons.pool.BasePoolableObjectFactory
[WARNING]   - org.apache.commons.pool.PoolUtils$KeyedPoolableObjectFactoryAdaptor
[WARNING]   - org.apache.commons.pool.impl.EvictionTimer$PrivilegedGetTccl
[WARNING]   - org.apache.commons.pool.impl.StackKeyedObjectPool
[WARNING]   - org.apache.commons.pool.BaseKeyedPoolableObjectFactory
[WARNING]   - org.apache.commons.pool.impl.GenericKeyedObjectPool$ObjectQueue
[WARNING]   - 45 more...
[WARNING] aopalliance-1.0.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 9 overlapping classes: 
[WARNING]   - org.aopalliance.intercept.ConstructorInterceptor
[WARNING]   - org.aopalliance.intercept.MethodInvocation
[WARNING]   - org.aopalliance.intercept.MethodInterceptor
[WARNING]   - org.aopalliance.intercept.Invocation
[WARNING]   - org.aopalliance.aop.AspectException
[WARNING]   - org.aopalliance.intercept.Interceptor
[WARNING]   - org.aopalliance.intercept.Joinpoint
[WARNING]   - org.aopalliance.aop.Advice
[WARNING]   - org.aopalliance.intercept.ConstructorInvocation
[WARNING] args4j-2.33.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 74 overlapping classes: 
[WARNING]   - org.kohsuke.args4j.spi.DoubleOptionHandler
[WARNING]   - org.kohsuke.args4j.spi.MethodSetter
[WARNING]   - org.kohsuke.args4j.spi.MacAddressOptionHandler
[WARNING]   - org.kohsuke.args4j.spi.StringArrayOptionHandler
[WARNING]   - org.kohsuke.args4j.spi.SubCommand
[WARNING]   - org.kohsuke.args4j.spi.PatternOptionHandler
[WARNING]   - org.kohsuke.args4j.ParserProperties$1
[WARNING]   - org.kohsuke.args4j.OptionHandlerFilter$2
[WARNING]   - org.kohsuke.args4j.spi.MultiFileOptionHandler
[WARNING]   - org.kohsuke.args4j.OptionHandlerRegistry$DefaultConstructorHandlerFactory
[WARNING]   - 64 more...
[WARNING] guice-4.2.0.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 573 overlapping classes: 
[WARNING]   - com.google.inject.Scope
[WARNING]   - com.google.inject.Binding
[WARNING]   - com.google.inject.internal.cglib.core.$EmitUtils$3
[WARNING]   - com.google.inject.spi.TypeConverter
[WARNING]   - com.google.inject.internal.ConstructionProxy
[WARNING]   - com.google.inject.spi.InjectionPoint
[WARNING]   - com.google.inject.spi.StaticInjectionRequest
[WARNING]   - com.google.inject.internal.cglib.proxy.$FixedValueGenerator
[WARNING]   - com.google.inject.internal.cglib.proxy.$DispatcherGenerator
[WARNING]   - com.google.inject.spi.Elements$ElementsAsModule
[WARNING]   - 563 more...
[WARNING] parquet-hadoop-1.10.1.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 162 overlapping classes: 
[WARNING]   - org.apache.parquet.hadoop.mapred.DeprecatedParquetOutputFormat
[WARNING]   - org.apache.parquet.hadoop.api.WriteSupport$WriteContext
[WARNING]   - org.apache.parquet.format.converter.ParquetMetadataConverter$RangeMetadataFilter
[WARNING]   - org.apache.parquet.hadoop.ColumnChunkPageReadStore$ColumnChunkPageReader$1
[WARNING]   - org.apache.parquet.format.converter.ParquetMetadataConverter$2
[WARNING]   - org.apache.parquet.hadoop.metadata.ColumnChunkMetaData
[WARNING]   - org.apache.parquet.hadoop.api.WriteSupport$FinalizedWriteContext
[WARNING]   - org.apache.parquet.hadoop.util.HadoopPositionOutputStream
[WARNING]   - org.apache.parquet.HadoopReadOptions$1
[WARNING]   - org.apache.parquet.format.converter.ParquetMetadataConverter$NoFilter
[WARNING]   - 152 more...
[WARNING] adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar, fastutil-6.6.5.jar define 10700 overlapping classes: 
[WARNING]   - it.unimi.dsi.fastutil.doubles.Double2IntRBTreeMap$Submap$KeySet
[WARNING]   - it.unimi.dsi.fastutil.longs.Long2CharAVLTreeMap$2$1
[WARNING]   - it.unimi.dsi.fastutil.bytes.Byte2ObjectLinkedOpenHashMap$EntryIterator
[WARNING]   - it.unimi.dsi.fastutil.ints.Int2ReferenceRBTreeMap$Submap
[WARNING]   - it.unimi.dsi.fastutil.shorts.Short2FloatOpenCustomHashMap$KeySet
[WARNING]   - it.unimi.dsi.fastutil.bytes.Byte2BooleanRBTreeMap$Submap$1
[WARNING]   - it.unimi.dsi.fastutil.floats.AbstractFloat2ShortSortedMap$ValuesCollection
[WARNING]   - it.unimi.dsi.fastutil.longs.Long2ReferenceRBTreeMap$Submap$2
[WARNING]   - it.unimi.dsi.fastutil.chars.AbstractChar2LongSortedMap$ValuesIterator
[WARNING]   - it.unimi.dsi.fastutil.doubles.DoubleHeaps
[WARNING]   - 10690 more...
[WARNING] adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar, httpclient-4.5.7.jar define 467 overlapping classes: 
[WARNING]   - org.apache.http.impl.cookie.RFC2109Spec
[WARNING]   - org.apache.http.impl.execchain.MainClientExec
[WARNING]   - org.apache.http.conn.routing.RouteInfo$TunnelType
[WARNING]   - org.apache.http.client.methods.HttpGet
[WARNING]   - org.apache.http.impl.cookie.BrowserCompatSpecFactory
[WARNING]   - org.apache.http.impl.client.HttpAuthenticator
[WARNING]   - org.apache.http.conn.ManagedClientConnection
[WARNING]   - org.apache.http.client.protocol.RequestAuthCache
[WARNING]   - org.apache.http.conn.params.ConnConnectionParamBean
[WARNING]   - org.apache.http.impl.client.IdleConnectionEvictor
[WARNING]   - 457 more...
[WARNING] adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar, commons-io-2.6.jar define 127 overlapping classes: 
[WARNING]   - org.apache.commons.io.FileCleaningTracker
[WARNING]   - org.apache.commons.io.comparator.SizeFileComparator
[WARNING]   - org.apache.commons.io.input.CloseShieldInputStream
[WARNING]   - org.apache.commons.io.ByteOrderParser
[WARNING]   - org.apache.commons.io.filefilter.EmptyFileFilter
[WARNING]   - org.apache.commons.io.monitor.FileEntry
[WARNING]   - org.apache.commons.io.output.ThresholdingOutputStream
[WARNING]   - org.apache.commons.io.input.TailerListener
[WARNING]   - org.apache.commons.io.IOExceptionWithCause
[WARNING]   - org.apache.commons.io.filefilter.NotFileFilter
[WARNING]   - 117 more...
[WARNING] htsjdk-2.19.0.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 993 overlapping classes: 
[WARNING]   - htsjdk.samtools.cram.ref.ReferenceSource
[WARNING]   - htsjdk.samtools.cram.compression.ExternalCompressor$3
[WARNING]   - htsjdk.samtools.HighAccuracyDownsamplingIterator
[WARNING]   - htsjdk.samtools.util.zip.DeflaterFactory
[WARNING]   - htsjdk.samtools.filter.DuplicateReadFilter
[WARNING]   - htsjdk.samtools.cram.encoding.core.huffmanUtils.HuffmanCode$1
[WARNING]   - htsjdk.samtools.cram.encoding.core.SubexponentialIntegerEncoding
[WARNING]   - htsjdk.variant.vcf.VCFEncoder
[WARNING]   - htsjdk.samtools.util.CloserUtil
[WARNING]   - htsjdk.tribble.TribbleException$FeatureFileDoesntExist
[WARNING]   - 983 more...
[WARNING] parquet-jackson-1.10.1.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 623 overlapping classes: 
[WARNING]   - shaded.parquet.org.codehaus.jackson.map.InjectableValues$Std
[WARNING]   - shaded.parquet.org.codehaus.jackson.map.introspect.POJOPropertyBuilder$Node
[WARNING]   - shaded.parquet.org.codehaus.jackson.util.TokenBuffer$1
[WARNING]   - shaded.parquet.org.codehaus.jackson.type.TypeReference
[WARNING]   - shaded.parquet.org.codehaus.jackson.map.deser.std.FromStringDeserializer$LocaleDeserializer
[WARNING]   - shaded.parquet.org.codehaus.jackson.map.ser.BasicSerializerFactory
[WARNING]   - shaded.parquet.org.codehaus.jackson.map.deser.StdKeyDeserializer
[WARNING]   - shaded.parquet.org.codehaus.jackson.map.deser.std.StdKeyDeserializer$EnumKD
[WARNING]   - shaded.parquet.org.codehaus.jackson.map.ser.std.InetAddressSerializer
[WARNING]   - shaded.parquet.org.codehaus.jackson.map.jsontype.impl.StdTypeResolverBuilder
[WARNING]   - 613 more...
[WARNING] commons-jexl-2.1.1.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 178 overlapping classes: 
[WARNING]   - org.apache.commons.jexl2.internal.AbstractExecutor$Get
[WARNING]   - org.apache.commons.jexl2.introspection.JexlPropertyGet
[WARNING]   - org.apache.commons.jexl2.parser.StringParser
[WARNING]   - org.apache.commons.jexl2.parser.ASTBitwiseOrNode
[WARNING]   - org.apache.commons.jexl2.internal.introspection.MethodKey$1
[WARNING]   - org.apache.commons.jexl2.Main
[WARNING]   - org.apache.commons.jexl2.parser.ASTForeachStatement
[WARNING]   - org.apache.commons.jexl2.introspection.Sandbox
[WARNING]   - org.apache.commons.jexl2.internal.introspection.ClassMap
[WARNING]   - org.apache.commons.jexl2.parser.ASTFunctionNode
[WARNING]   - 168 more...
[WARNING] utils-cli-spark2_2.11-0.3.0.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 19 overlapping classes: 
[WARNING]   - org.bdgenomics.utils.cli.Args4j
[WARNING]   - org.bdgenomics.utils.cli.ParquetArgs
[WARNING]   - org.bdgenomics.utils.cli.BDGSparkCommand$class
[WARNING]   - org.bdgenomics.utils.cli.BDGSparkCommand$$anonfun$run$1
[WARNING]   - org.bdgenomics.utils.cli.ParquetArgs$class
[WARNING]   - org.bdgenomics.utils.cli.SaveArgs
[WARNING]   - org.bdgenomics.utils.cli.Args4jBase
[WARNING]   - org.bdgenomics.utils.cli.ParquetSaveArgs
[WARNING]   - org.bdgenomics.utils.cli.ParquetLoadSaveArgs
[WARNING]   - org.bdgenomics.utils.cli.BDGCommandCompanion
[WARNING]   - 9 more...
[WARNING] adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar, kryo-2.24.0.jar define 193 overlapping classes: 
[WARNING]   - com.esotericsoftware.kryo.serializers.BeanSerializer$1
[WARNING]   - com.esotericsoftware.kryo.Registration
[WARNING]   - com.esotericsoftware.reflectasm.shaded.org.objectweb.asm.Handler
[WARNING]   - com.esotericsoftware.reflectasm.shaded.org.objectweb.asm.ByteVector
[WARNING]   - com.esotericsoftware.reflectasm.shaded.org.objectweb.asm.FieldVisitor
[WARNING]   - com.esotericsoftware.kryo.util.IntMap$Values
[WARNING]   - com.esotericsoftware.kryo.serializers.DefaultSerializers$IntSerializer
[WARNING]   - com.esotericsoftware.kryo.serializers.FieldSerializerUnsafeUtilImpl
[WARNING]   - com.esotericsoftware.kryo.serializers.JavaSerializer
[WARNING]   - com.esotericsoftware.kryo.serializers.ObjectField$ObjectIntField
[WARNING]   - 183 more...
[WARNING] parquet-common-1.10.1.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 83 overlapping classes: 
[WARNING]   - org.apache.parquet.SemanticVersion$SemanticVersionParseException
[WARNING]   - org.apache.parquet.bytes.SingleBufferInputStream
[WARNING]   - org.apache.parquet.Ints
[WARNING]   - org.apache.parquet.Version
[WARNING]   - org.apache.parquet.SemanticVersion$NumberOrString
[WARNING]   - org.apache.parquet.glob.GlobNode$Atom
[WARNING]   - org.apache.parquet.util.DynMethods$Builder
[WARNING]   - org.apache.parquet.bytes.BytesInput$EmptyBytesInput
[WARNING]   - org.apache.parquet.Exceptions
[WARNING]   - org.apache.parquet.bytes.MultiBufferInputStream$ConcatIterator
[WARNING]   - 73 more...
[WARNING] j2objc-annotations-1.1.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 12 overlapping classes: 
[WARNING]   - com.google.j2objc.annotations.Property
[WARNING]   - com.google.j2objc.annotations.RetainedWith
[WARNING]   - com.google.j2objc.annotations.RetainedLocalRef
[WARNING]   - com.google.j2objc.annotations.J2ObjCIncompatible
[WARNING]   - com.google.j2objc.annotations.AutoreleasePool
[WARNING]   - com.google.j2objc.annotations.LoopTranslation$LoopStyle
[WARNING]   - com.google.j2objc.annotations.ReflectionSupport$Level
[WARNING]   - com.google.j2objc.annotations.ReflectionSupport
[WARNING]   - com.google.j2objc.annotations.WeakOuter
[WARNING]   - com.google.j2objc.annotations.Weak
[WARNING]   - 2 more...
[WARNING] commons-codec-1.11.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 96 overlapping classes: 
[WARNING]   - org.apache.commons.codec.language.Nysiis
[WARNING]   - org.apache.commons.codec.language.bm.Rule$1
[WARNING]   - org.apache.commons.codec.language.bm.Rule$RPattern
[WARNING]   - org.apache.commons.codec.language.ColognePhonetic$CologneInputBuffer
[WARNING]   - org.apache.commons.codec.digest.HmacUtils
[WARNING]   - org.apache.commons.codec.language.bm.BeiderMorseEncoder
[WARNING]   - org.apache.commons.codec.digest.UnixCrypt
[WARNING]   - org.apache.commons.codec.language.Soundex
[WARNING]   - org.apache.commons.codec.cli.Digest
[WARNING]   - org.apache.commons.codec.binary.BinaryCodec
[WARNING]   - 86 more...
[WARNING] xz-1.5.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 105 overlapping classes: 
[WARNING]   - org.tukaani.xz.lzma.LZMADecoder$LengthDecoder
[WARNING]   - org.tukaani.xz.index.IndexDecoder
[WARNING]   - org.tukaani.xz.lzma.LZMADecoder
[WARNING]   - org.tukaani.xz.lzma.LZMAEncoderFast
[WARNING]   - org.tukaani.xz.lzma.LZMAEncoder$LengthEncoder
[WARNING]   - org.tukaani.xz.BlockOutputStream
[WARNING]   - org.tukaani.xz.simple.SimpleFilter
[WARNING]   - org.tukaani.xz.rangecoder.RangeCoder
[WARNING]   - org.tukaani.xz.XZOutputStream
[WARNING]   - org.tukaani.xz.UncompressedLZMA2OutputStream
[WARNING]   - 95 more...
[WARNING] animal-sniffer-annotations-1.17.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 1 overlapping classes: 
[WARNING]   - org.codehaus.mojo.animal_sniffer.IgnoreJRERequirement
[WARNING] jsr203hadoop-1.0.3.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 27 overlapping classes: 
[WARNING]   - hdfs.jsr203.HadoopDirectoryStream
[WARNING]   - hdfs.jsr203.HadoopPath
[WARNING]   - hdfs.jsr203.HadoopFileSystem$1
[WARNING]   - hdfs.jsr203.HadoopFileOwnerAttributeView
[WARNING]   - hdfs.jsr203.HadoopUserPrincipal
[WARNING]   - hdfs.jsr203.IAttributeReader
[WARNING]   - hdfs.jsr203.HadoopPath$1
[WARNING]   - hdfs.jsr203.HadoopBasicFileAttributes
[WARNING]   - hdfs.jsr203.HadoopDirectoryStream$1
[WARNING]   - hdfs.jsr203.package-info
[WARNING]   - 17 more...
[WARNING] scala-reflect-2.11.12.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 2829 overlapping classes: 
[WARNING]   - scala.reflect.runtime.ReflectionUtils
[WARNING]   - scala.reflect.internal.Scopes$LookupInaccessible$
[WARNING]   - scala.reflect.internal.Types$LazyType
[WARNING]   - scala.reflect.internal.Definitions$DefinitionsClass$$anonfun$newT1NullaryMethod$1
[WARNING]   - scala.reflect.internal.SymbolPairs$Cursor
[WARNING]   - scala.reflect.internal.Types$StaticallyAnnotatedType$
[WARNING]   - scala.reflect.runtime.SynchronizedOps$SynchronizedScope$$anonfun$isEmpty$1
[WARNING]   - scala.reflect.internal.Kinds$TypeConKind$$anonfun$buildState$3
[WARNING]   - scala.reflect.api.StandardLiftables$StandardUnliftableInstances$$anonfun$unliftTuple18$1
[WARNING]   - scala.reflect.runtime.SynchronizedSymbols$SynchronizedClassSymbol
[WARNING]   - 2819 more...
[WARNING] adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar, objenesis-2.1.jar define 37 overlapping classes: 
[WARNING]   - org.objenesis.ObjenesisBase
[WARNING]   - org.objenesis.instantiator.gcj.GCJInstantiator
[WARNING]   - org.objenesis.strategy.SingleInstantiatorStrategy
[WARNING]   - org.objenesis.ObjenesisHelper
[WARNING]   - org.objenesis.instantiator.sun.SunReflectionFactoryHelper
[WARNING]   - org.objenesis.instantiator.jrockit.JRockitLegacyInstantiator
[WARNING]   - org.objenesis.instantiator.sun.SunReflectionFactoryInstantiator
[WARNING]   - org.objenesis.instantiator.basic.NullInstantiator
[WARNING]   - org.objenesis.instantiator.android.Android17Instantiator
[WARNING]   - org.objenesis.instantiator.ObjectInstantiator
[WARNING]   - 27 more...
[WARNING] httpcore-4.4.11.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 252 overlapping classes: 
[WARNING]   - org.apache.http.protocol.HttpRequestHandler
[WARNING]   - org.apache.http.impl.io.ChunkedOutputStream
[WARNING]   - org.apache.http.protocol.ChainBuilder
[WARNING]   - org.apache.http.impl.entity.DisallowIdentityContentLengthStrategy
[WARNING]   - org.apache.http.impl.ConnSupport
[WARNING]   - org.apache.http.impl.io.DefaultHttpResponseParserFactory
[WARNING]   - org.apache.http.HttpClientConnection
[WARNING]   - org.apache.http.NameValuePair
[WARNING]   - org.apache.http.protocol.HttpExpectationVerifier
[WARNING]   - org.apache.http.impl.io.AbstractMessageWriter
[WARNING]   - 242 more...
[WARNING] adam-apis-spark2_2.11-0.33.0-SNAPSHOT.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 205 overlapping classes: 
[WARNING]   - org.bdgenomics.adam.api.java.FeaturesToFragmentsConverter
[WARNING]   - org.bdgenomics.adam.api.java.ToVariantDatasetConversion$class
[WARNING]   - org.bdgenomics.adam.api.java.ToCoverageDatasetConversion$class
[WARNING]   - org.bdgenomics.adam.api.java.FeaturesToVariantsDatasetConverter
[WARNING]   - org.bdgenomics.adam.api.java.ToFragmentDatasetConversion$$typecreator3$1
[WARNING]   - org.bdgenomics.adam.api.java.ToSliceDatasetConversion$class
[WARNING]   - org.bdgenomics.adam.api.java.AlignmentsToFeaturesConverter
[WARNING]   - org.bdgenomics.adam.api.java.FeaturesToReadsDatasetConverter
[WARNING]   - org.bdgenomics.adam.api.java.VariantsToVariantsConverter
[WARNING]   - org.bdgenomics.adam.api.java.CoverageToReadsDatasetConverter
[WARNING]   - 195 more...
[WARNING] guava-27.0-jre.jar, failureaccess-1.0.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 2 overlapping classes: 
[WARNING]   - com.google.common.util.concurrent.internal.InternalFutureFailureAccess
[WARNING]   - com.google.common.util.concurrent.internal.InternalFutures
[WARNING] paranamer-2.8.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 21 overlapping classes: 
[WARNING]   - com.thoughtworks.paranamer.PositionalParanamer
[WARNING]   - com.thoughtworks.paranamer.JavadocParanamer
[WARNING]   - com.thoughtworks.paranamer.BytecodeReadingParanamer
[WARNING]   - com.thoughtworks.paranamer.BytecodeReadingParanamer$Type
[WARNING]   - com.thoughtworks.paranamer.BytecodeReadingParanamer$1
[WARNING]   - com.thoughtworks.paranamer.JavadocParanamer$DirJavadocProvider
[WARNING]   - com.thoughtworks.paranamer.AnnotationParanamer$Jsr330Helper
[WARNING]   - com.thoughtworks.paranamer.BytecodeReadingParanamer$TypeCollector
[WARNING]   - com.thoughtworks.paranamer.AnnotationParanamer
[WARNING]   - com.thoughtworks.paranamer.NullParanamer
[WARNING]   - 11 more...
[WARNING] adam-cli-spark2_2.11-0.33.0-SNAPSHOT.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 152 overlapping classes: 
[WARNING]   - org.bdgenomics.adam.cli.PrintADAMArgs
[WARNING]   - org.bdgenomics.adam.cli.TransformVariants$$anonfun$maybeCoalesce$2
[WARNING]   - org.bdgenomics.adam.cli.FlagStat$
[WARNING]   - org.bdgenomics.adam.cli.TransformAlignments$$anonfun$7
[WARNING]   - org.bdgenomics.adam.cli.TransformAlignments$$anonfun$11
[WARNING]   - org.bdgenomics.adam.cli.TransformGenotypes$$anonfun$run$2
[WARNING]   - org.bdgenomics.adam.cli.TransformVariants$
[WARNING]   - org.bdgenomics.adam.cli.View$$anonfun$getFilter$1$1
[WARNING]   - org.bdgenomics.adam.cli.Coverage$$anonfun$run$1
[WARNING]   - org.bdgenomics.adam.cli.TransformAlignments$$anonfun$maybeCoalesce$2
[WARNING]   - 142 more...
[WARNING] slf4j-api-1.7.30.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 34 overlapping classes: 
[WARNING]   - org.slf4j.helpers.SubstituteLogger
[WARNING]   - org.slf4j.helpers.NamedLoggerBase
[WARNING]   - org.slf4j.helpers.NOPMDCAdapter
[WARNING]   - org.slf4j.MarkerFactory
[WARNING]   - org.slf4j.spi.LoggerFactoryBinder
[WARNING]   - org.slf4j.helpers.BasicMarker
[WARNING]   - org.slf4j.MDC$MDCCloseable
[WARNING]   - org.slf4j.spi.LocationAwareLogger
[WARNING]   - org.slf4j.helpers.MessageFormatter
[WARNING]   - org.slf4j.helpers.Util$ClassContextSecurityManager
[WARNING]   - 24 more...
[WARNING] scala-guice_2.11-4.2.1.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 95 overlapping classes: 
[WARNING]   - net.codingwell.scalaguice.ScalaModule$
[WARNING]   - net.codingwell.scalaguice.ScalaModule$ScalaScopedBindingBuilder
[WARNING]   - net.codingwell.scalaguice.InjectorExtensions$ScalaInjector$$typecreator2$1
[WARNING]   - net.codingwell.scalaguice.ScalaModule$ScalaLinkedBindingBuilder$$anon$2$$typecreator1$1
[WARNING]   - net.codingwell.scalaguice.binder.ScopedBindingBuilderProxy
[WARNING]   - net.codingwell.scalaguice.InternalModule$BindingBuilder$$typecreator1$2
[WARNING]   - net.codingwell.scalaguice.ScalaModule$$anonfun$filterTrace$2
[WARNING]   - net.codingwell.scalaguice.ScalaPrivateModule$ElementBuilder
[WARNING]   - net.codingwell.scalaguice.binder.LinkedBindingBuilderProxy$class
[WARNING]   - net.codingwell.scalaguice.ScalaModule$ScalaAnnotatedBindingBuilder
[WARNING]   - 85 more...
[WARNING] commons-compress-1.8.1.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 191 overlapping classes: 
[WARNING]   - org.apache.commons.compress.archivers.sevenz.SevenZArchiveEntry
[WARNING]   - org.apache.commons.compress.archivers.dump.ShortFileException
[WARNING]   - org.apache.commons.compress.utils.CountingInputStream
[WARNING]   - org.apache.commons.compress.compressors.bzip2.CRC
[WARNING]   - org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream
[WARNING]   - org.apache.commons.compress.archivers.dump.DumpArchiveEntry
[WARNING]   - org.apache.commons.compress.changes.ChangeSetPerformer$ArchiveEntryIterator
[WARNING]   - org.apache.commons.compress.compressors.bzip2.BlockSort
[WARNING]   - org.apache.commons.compress.archivers.tar.TarArchiveEntry
[WARNING]   - org.apache.commons.compress.archivers.dump.UnsupportedCompressionAlgorithmException
[WARNING]   - 181 more...
[WARNING] adam-codegen-spark2_2.11-0.33.0-SNAPSHOT.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 18 overlapping classes: 
[WARNING]   - org.bdgenomics.adam.codegen.DumpSchemasToProjectionEnums$
[WARNING]   - org.bdgenomics.adam.codegen.DumpSchemasToProjectionEnums
[WARNING]   - org.bdgenomics.adam.codegen.DumpSchemasToProduct$$anonfun$getters$1
[WARNING]   - org.bdgenomics.adam.codegen.DumpSchemasToProduct$$anonfun$setters$1
[WARNING]   - org.bdgenomics.adam.codegen.DumpSchemasToProduct$$anonfun$1
[WARNING]   - org.bdgenomics.adam.codegen.DumpSchemasToProduct$$anonfun$apply$1
[WARNING]   - org.bdgenomics.adam.codegen.DumpSchemasToProduct
[WARNING]   - org.bdgenomics.adam.codegen.DumpSchemasToProduct$
[WARNING]   - org.bdgenomics.adam.codegen.DumpSchemasToProduct$$anonfun$fields$1
[WARNING]   - org.bdgenomics.adam.codegen.DumpSchemasToProjectionEnums$$anonfun$fields$1
[WARNING]   - 8 more...
[WARNING] commons-logging-1.2.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 28 overlapping classes: 
[WARNING]   - org.apache.commons.logging.LogSource
[WARNING]   - org.apache.commons.logging.impl.ServletContextCleaner
[WARNING]   - org.apache.commons.logging.Log
[WARNING]   - org.apache.commons.logging.LogFactory$3
[WARNING]   - org.apache.commons.logging.impl.LogFactoryImpl$2
[WARNING]   - org.apache.commons.logging.impl.LogKitLogger
[WARNING]   - org.apache.commons.logging.impl.Jdk14Logger
[WARNING]   - org.apache.commons.logging.LogConfigurationException
[WARNING]   - org.apache.commons.logging.impl.WeakHashtable$Referenced
[WARNING]   - org.apache.commons.logging.impl.WeakHashtable$WeakKey
[WARNING]   - 18 more...
[WARNING] minlog-1.2.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 2 overlapping classes: 
[WARNING]   - com.esotericsoftware.minlog.Log
[WARNING]   - com.esotericsoftware.minlog.Log$Logger
[WARNING] avro-1.8.2.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 1172 overlapping classes: 
[WARNING]   - org.apache.avro.message.SchemaStore
[WARNING]   - avro.shaded.com.google.common.collect.SingletonImmutableList
[WARNING]   - org.apache.avro.io.EncoderFactory$DefaultEncoderFactory
[WARNING]   - avro.shaded.com.google.common.collect.Iterables$15
[WARNING]   - org.apache.avro.GuavaClasses
[WARNING]   - avro.shaded.com.google.common.collect.Sets$PowerSet$1$1
[WARNING]   - avro.shaded.com.google.common.collect.RegularImmutableMap
[WARNING]   - org.apache.avro.generic.GenericDatumReader$2
[WARNING]   - avro.shaded.com.google.common.collect.Synchronized$SynchronizedSortedSet
[WARNING]   - org.apache.avro.file.BZip2Codec
[WARNING]   - 1162 more...
[WARNING] hadoop-bam-7.9.2.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 115 overlapping classes: 
[WARNING]   - org.seqdoop.hadoop_bam.BAMSplitGuesser
[WARNING]   - org.seqdoop.hadoop_bam.util.SAMHeaderReader
[WARNING]   - org.seqdoop.hadoop_bam.QseqInputFormat
[WARNING]   - org.seqdoop.hadoop_bam.KeyIgnoringBCFRecordWriter
[WARNING]   - org.seqdoop.hadoop_bam.FastaInputFormat$1
[WARNING]   - org.seqdoop.hadoop_bam.util.SAMOutputPreparer$1
[WARNING]   - org.seqdoop.hadoop_bam.QseqOutputFormat$QseqRecordWriter
[WARNING]   - org.seqdoop.hadoop_bam.FastaInputFormat
[WARNING]   - org.seqdoop.hadoop_bam.LineReader
[WARNING]   - org.seqdoop.hadoop_bam.util.BGZFCodec
[WARNING]   - 105 more...
[WARNING] parquet-column-1.10.1.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 792 overlapping classes: 
[WARNING]   - org.apache.parquet.it.unimi.dsi.fastutil.longs.LongComparator
[WARNING]   - org.apache.parquet.column.values.dictionary.DictionaryValuesWriter$PlainIntegerDictionaryValuesWriter
[WARNING]   - org.apache.parquet.io.PrimitiveColumnIO
[WARNING]   - org.apache.parquet.io.api.Binary$ByteBufferBackedBinary
[WARNING]   - org.apache.parquet.it.unimi.dsi.fastutil.doubles.DoubleSortedSet
[WARNING]   - org.apache.parquet.io.BaseRecordReader
[WARNING]   - org.apache.parquet.column.ParquetProperties$1
[WARNING]   - org.apache.parquet.column.UnknownColumnException
[WARNING]   - org.apache.parquet.filter.ColumnPredicates$12
[WARNING]   - org.apache.parquet.schema.Types$BaseMapBuilder$ListValueBuilder
[WARNING]   - 782 more...
[WARNING] utils-misc-spark2_2.11-0.3.0.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 5 overlapping classes: 
[WARNING]   - org.bdgenomics.utils.misc.MathUtils
[WARNING]   - org.bdgenomics.utils.misc.MathUtils$$anonfun$scalarArrayMultiply$1
[WARNING]   - org.bdgenomics.utils.misc.MathUtils$$anonfun$aggregateArray$1
[WARNING]   - org.bdgenomics.utils.misc.MathUtils$$anonfun$softmax$1
[WARNING]   - org.bdgenomics.utils.misc.MathUtils$
[WARNING] grizzled-slf4j_2.11-1.3.4.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 4 overlapping classes: 
[WARNING]   - grizzled.slf4j.Logger$
[WARNING]   - grizzled.slf4j.Logging
[WARNING]   - grizzled.slf4j.Logging$class
[WARNING]   - grizzled.slf4j.Logger
[WARNING] guava-27.0-jre.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 1955 overlapping classes: 
[WARNING]   - com.google.common.collect.CompactHashMap$Itr
[WARNING]   - com.google.common.collect.ImmutableMapValues$1
[WARNING]   - com.google.common.util.concurrent.AbstractService$5
[WARNING]   - com.google.common.io.LineProcessor
[WARNING]   - com.google.common.io.BaseEncoding$StandardBaseEncoding$2
[WARNING]   - com.google.common.io.ByteProcessor
[WARNING]   - com.google.common.math.package-info
[WARNING]   - com.google.common.util.concurrent.SimpleTimeLimiter
[WARNING]   - com.google.common.cache.AbstractCache$StatsCounter
[WARNING]   - com.google.common.util.concurrent.CycleDetectingLockFactory$Policies
[WARNING]   - 1945 more...
[WARNING] parquet-encoding-1.10.1.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 305 overlapping classes: 
[WARNING]   - org.apache.parquet.column.values.bitpacking.ByteBitPackingForLongBE$Packer25
[WARNING]   - org.apache.parquet.column.values.bitpacking.ByteBitPackingBE$Packer14
[WARNING]   - org.apache.parquet.column.values.bitpacking.BytePackerForLong
[WARNING]   - org.apache.parquet.column.values.bitpacking.ByteBitPackingLE$Packer10
[WARNING]   - org.apache.parquet.column.values.bitpacking.ByteBitPackingForLongLE$Packer54
[WARNING]   - org.apache.parquet.column.values.bitpacking.ByteBitPackingForLongLE$Packer41
[WARNING]   - org.apache.parquet.column.values.bitpacking.ByteBitPackingLE$Packer30
[WARNING]   - org.apache.parquet.column.values.bitpacking.ByteBitPackingLE$Packer23
[WARNING]   - org.apache.parquet.column.values.bitpacking.LemireBitPackingLE$Packer19
[WARNING]   - org.apache.parquet.column.values.bitpacking.ByteBitPackingForLongLE$Packer21
[WARNING]   - 295 more...
[WARNING] checker-qual-2.5.2.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 302 overlapping classes: 
[WARNING]   - org.checkerframework.checker.formatter.FormatUtil
[WARNING]   - org.checkerframework.checker.units.qual.MixedUnits
[WARNING]   - org.checkerframework.checker.regex.qual.PolyRegex
[WARNING]   - org.checkerframework.checker.units.qual.PolyUnit
[WARNING]   - org.checkerframework.checker.formatter.FormatUtil$IllegalFormatConversionCategoryException
[WARNING]   - org.checkerframework.framework.qual.Unqualified
[WARNING]   - org.checkerframework.checker.units.qual.C
[WARNING]   - org.checkerframework.common.reflection.qual.UnknownMethod
[WARNING]   - org.checkerframework.framework.qual.EnsuresQualifierIf
[WARNING]   - org.checkerframework.checker.signedness.SignednessUtil
[WARNING]   - 292 more...
[WARNING] parquet-format-2.4.0.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 461 overlapping classes: 
[WARNING]   - shaded.parquet.org.apache.thrift.transport.TSimpleFileTransport
[WARNING]   - shaded.parquet.org.apache.thrift.transport.TServerSocket
[WARNING]   - shaded.parquet.org.apache.thrift.transport.TFileTransport$TruncableBufferedInputStream
[WARNING]   - shaded.parquet.org.apache.thrift.TFieldIdEnum
[WARNING]   - org.apache.parquet.format.SchemaElement$SchemaElementStandardScheme
[WARNING]   - shaded.parquet.org.apache.thrift.server.AbstractNonblockingServer$AbstractSelectThread
[WARNING]   - shaded.parquet.org.apache.thrift.TEnumHelper
[WARNING]   - org.apache.parquet.format.JsonType$JsonTypeStandardScheme
[WARNING]   - org.apache.parquet.format.DictionaryPageHeader$DictionaryPageHeaderTupleScheme
[WARNING]   - org.apache.parquet.format.UUIDType$1
[WARNING]   - 451 more...
[WARNING] snappy-java-1.1.1.3.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 19 overlapping classes: 
[WARNING]   - org.xerial.snappy.SnappyLoader
[WARNING]   - org.xerial.snappy.SnappyFramedInputStream$FrameMetaData
[WARNING]   - org.xerial.snappy.SnappyFramedInputStream
[WARNING]   - org.xerial.snappy.SnappyOutputStream
[WARNING]   - org.xerial.snappy.SnappyErrorCode
[WARNING]   - org.xerial.snappy.SnappyBundleActivator
[WARNING]   - org.xerial.snappy.SnappyFramedOutputStream
[WARNING]   - org.xerial.snappy.BufferRecycler
[WARNING]   - org.xerial.snappy.SnappyError
[WARNING]   - org.xerial.snappy.SnappyFramedInputStream$FrameAction
[WARNING]   - 9 more...
[WARNING] jackson-core-asl-1.9.13.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 121 overlapping classes: 
[WARNING]   - org.codehaus.jackson.annotate.JsonManagedReference
[WARNING]   - org.codehaus.jackson.util.DefaultPrettyPrinter$FixedSpaceIndenter
[WARNING]   - org.codehaus.jackson.JsonGenerationException
[WARNING]   - org.codehaus.jackson.util.BufferRecycler$CharBufferType
[WARNING]   - org.codehaus.jackson.io.UTF32Reader
[WARNING]   - org.codehaus.jackson.sym.Name1
[WARNING]   - org.codehaus.jackson.util.MinimalPrettyPrinter
[WARNING]   - org.codehaus.jackson.impl.JsonParserBase
[WARNING]   - org.codehaus.jackson.sym.CharsToNameCanonicalizer$Bucket
[WARNING]   - org.codehaus.jackson.annotate.JsonValue
[WARNING]   - 111 more...
[WARNING] utils-intervalrdd-spark2_2.11-0.3.0.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 51 overlapping classes: 
[WARNING]   - org.bdgenomics.utils.interval.array.IntervalArray$
[WARNING]   - org.bdgenomics.utils.interval.rdd.IntervalRDD$$anonfun$collect$2
[WARNING]   - org.bdgenomics.utils.interval.array.IntervalArray$$anonfun$4
[WARNING]   - org.bdgenomics.utils.interval.array.Interval$class
[WARNING]   - org.bdgenomics.utils.interval.array.IntervalArray$$anonfun$get$1
[WARNING]   - org.bdgenomics.utils.interval.rdd.IntervalRDD$$anonfun$4
[WARNING]   - org.bdgenomics.utils.interval.array.IntervalArray$$anonfun$1
[WARNING]   - org.bdgenomics.utils.interval.rdd.IntervalRDD$
[WARNING]   - org.bdgenomics.utils.interval.rdd.IntervalRDD$$anonfun$filter$1
[WARNING]   - org.bdgenomics.utils.interval.array.IntervalArray$$anonfun$mapValues$1
[WARNING]   - 41 more...
[WARNING] bdg-formats-0.15.0.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 58 overlapping classes: 
[WARNING]   - org.bdgenomics.formats.avro.VariantCallingAnnotations$Builder
[WARNING]   - org.bdgenomics.formats.avro.VariantAnnotation$Builder
[WARNING]   - org.bdgenomics.formats.avro.Reference$1
[WARNING]   - org.bdgenomics.formats.avro.VariantAnnotationMessage
[WARNING]   - org.bdgenomics.formats.avro.Alignment$Builder
[WARNING]   - org.bdgenomics.formats.avro.OntologyTerm$1
[WARNING]   - org.bdgenomics.formats.avro.Alignment$1
[WARNING]   - org.bdgenomics.formats.avro.Feature$1
[WARNING]   - org.bdgenomics.formats.avro.Sequence$1
[WARNING]   - org.bdgenomics.formats.avro.ReadGroup$Builder
[WARNING]   - 48 more...
[WARNING] adam-core-spark2_2.11-0.33.0-SNAPSHOT.jar, adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar define 2916 overlapping classes: 
[WARNING]   - org.bdgenomics.adam.rdd.feature.FeatureDataset$$anonfun$filterToTranscript$2$$anonfun$apply$9$$anonfun$apply$10
[WARNING]   - org.bdgenomics.adam.models.SnpTable$$anonfun$2
[WARNING]   - org.bdgenomics.adam.sql.VariantCallingAnnotations$$anonfun$toAvro$58
[WARNING]   - org.bdgenomics.adam.rdd.GenomicDataset$$anonfun$shuffleRegionJoin$2
[WARNING]   - org.bdgenomics.adam.rdd.feature.DatasetBoundFeatureDataset$$anonfun$filterByAttribute$1
[WARNING]   - org.bdgenomics.adam.converters.VariantContextConverter$$anonfun$63
[WARNING]   - org.bdgenomics.adam.io.InterleavedFastqInputFormat
[WARNING]   - org.bdgenomics.adam.util.FileMerger$$anonfun$mergeFilesAcrossFilesystems$4
[WARNING]   - org.bdgenomics.adam.rdd.fragment.DatasetBoundFragmentDataset
[WARNING]   - org.bdgenomics.adam.converters.VariantContextConverter$$anonfun$extractStrandBiasComponents$1
[WARNING]   - 2906 more...
[WARNING] maven-shade-plugin has detected that some class files are
[WARNING] present in two or more JARs. When this happens, only one
[WARNING] single version of the class is copied to the uber jar.
[WARNING] Usually this is not harmful and you can skip these warnings,
[WARNING] otherwise try to manually exclude artifacts based on
[WARNING] mvn dependency:tree -Ddetail=true and the above output.
[WARNING] See http://maven.apache.org/plugins/maven-shade-plugin/
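The triage the plugin suggests can be scripted. A minimal sketch, assuming it is run from the module that produced these warnings, with guava picked purely as an illustration from the list above:

# dump the detailed dependency tree the warning refers to, then look up
# one of the flagged jars (guava is an arbitrary pick from the list above)
mvn dependency:tree -Ddetail=true > /tmp/deptree.txt
grep -n 'guava' /tmp/deptree.txt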
[INFO] Replacing original artifact with shaded artifact.
[INFO] Replacing /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-assembly/target/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar with /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-assembly/target/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT-shaded.jar
[INFO] 
[INFO] ---------------< org.bdgenomics.adam:adam-r-spark2_2.11 >---------------
[INFO] Building ADAM_2.11: R APIs 0.33.0-SNAPSHOT                         [8/8]
[INFO] --------------------------------[ jar ]---------------------------------
[INFO] 
[INFO] --- maven-enforcer-plugin:1.0:enforce (enforce-versions) @ adam-r-spark2_2.11 ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.0:enforce (enforce-maven) @ adam-r-spark2_2.11 ---
[INFO] 
[INFO] --- scalariform-maven-plugin:0.1.4:format (default-cli) @ adam-r-spark2_2.11 ---
[INFO] Modified 0 of 0 .scala files
[INFO] 
[INFO] --- maven-resources-plugin:3.1.0:resources (default-resources) @ adam-r-spark2_2.11 ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] skip non existing resourceDirectory /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-r/src/main/resources
[INFO] 
[INFO] --- scala-maven-plugin:3.2.2:compile (scala-compile-first) @ adam-r-spark2_2.11 ---
[INFO] No sources to compile
[INFO] 
[INFO] --- exec-maven-plugin:1.5.0:exec (doc-r) @ adam-r-spark2_2.11 ---

R version 3.6.3 (2020-02-29) -- "Holding the Windsock"
Copyright (C) 2020 The R Foundation for Statistical Computing
Platform: x86_64-pc-linux-gnu (64-bit)

R is free software and comes with ABSOLUTELY NO WARRANTY.
You are welcome to redistribute it under certain conditions.
Type 'license()' or 'licence()' for distribution details.

  Natural language support but running in an English locale

R is a collaborative project with many contributors.
Type 'contributors()' for more information and
'citation()' on how to cite R or R packages in publications.

Type 'demo()' for some demos, 'help()' for on-line help, or
'help.start()' for an HTML browser interface to help.
Type 'q()' to quit R.

> library(devtools);devtools::document()
Loading required package: usethis
Updating bdgenomics.adam documentation
Loading bdgenomics.adam
Creating a new generic function for ‘pipe’ in package ‘bdgenomics.adam’
Creating a new generic function for ‘transform’ in package ‘bdgenomics.adam’
Creating a new generic function for ‘save’ in package ‘bdgenomics.adam’
Creating a new generic function for ‘sort’ in package ‘bdgenomics.adam’

Attaching package: ‘SparkR’

The following objects are masked from ‘package:stats’:

    cov, filter, lag, na.omit, predict, sd, var, window

The following objects are masked from ‘package:base’:

    as.data.frame, colnames, colnames<-, drop, endsWith, intersect,
    rank, rbind, sample, startsWith, subset, summary, transform, union

Writing NAMESPACE
Writing NAMESPACE
Writing ADAMContext.Rd
Writing GenomicDataset.Rd
Writing AlignmentDataset.Rd
Writing CoverageDataset.Rd
Writing FragmentDataset.Rd
Writing toVariantContexts.Rd
Writing toVariants.Rd
Writing SliceDataset.Rd
Writing VariantContextDataset.Rd
Writing createADAMContext.Rd
Writing loadAlignments-ADAMContext-character-method.Rd
Writing loadDnaSequences-ADAMContext-character-method.Rd
Writing loadProteinSequences-ADAMContext-character-method.Rd
Writing loadRnaSequences-ADAMContext-character-method.Rd
Writing loadSlices-ADAMContext-character-method.Rd
Writing loadFragments-ADAMContext-character-method.Rd
Writing loadFeatures-ADAMContext-character-method.Rd
Writing loadCoverage-ADAMContext-character-method.Rd
Writing loadGenotypes-ADAMContext-character-method.Rd
Writing loadVariants-ADAMContext-character-method.Rd
Writing FeatureDataset.Rd
Writing GenotypeDataset.Rd
Writing SequenceDataset.Rd
Writing VariantDataset.Rd
Writing pipe-GenomicDataset-ANY-character-character-character-method.Rd
Writing cache-GenomicDataset-method.Rd
Writing persist-GenomicDataset-character-method.Rd
Writing unpersist-GenomicDataset-method.Rd
Writing sort-GenomicDataset-method.Rd
Writing sortLexicographically-GenomicDataset-method.Rd
Writing toDF-GenomicDataset-method.Rd
Writing transform-GenomicDataset-function-method.Rd
Writing transmute-GenomicDataset-function-character-method.Rd
Writing broadcastRegionJoin-GenomicDataset-GenomicDataset-method.Rd
Writing rightOuterBroadcastRegionJoin-GenomicDataset-GenomicDataset-method.Rd
Writing broadcastRegionJoinAndGroupByRight-GenomicDataset-GenomicDataset-method.Rd
Writing rightOuterBroadcastRegionJoinAndGroupByRight-GenomicDataset-GenomicDataset-method.Rd
Writing shuffleRegionJoin-GenomicDataset-GenomicDataset-method.Rd
Writing rightOuterShuffleRegionJoin-GenomicDataset-GenomicDataset-method.Rd
Writing leftOuterShuffleRegionJoin-GenomicDataset-GenomicDataset-method.Rd
Writing leftOuterShuffleRegionJoinAndGroupByLeft-GenomicDataset-GenomicDataset-method.Rd
Writing fullOuterShuffleRegionJoin-GenomicDataset-GenomicDataset-method.Rd
Writing rightOuterShuffleRegionJoinAndGroupByLeft-GenomicDataset-GenomicDataset-method.Rd
Writing shuffleRegionJoinAndGroupByLeft-GenomicDataset-GenomicDataset-method.Rd
Writing toFragments-AlignmentDataset-method.Rd
Writing saveAsSam-AlignmentDataset-character-method.Rd
Writing toCoverage-AlignmentDataset-method.Rd
Writing save-AlignmentDataset-character-method.Rd
Writing countKmers-AlignmentDataset-numeric-method.Rd
Writing sortByReadName-AlignmentDataset-method.Rd
Writing sortByReferencePosition-AlignmentDataset-method.Rd
Writing sortByReferencePositionAndIndex-AlignmentDataset-method.Rd
Writing markDuplicates-AlignmentDataset-method.Rd
Writing recalibrateBaseQualities-AlignmentDataset-VariantDataset-character-method.Rd
Writing realignIndels-AlignmentDataset-method.Rd
Writing save-CoverageDataset-character-method.Rd
Writing collapse-CoverageDataset-method.Rd
Writing toFeatures-CoverageDataset-method.Rd
Writing coverage-CoverageDataset-method.Rd
Writing flatten-CoverageDataset-method.Rd
Writing save-FeatureDataset-character-method.Rd
Writing toCoverage-FeatureDataset-method.Rd
Writing toAlignments-FragmentDataset-method.Rd
Writing markDuplicates-FragmentDataset-method.Rd
Writing save-FragmentDataset-character-method.Rd
Writing saveAsParquet-GenotypeDataset-character-method.Rd
Writing toVariants-GenotypeDataset-method.Rd
Writing toVariantContexts-GenotypeDataset-method.Rd
Writing save-SequenceDataset-character-method.Rd
Writing save-SliceDataset-character-method.Rd
Writing flankAdjacentFragments-SliceDataset-numeric-method.Rd
Writing saveAsParquet-VariantDataset-character-method.Rd
Writing toVariantContexts-VariantDataset-method.Rd
Writing saveAsVcf-VariantContextDataset-character-method.Rd
> 
> 
Warning message:
roxygen2 requires Encoding: UTF-8 
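The roxygen2 warning above has a well-known fix: declare UTF-8 in the package DESCRIPTION. A one-line sketch, assuming the adam-r/bdgenomics.adam layout visible in the paths above and that no Encoding field is present yet:

# append the encoding declaration roxygen2 asks for; the path and the
# absence of an existing Encoding field are assumptions from this log
echo 'Encoding: UTF-8' >> adam-r/bdgenomics.adam/DESCRIPTION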
[INFO] 
[INFO] --- exec-maven-plugin:1.5.0:exec (dev-r) @ adam-r-spark2_2.11 ---
* checking for file ‘bdgenomics.adam/DESCRIPTION’ ... OK
* preparing ‘bdgenomics.adam’:
* checking DESCRIPTION meta-information ... OK
* checking for LF line-endings in source and make files and shell scripts
* checking for empty or unneeded directories
* building ‘bdgenomics.adam_0.32.0.tar.gz’
Warning in utils::tar(filepath, pkgname, compression = compression, compression_level = 9L,  :
  storing paths of more than 100 bytes is not portable:
  ‘bdgenomics.adam/man/rightOuterBroadcastRegionJoinAndGroupByRight-GenomicDataset-GenomicDataset-method.Rd’
Warning in utils::tar(filepath, pkgname, compression = compression, compression_level = 9L,  :
  storing paths of more than 100 bytes is not portable:
  ‘bdgenomics.adam/man/rightOuterShuffleRegionJoinAndGroupByLeft-GenomicDataset-GenomicDataset-method.Rd’

[INFO] 
[INFO] --- maven-compiler-plugin:3.8.0:compile (default-compile) @ adam-r-spark2_2.11 ---
[INFO] No sources to compile
[INFO] 
[INFO] --- maven-resources-plugin:3.1.0:testResources (default-testResources) @ adam-r-spark2_2.11 ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] skip non existing resourceDirectory /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-r/src/test/resources
[INFO] 
[INFO] --- scala-maven-plugin:3.2.2:testCompile (scala-test-compile-first) @ adam-r-spark2_2.11 ---
[INFO] No sources to compile
[INFO] 
[INFO] --- exec-maven-plugin:1.5.0:exec (test-r) @ adam-r-spark2_2.11 ---
* using log directory ‘/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-r/bdgenomics.adam.Rcheck’
* using R version 3.6.3 (2020-02-29)
* using platform: x86_64-pc-linux-gnu (64-bit)
* using session charset: UTF-8
* checking for file ‘bdgenomics.adam/DESCRIPTION’ ... OK
* checking extension type ... Package
* this is package ‘bdgenomics.adam’ version ‘0.32.0’
* checking package namespace information ... OK
* checking package dependencies ... OK
* checking if this is a source package ... OK
* checking if there is a namespace ... OK
* checking for executable files ... OK
* checking for hidden files and directories ... OK
* checking for portable file names ... NOTE
Found the following non-portable file paths:
  bdgenomics.adam/man/rightOuterBroadcastRegionJoinAndGroupByRight-GenomicDataset-GenomicDataset-method.Rd
  bdgenomics.adam/man/rightOuterShuffleRegionJoinAndGroupByLeft-GenomicDataset-GenomicDataset-method.Rd

Tarballs are only required to store paths of up to 100 bytes and cannot
store those of more than 256 bytes, with restrictions including to 100
bytes for the final component.
See section ‘Package structure’ in the ‘Writing R Extensions’ manual.
* checking for sufficient/correct file permissions ... OK
* checking whether package ‘bdgenomics.adam’ can be installed ... OK
* checking installed package size ... OK
* checking package directory ... OK
* checking DESCRIPTION meta-information ... NOTE
Checking should be performed on sources prepared by ‘R CMD build’.
* checking top-level files ... OK
* checking for left-over files ... OK
* checking index information ... OK
* checking package subdirectories ... OK
* checking R files for non-ASCII characters ... OK
* checking R files for syntax errors ... OK
* checking whether the package can be loaded ... OK
* checking whether the package can be loaded with stated dependencies ... OK
* checking whether the package can be unloaded cleanly ... OK
* checking whether the namespace can be loaded with stated dependencies ... OK
* checking whether the namespace can be unloaded cleanly ... OK
* checking loading without being on the library search path ... OK
* checking dependencies in R code ... OK
* checking S3 generic/method consistency ... OK
* checking replacement functions ... OK
* checking foreign function calls ... OK
* checking R code for possible problems ... OK
* checking Rd files ... OK
* checking Rd metadata ... OK
* checking Rd cross-references ... OK
* checking for missing documentation entries ... OK
* checking for code/documentation mismatches ... OK
* checking Rd \usage sections ... WARNING
Undocumented arguments in documentation object 'toVariants,GenotypeDataset-method'
  ‘ardd’

Undocumented arguments in documentation object 'toVariants'
  ‘...’

Functions with \usage entries need to have the appropriate \alias
entries, and all their arguments documented.
The \usage entries must correspond to syntactically valid R code.
See chapter ‘Writing R documentation files’ in the ‘Writing R
Extensions’ manual.
* checking Rd contents ... OK
* checking for unstated dependencies in examples ... OK
* checking examples ... NONE
* checking for unstated dependencies in ‘tests’ ... OK
* checking tests ...
  Running ‘testthat.R’
 OK
* checking PDF version of manual ... OK
* DONE

Status: 1 WARNING, 2 NOTEs
See
  ‘/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-r/bdgenomics.adam.Rcheck/00check.log’
for details.


[INFO] 
[INFO] --- maven-compiler-plugin:3.8.0:testCompile (default-testCompile) @ adam-r-spark2_2.11 ---
[INFO] No sources to compile
[INFO] 
[INFO] --- maven-surefire-plugin:3.0.0-M3:test (default-test) @ adam-r-spark2_2.11 ---
[INFO] No tests to run.
[INFO] 
[INFO] --- maven-jar-plugin:3.2.0:jar (default-jar) @ adam-r-spark2_2.11 ---
[WARNING] JAR will be empty - no content was marked for inclusion!
[INFO] Building jar: /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-r/target/adam-r-spark2_2.11-0.33.0-SNAPSHOT.jar
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary for ADAM_2.11 0.33.0-SNAPSHOT:
[INFO] 
[INFO] ADAM_2.11 .......................................... SUCCESS [  8.874 s]
[INFO] ADAM_2.11: Shader workaround ....................... SUCCESS [  1.150 s]
[INFO] ADAM_2.11: Avro-to-Dataset codegen utils ........... SUCCESS [  0.773 s]
[INFO] ADAM_2.11: Core .................................... SUCCESS [ 42.307 s]
[INFO] ADAM_2.11: APIs for Java, Python ................... SUCCESS [  2.366 s]
[INFO] ADAM_2.11: CLI ..................................... SUCCESS [  2.948 s]
[INFO] ADAM_2.11: Assembly ................................ SUCCESS [ 14.641 s]
[INFO] ADAM_2.11: R APIs .................................. SUCCESS [ 56.053 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD SUCCESS
[INFO] ------------------------------------------------------------------------
[INFO] Total time:  02:09 min
[INFO] Finished at: 2020-08-18T09:31:17-07:00
[INFO] ------------------------------------------------------------------------

# define filenames
BAM=mouse_chrM.bam
+ BAM=mouse_chrM.bam
READS=${BAM}.reads.adam
+ READS=mouse_chrM.bam.reads.adam
SORTED_READS=${BAM}.reads.sorted.adam
+ SORTED_READS=mouse_chrM.bam.reads.sorted.adam
FRAGMENTS=${BAM}.fragments.adam
+ FRAGMENTS=mouse_chrM.bam.fragments.adam
    
# fetch our input dataset
echo "Fetching BAM file"
+ echo 'Fetching BAM file'
Fetching BAM file
rm -rf ${BAM}
+ rm -rf mouse_chrM.bam
wget -q https://s3.amazonaws.com/bdgenomics-test/${BAM}
+ wget -q https://s3.amazonaws.com/bdgenomics-test/mouse_chrM.bam

# once fetched, convert BAM to ADAM
echo "Converting BAM to ADAM read format"
+ echo 'Converting BAM to ADAM read format'
Converting BAM to ADAM read format
rm -rf ${READS}
+ rm -rf mouse_chrM.bam.reads.adam
${ADAM} transformAlignments ${BAM} ${READS}
+ ./bin/adam-submit transformAlignments mouse_chrM.bam mouse_chrM.bam.reads.adam
Using ADAM_MAIN=org.bdgenomics.adam.cli.ADAMMain
Using spark-submit=/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/spark-2.4.6-bin-hadoop2.7/bin/spark-submit
20/08/18 09:31:19 WARN Utils: Your hostname, research-jenkins-worker-07 resolves to a loopback address: 127.0.1.1; using 192.168.10.27 instead (on interface eth0)
20/08/18 09:31:19 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address
20/08/18 09:31:19 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
log4j:WARN No appenders could be found for logger (org.bdgenomics.adam.cli.ADAMMain).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
20/08/18 09:31:19 INFO SparkContext: Running Spark version 2.4.6
20/08/18 09:31:19 INFO SparkContext: Submitted application: transformAlignments
20/08/18 09:31:19 INFO SecurityManager: Changing view acls to: jenkins
20/08/18 09:31:19 INFO SecurityManager: Changing modify acls to: jenkins
20/08/18 09:31:19 INFO SecurityManager: Changing view acls groups to: 
20/08/18 09:31:19 INFO SecurityManager: Changing modify acls groups to: 
20/08/18 09:31:19 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users  with view permissions: Set(jenkins); groups with view permissions: Set(); users  with modify permissions: Set(jenkins); groups with modify permissions: Set()
20/08/18 09:31:20 INFO Utils: Successfully started service 'sparkDriver' on port 44815.
20/08/18 09:31:20 INFO SparkEnv: Registering MapOutputTracker
20/08/18 09:31:20 INFO SparkEnv: Registering BlockManagerMaster
20/08/18 09:31:20 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
20/08/18 09:31:20 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up
20/08/18 09:31:20 INFO DiskBlockManager: Created local directory at /tmp/blockmgr-86c53ff2-d284-467d-a8df-0f4c7473de1f
20/08/18 09:31:20 INFO MemoryStore: MemoryStore started with capacity 366.3 MB
20/08/18 09:31:20 INFO SparkEnv: Registering OutputCommitCoordinator
20/08/18 09:31:20 INFO Utils: Successfully started service 'SparkUI' on port 4040.
20/08/18 09:31:20 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://192.168.10.27:4040
20/08/18 09:31:20 INFO SparkContext: Added JAR file:/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-assembly/target/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar at spark://192.168.10.27:44815/jars/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar with timestamp 1597768280569
20/08/18 09:31:20 INFO Executor: Starting executor ID driver on host localhost
20/08/18 09:31:20 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 41389.
20/08/18 09:31:20 INFO NettyBlockTransferService: Server created on 192.168.10.27:41389
20/08/18 09:31:20 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
20/08/18 09:31:20 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, 192.168.10.27, 41389, None)
20/08/18 09:31:20 INFO BlockManagerMasterEndpoint: Registering block manager 192.168.10.27:41389 with 366.3 MB RAM, BlockManagerId(driver, 192.168.10.27, 41389, None)
20/08/18 09:31:20 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, 192.168.10.27, 41389, None)
20/08/18 09:31:20 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, 192.168.10.27, 41389, None)
20/08/18 09:31:21 INFO ADAMContext: Loading mouse_chrM.bam as BAM/CRAM/SAM and converting to Alignments.
20/08/18 09:31:21 INFO ADAMContext: Loaded header from file:/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/mouse_chrM.bam
20/08/18 09:31:21 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 284.3 KB, free 366.0 MB)
20/08/18 09:31:22 INFO MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 23.1 KB, free 366.0 MB)
20/08/18 09:31:22 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on 192.168.10.27:41389 (size: 23.1 KB, free: 366.3 MB)
20/08/18 09:31:22 INFO SparkContext: Created broadcast 0 from newAPIHadoopFile at ADAMContext.scala:2053
20/08/18 09:31:23 INFO RDDBoundAlignmentDataset: Saving data in ADAM format
20/08/18 09:31:23 INFO FileOutputCommitter: File Output Committer Algorithm version is 1
20/08/18 09:31:23 INFO FileInputFormat: Total input paths to process : 1
20/08/18 09:31:23 INFO SparkContext: Starting job: runJob at SparkHadoopWriter.scala:78
20/08/18 09:31:23 INFO DAGScheduler: Got job 0 (runJob at SparkHadoopWriter.scala:78) with 1 output partitions
20/08/18 09:31:23 INFO DAGScheduler: Final stage: ResultStage 0 (runJob at SparkHadoopWriter.scala:78)
20/08/18 09:31:23 INFO DAGScheduler: Parents of final stage: List()
20/08/18 09:31:23 INFO DAGScheduler: Missing parents: List()
20/08/18 09:31:23 INFO DAGScheduler: Submitting ResultStage 0 (MapPartitionsRDD[2] at map at GenomicDataset.scala:3805), which has no missing parents
20/08/18 09:31:23 INFO MemoryStore: Block broadcast_1 stored as values in memory (estimated size 82.1 KB, free 365.9 MB)
20/08/18 09:31:23 INFO MemoryStore: Block broadcast_1_piece0 stored as bytes in memory (estimated size 30.3 KB, free 365.9 MB)
20/08/18 09:31:23 INFO BlockManagerInfo: Added broadcast_1_piece0 in memory on 192.168.10.27:41389 (size: 30.3 KB, free: 366.2 MB)
20/08/18 09:31:23 INFO SparkContext: Created broadcast 1 from broadcast at DAGScheduler.scala:1163
20/08/18 09:31:23 INFO DAGScheduler: Submitting 1 missing tasks from ResultStage 0 (MapPartitionsRDD[2] at map at GenomicDataset.scala:3805) (first 15 tasks are for partitions Vector(0))
20/08/18 09:31:23 INFO TaskSchedulerImpl: Adding task set 0.0 with 1 tasks
20/08/18 09:31:23 INFO TaskSetManager: Starting task 0.0 in stage 0.0 (TID 0, localhost, executor driver, partition 0, PROCESS_LOCAL, 7962 bytes)
20/08/18 09:31:23 INFO Executor: Running task 0.0 in stage 0.0 (TID 0)
20/08/18 09:31:23 INFO Executor: Fetching spark://192.168.10.27:44815/jars/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar with timestamp 1597768280569
20/08/18 09:31:23 INFO TransportClientFactory: Successfully created connection to /192.168.10.27:44815 after 37 ms (0 ms spent in bootstraps)
20/08/18 09:31:23 INFO Utils: Fetching spark://192.168.10.27:44815/jars/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar to /tmp/spark-8cde3e01-d80e-4f34-8030-473353457da1/userFiles-c890057a-3b7f-4e89-a4d6-786c57ade4fb/fetchFileTemp7727991423442537560.tmp
20/08/18 09:31:23 INFO Executor: Adding file:/tmp/spark-8cde3e01-d80e-4f34-8030-473353457da1/userFiles-c890057a-3b7f-4e89-a4d6-786c57ade4fb/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar to class loader
20/08/18 09:31:23 INFO NewHadoopRDD: Input split: file:/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/mouse_chrM.bam:83361792-833134657535
20/08/18 09:31:24 INFO FileOutputCommitter: File Output Committer Algorithm version is 1
20/08/18 09:31:24 INFO CodecConfig: Compression: GZIP
20/08/18 09:31:24 INFO FileOutputCommitter: File Output Committer Algorithm version is 1
20/08/18 09:31:24 INFO ParquetOutputFormat: Parquet block size to 134217728
20/08/18 09:31:24 INFO ParquetOutputFormat: Parquet page size to 1048576
20/08/18 09:31:24 INFO ParquetOutputFormat: Parquet dictionary page size to 1048576
20/08/18 09:31:24 INFO ParquetOutputFormat: Dictionary is on
20/08/18 09:31:24 INFO ParquetOutputFormat: Validation is off
20/08/18 09:31:24 INFO ParquetOutputFormat: Writer version is: PARQUET_1_0
20/08/18 09:31:24 INFO ParquetOutputFormat: Maximum row group padding size is 8388608 bytes
20/08/18 09:31:24 INFO ParquetOutputFormat: Page size checking is: estimated
20/08/18 09:31:24 INFO ParquetOutputFormat: Min row count for page size check is: 100
20/08/18 09:31:24 INFO ParquetOutputFormat: Max row count for page size check is: 10000
20/08/18 09:31:24 INFO CodecPool: Got brand-new compressor [.gz]
Ignoring SAM validation error: ERROR: Record 162622, Read name 613F0AAXX100423:3:58:9979:16082, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162624, Read name 613F0AAXX100423:6:13:3141:11793, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162625, Read name 613F0AAXX100423:8:39:18592:13552, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162635, Read name 613F1AAXX100423:7:2:13114:10698, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162637, Read name 613F1AAXX100423:6:100:8840:11167, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162639, Read name 613F1AAXX100423:8:15:10944:11181, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162640, Read name 613F1AAXX100423:8:17:5740:10104, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162651, Read name 613F1AAXX100423:1:53:11097:8261, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162654, Read name 613F1AAXX100423:2:112:16779:19612, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162657, Read name 613F0AAXX100423:8:28:7084:17683, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162659, Read name 613F0AAXX100423:8:39:19796:12794, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162662, Read name 613F1AAXX100423:5:116:9339:3264, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162667, Read name 613F0AAXX100423:4:67:2015:3054, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162669, Read name 613F0AAXX100423:7:7:11297:11738, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162674, Read name 613F0AAXX100423:6:59:10490:20829, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162678, Read name 613F1AAXX100423:8:11:17603:4766, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162682, Read name 613F0AAXX100423:5:86:10814:10257, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162683, Read name 613F0AAXX100423:5:117:14178:6111, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162685, Read name 613F0AAXX100423:2:3:13563:6720, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162689, Read name 613F0AAXX100423:7:59:16009:15799, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162696, Read name 613F0AAXX100423:5:31:9663:18252, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162698, Read name 613F1AAXX100423:2:27:12264:14626, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162699, Read name 613F0AAXX100423:1:120:19003:6647, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162702, Read name 613F1AAXX100423:3:37:6972:18407, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162704, Read name 613F1AAXX100423:3:77:6946:3880, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162706, Read name 613F0AAXX100423:7:48:2692:3492, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162708, Read name 613F1AAXX100423:7:80:8790:1648, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162710, Read name 6141AAAXX100423:5:30:15036:17610, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162712, Read name 613F1AAXX100423:8:80:6261:4465, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162713, Read name 6141AAAXX100423:5:74:5542:6195, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162715, Read name 613F1AAXX100423:5:14:14844:13639, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162718, Read name 613F1AAXX100423:7:112:14569:8480, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162725, Read name 613F1AAXX100423:4:56:10160:9879, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162727, Read name 6141AAAXX100423:7:89:12209:9221, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162731, Read name 6141AAAXX100423:6:55:1590:19793, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162732, Read name 6141AAAXX100423:7:102:16679:12368, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162734, Read name 613F1AAXX100423:2:7:4909:18472, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162737, Read name 6141AAAXX100423:4:73:6574:10572, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162741, Read name 6141AAAXX100423:1:8:14113:12655, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162743, Read name 6141AAAXX100423:3:40:7990:5056, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162744, Read name 6141AAAXX100423:4:36:15793:3411, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162745, Read name 6141AAAXX100423:8:83:1139:18985, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162746, Read name 6141AAAXX100423:5:7:18196:13562, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162748, Read name 6141AAAXX100423:3:114:5639:7123, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162751, Read name 6141AAAXX100423:7:47:4898:8640, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162753, Read name 6141AAAXX100423:3:64:8064:8165, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162756, Read name 613F1AAXX100423:1:105:14386:1684, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162757, Read name 613F1AAXX100423:6:98:1237:19470, MAPQ should be 0 for unmapped read.
Ignoring SAM validation error: ERROR: Record 162761, Read name 613F1AAXX100423:7:106:19658:9261, MAPQ should be 0 for unmapped read.
20/08/18 09:31:33 INFO InternalParquetRecordWriter: Flushing mem columnStore to file. allocated memory: 16043959
20/08/18 09:31:34 INFO FileOutputCommitter: Saved output of task 'attempt_20200818093123_0002_r_000000_0' to file:/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/mouse_chrM.bam.reads.adam/_temporary/0/task_20200818093123_0002_r_000000
20/08/18 09:31:34 INFO SparkHadoopMapRedUtil: attempt_20200818093123_0002_r_000000_0: Committed
20/08/18 09:31:34 INFO Executor: Finished task 0.0 in stage 0.0 (TID 0). 893 bytes result sent to driver
20/08/18 09:31:34 INFO TaskSetManager: Finished task 0.0 in stage 0.0 (TID 0) in 10703 ms on localhost (executor driver) (1/1)
20/08/18 09:31:34 INFO TaskSchedulerImpl: Removed TaskSet 0.0, whose tasks have all completed, from pool 
20/08/18 09:31:34 INFO DAGScheduler: ResultStage 0 (runJob at SparkHadoopWriter.scala:78) finished in 10.818 s
20/08/18 09:31:34 INFO DAGScheduler: Job 0 finished: runJob at SparkHadoopWriter.scala:78, took 10.871634 s
20/08/18 09:31:34 INFO ParquetFileReader: Initiating action with parallelism: 5
20/08/18 09:31:34 INFO SparkHadoopWriter: Job job_20200818093123_0002 committed.
20/08/18 09:31:34 INFO SparkContext: Invoking stop() from shutdown hook
20/08/18 09:31:34 INFO SparkUI: Stopped Spark web UI at http://192.168.10.27:4040
20/08/18 09:31:34 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
20/08/18 09:31:34 INFO MemoryStore: MemoryStore cleared
20/08/18 09:31:34 INFO BlockManager: BlockManager stopped
20/08/18 09:31:34 INFO BlockManagerMaster: BlockManagerMaster stopped
20/08/18 09:31:34 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
20/08/18 09:31:34 INFO SparkContext: Successfully stopped SparkContext
20/08/18 09:31:34 INFO ShutdownHookManager: Shutdown hook called
20/08/18 09:31:34 INFO ShutdownHookManager: Deleting directory /tmp/spark-414e7670-5d7f-4aae-b0c3-864a8e8ca803
20/08/18 09:31:34 INFO ShutdownHookManager: Deleting directory /tmp/spark-8cde3e01-d80e-4f34-8030-473353457da1
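The "Ignoring SAM validation error" lines in the run above come from htsjdk's lenient validation. If failing fast were preferred, the stringency could be raised; a sketch, assuming this build's transformAlignments accepts the -stringency option documented for the ADAM CLI:

# fail on malformed records instead of skipping them; the -stringency
# flag and its STRICT level are assumptions based on the ADAM CLI docs
./bin/adam-submit transformAlignments -stringency STRICT mouse_chrM.bam mouse_chrM.bam.reads.adam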

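The next step re-reads the Parquet output just written and sorts it. If only the sorted dataset were needed, the two passes could plausibly be collapsed into one, since transformAlignments reads BAM directly and takes the sort flag used below; a sketch under that assumption:

# one pass: read the BAM, sort by reference position, write ADAM/Parquet
./bin/adam-submit transformAlignments -sort_by_reference_position \
    mouse_chrM.bam mouse_chrM.bam.reads.sorted.adam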
# then, sort the BAM
echo "Converting BAM to ADAM read format with sorting"
+ echo 'Converting BAM to ADAM read format with sorting'
Converting BAM to ADAM read format with sorting
rm -rf ${SORTED_READS}
+ rm -rf mouse_chrM.bam.reads.sorted.adam
${ADAM} transformAlignments -sort_by_reference_position ${READS} ${SORTED_READS}
+ ./bin/adam-submit transformAlignments -sort_by_reference_position mouse_chrM.bam.reads.adam mouse_chrM.bam.reads.sorted.adam
Using ADAM_MAIN=org.bdgenomics.adam.cli.ADAMMain
Using spark-submit=/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/spark-2.4.6-bin-hadoop2.7/bin/spark-submit
20/08/18 09:31:35 WARN Utils: Your hostname, research-jenkins-worker-07 resolves to a loopback address: 127.0.1.1; using 192.168.10.27 instead (on interface eth0)
20/08/18 09:31:35 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address
20/08/18 09:31:35 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
log4j:WARN No appenders could be found for logger (org.bdgenomics.adam.cli.ADAMMain).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
20/08/18 09:31:36 INFO SparkContext: Running Spark version 2.4.6
20/08/18 09:31:36 INFO SparkContext: Submitted application: transformAlignments
20/08/18 09:31:36 INFO SecurityManager: Changing view acls to: jenkins
20/08/18 09:31:36 INFO SecurityManager: Changing modify acls to: jenkins
20/08/18 09:31:36 INFO SecurityManager: Changing view acls groups to: 
20/08/18 09:31:36 INFO SecurityManager: Changing modify acls groups to: 
20/08/18 09:31:36 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users  with view permissions: Set(jenkins); groups with view permissions: Set(); users  with modify permissions: Set(jenkins); groups with modify permissions: Set()
20/08/18 09:31:36 INFO Utils: Successfully started service 'sparkDriver' on port 46671.
20/08/18 09:31:36 INFO SparkEnv: Registering MapOutputTracker
20/08/18 09:31:36 INFO SparkEnv: Registering BlockManagerMaster
20/08/18 09:31:36 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
20/08/18 09:31:36 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up
20/08/18 09:31:36 INFO DiskBlockManager: Created local directory at /tmp/blockmgr-576aaede-f642-4c35-bc5b-28a8dc31ddb6
20/08/18 09:31:36 INFO MemoryStore: MemoryStore started with capacity 366.3 MB
20/08/18 09:31:36 INFO SparkEnv: Registering OutputCommitCoordinator
20/08/18 09:31:36 INFO Utils: Successfully started service 'SparkUI' on port 4040.
20/08/18 09:31:36 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://192.168.10.27:4040
20/08/18 09:31:37 INFO SparkContext: Added JAR file:/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-assembly/target/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar at spark://192.168.10.27:46671/jars/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar with timestamp 1597768297004
20/08/18 09:31:37 INFO Executor: Starting executor ID driver on host localhost
20/08/18 09:31:37 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 34275.
20/08/18 09:31:37 INFO NettyBlockTransferService: Server created on 192.168.10.27:34275
20/08/18 09:31:37 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
20/08/18 09:31:37 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, 192.168.10.27, 34275, None)
20/08/18 09:31:37 INFO BlockManagerMasterEndpoint: Registering block manager 192.168.10.27:34275 with 366.3 MB RAM, BlockManagerId(driver, 192.168.10.27, 34275, None)
20/08/18 09:31:37 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, 192.168.10.27, 34275, None)
20/08/18 09:31:37 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, 192.168.10.27, 34275, None)
20/08/18 09:31:37 INFO ADAMContext: Loading mouse_chrM.bam.reads.adam as Parquet of Alignments.
20/08/18 09:31:38 INFO ADAMContext: Reading the ADAM file at mouse_chrM.bam.reads.adam to create RDD
20/08/18 09:31:39 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 303.7 KB, free 366.0 MB)
20/08/18 09:31:39 INFO MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 27.4 KB, free 366.0 MB)
20/08/18 09:31:39 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on 192.168.10.27:34275 (size: 27.4 KB, free: 366.3 MB)
20/08/18 09:31:39 INFO SparkContext: Created broadcast 0 from newAPIHadoopFile at ADAMContext.scala:1792
20/08/18 09:31:39 INFO TransformAlignments: Sorting alignments by reference position, with references ordered by name
20/08/18 09:31:39 INFO RDDBoundAlignmentDataset: Sorting alignments by reference position
20/08/18 09:31:39 INFO FileInputFormat: Total input paths to process : 1
20/08/18 09:31:39 INFO ParquetInputFormat: Total input paths to process : 1
20/08/18 09:31:39 INFO RDDBoundAlignmentDataset: Saving data in ADAM format
20/08/18 09:31:39 INFO FileOutputCommitter: File Output Committer Algorithm version is 1
20/08/18 09:31:39 INFO SparkContext: Starting job: runJob at SparkHadoopWriter.scala:78
20/08/18 09:31:39 INFO DAGScheduler: Registering RDD 2 (sortBy at AlignmentDataset.scala:1004) as input to shuffle 0
20/08/18 09:31:39 INFO DAGScheduler: Got job 0 (runJob at SparkHadoopWriter.scala:78) with 1 output partitions
20/08/18 09:31:39 INFO DAGScheduler: Final stage: ResultStage 1 (runJob at SparkHadoopWriter.scala:78)
20/08/18 09:31:39 INFO DAGScheduler: Parents of final stage: List(ShuffleMapStage 0)
20/08/18 09:31:39 INFO DAGScheduler: Missing parents: List(ShuffleMapStage 0)
20/08/18 09:31:39 INFO DAGScheduler: Submitting ShuffleMapStage 0 (MapPartitionsRDD[2] at sortBy at AlignmentDataset.scala:1004), which has no missing parents
20/08/18 09:31:40 INFO MemoryStore: Block broadcast_1 stored as values in memory (estimated size 5.1 KB, free 366.0 MB)
20/08/18 09:31:40 INFO MemoryStore: Block broadcast_1_piece0 stored as bytes in memory (estimated size 3.0 KB, free 366.0 MB)
20/08/18 09:31:40 INFO BlockManagerInfo: Added broadcast_1_piece0 in memory on 192.168.10.27:34275 (size: 3.0 KB, free: 366.3 MB)
20/08/18 09:31:40 INFO SparkContext: Created broadcast 1 from broadcast at DAGScheduler.scala:1163
20/08/18 09:31:40 INFO DAGScheduler: Submitting 1 missing tasks from ShuffleMapStage 0 (MapPartitionsRDD[2] at sortBy at AlignmentDataset.scala:1004) (first 15 tasks are for partitions Vector(0))
20/08/18 09:31:40 INFO TaskSchedulerImpl: Adding task set 0.0 with 1 tasks
20/08/18 09:31:40 INFO TaskSetManager: Starting task 0.0 in stage 0.0 (TID 0, localhost, executor driver, partition 0, PROCESS_LOCAL, 8001 bytes)
20/08/18 09:31:40 INFO Executor: Running task 0.0 in stage 0.0 (TID 0)
20/08/18 09:31:40 INFO Executor: Fetching spark://192.168.10.27:46671/jars/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar with timestamp 1597768297004
20/08/18 09:31:40 INFO TransportClientFactory: Successfully created connection to /192.168.10.27:46671 after 39 ms (0 ms spent in bootstraps)
20/08/18 09:31:40 INFO Utils: Fetching spark://192.168.10.27:46671/jars/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar to /tmp/spark-9ea381b4-ed66-4faa-aded-0bbde0a9e4cb/userFiles-10a40a7d-7ff6-4d92-a894-11e58c147419/fetchFileTemp7123160432318069990.tmp
20/08/18 09:31:40 INFO Executor: Adding file:/tmp/spark-9ea381b4-ed66-4faa-aded-0bbde0a9e4cb/userFiles-10a40a7d-7ff6-4d92-a894-11e58c147419/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar to class loader
20/08/18 09:31:40 INFO NewHadoopRDD: Input split: file:/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/mouse_chrM.bam.reads.adam/part-r-00000.gz.parquet:0+10132211
20/08/18 09:31:40 INFO InternalParquetRecordReader: RecordReader initialized will read a total of 163064 records.
20/08/18 09:31:40 INFO InternalParquetRecordReader: at row 0. reading next block
20/08/18 09:31:40 INFO CodecPool: Got brand-new decompressor [.gz]
20/08/18 09:31:40 INFO InternalParquetRecordReader: block read in memory in 48 ms. row count = 163064
20/08/18 09:31:43 INFO Executor: Finished task 0.0 in stage 0.0 (TID 0). 956 bytes result sent to driver
20/08/18 09:31:43 INFO TaskSetManager: Finished task 0.0 in stage 0.0 (TID 0) in 3476 ms on localhost (executor driver) (1/1)
20/08/18 09:31:43 INFO TaskSchedulerImpl: Removed TaskSet 0.0, whose tasks have all completed, from pool 
20/08/18 09:31:43 INFO DAGScheduler: ShuffleMapStage 0 (sortBy at AlignmentDataset.scala:1004) finished in 3.609 s
20/08/18 09:31:43 INFO DAGScheduler: looking for newly runnable stages
20/08/18 09:31:43 INFO DAGScheduler: running: Set()
20/08/18 09:31:43 INFO DAGScheduler: waiting: Set(ResultStage 1)
20/08/18 09:31:43 INFO DAGScheduler: failed: Set()
20/08/18 09:31:43 INFO DAGScheduler: Submitting ResultStage 1 (MapPartitionsRDD[5] at map at GenomicDataset.scala:3805), which has no missing parents
20/08/18 09:31:43 INFO MemoryStore: Block broadcast_2 stored as values in memory (estimated size 83.4 KB, free 365.9 MB)
20/08/18 09:31:43 INFO MemoryStore: Block broadcast_2_piece0 stored as bytes in memory (estimated size 31.0 KB, free 365.9 MB)
20/08/18 09:31:43 INFO BlockManagerInfo: Added broadcast_2_piece0 in memory on 192.168.10.27:34275 (size: 31.0 KB, free: 366.2 MB)
20/08/18 09:31:43 INFO SparkContext: Created broadcast 2 from broadcast at DAGScheduler.scala:1163
20/08/18 09:31:43 INFO DAGScheduler: Submitting 1 missing tasks from ResultStage 1 (MapPartitionsRDD[5] at map at GenomicDataset.scala:3805) (first 15 tasks are for partitions Vector(0))
20/08/18 09:31:43 INFO TaskSchedulerImpl: Adding task set 1.0 with 1 tasks
20/08/18 09:31:43 INFO TaskSetManager: Starting task 0.0 in stage 1.0 (TID 1, localhost, executor driver, partition 0, ANY, 7662 bytes)
20/08/18 09:31:43 INFO Executor: Running task 0.0 in stage 1.0 (TID 1)
20/08/18 09:31:43 INFO ShuffleBlockFetcherIterator: Getting 1 non-empty blocks including 1 local blocks and 0 remote blocks
20/08/18 09:31:43 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 7 ms
20/08/18 09:31:45 INFO FileOutputCommitter: File Output Committer Algorithm version is 1
20/08/18 09:31:45 INFO CodecConfig: Compression: GZIP
20/08/18 09:31:45 INFO FileOutputCommitter: File Output Committer Algorithm version is 1
20/08/18 09:31:45 INFO ParquetOutputFormat: Parquet block size to 134217728
20/08/18 09:31:45 INFO ParquetOutputFormat: Parquet page size to 1048576
20/08/18 09:31:45 INFO ParquetOutputFormat: Parquet dictionary page size to 1048576
20/08/18 09:31:45 INFO ParquetOutputFormat: Dictionary is on
20/08/18 09:31:45 INFO ParquetOutputFormat: Validation is off
20/08/18 09:31:45 INFO ParquetOutputFormat: Writer version is: PARQUET_1_0
20/08/18 09:31:45 INFO ParquetOutputFormat: Maximum row group padding size is 8388608 bytes
20/08/18 09:31:45 INFO ParquetOutputFormat: Page size checking is: estimated
20/08/18 09:31:45 INFO ParquetOutputFormat: Min row count for page size check is: 100
20/08/18 09:31:45 INFO ParquetOutputFormat: Max row count for page size check is: 10000
20/08/18 09:31:45 INFO CodecPool: Got brand-new compressor [.gz]
20/08/18 09:31:46 INFO BlockManagerInfo: Removed broadcast_1_piece0 on 192.168.10.27:34275 in memory (size: 3.0 KB, free: 366.2 MB)
20/08/18 09:31:50 INFO InternalParquetRecordWriter: Flushing mem columnStore to file. allocated memory: 16004474
20/08/18 09:31:50 INFO FileOutputCommitter: Saved output of task 'attempt_20200818093139_0005_r_000000_0' to file:/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/mouse_chrM.bam.reads.sorted.adam/_temporary/0/task_20200818093139_0005_r_000000
20/08/18 09:31:50 INFO SparkHadoopMapRedUtil: attempt_20200818093139_0005_r_000000_0: Committed
20/08/18 09:31:50 INFO Executor: Finished task 0.0 in stage 1.0 (TID 1). 1237 bytes result sent to driver
20/08/18 09:31:50 INFO TaskSetManager: Finished task 0.0 in stage 1.0 (TID 1) in 6838 ms on localhost (executor driver) (1/1)
20/08/18 09:31:50 INFO TaskSchedulerImpl: Removed TaskSet 1.0, whose tasks have all completed, from pool 
20/08/18 09:31:50 INFO DAGScheduler: ResultStage 1 (runJob at SparkHadoopWriter.scala:78) finished in 6.891 s
20/08/18 09:31:50 INFO DAGScheduler: Job 0 finished: runJob at SparkHadoopWriter.scala:78, took 10.571690 s
20/08/18 09:31:50 INFO ParquetFileReader: Initiating action with parallelism: 5
20/08/18 09:31:50 INFO SparkHadoopWriter: Job job_20200818093139_0005 committed.
20/08/18 09:31:50 INFO SparkContext: Invoking stop() from shutdown hook
20/08/18 09:31:50 INFO SparkUI: Stopped Spark web UI at http://192.168.10.27:4040
20/08/18 09:31:50 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
20/08/18 09:31:50 INFO MemoryStore: MemoryStore cleared
20/08/18 09:31:50 INFO BlockManager: BlockManager stopped
20/08/18 09:31:50 INFO BlockManagerMaster: BlockManagerMaster stopped
20/08/18 09:31:50 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
20/08/18 09:31:50 INFO SparkContext: Successfully stopped SparkContext
20/08/18 09:31:50 INFO ShutdownHookManager: Shutdown hook called
20/08/18 09:31:50 INFO ShutdownHookManager: Deleting directory /tmp/spark-9ea381b4-ed66-4faa-aded-0bbde0a9e4cb
20/08/18 09:31:50 INFO ShutdownHookManager: Deleting directory /tmp/spark-1ce10a61-02eb-41c2-9284-307434a9e033

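The run above is the tail of the read-sorting step: it re-reads mouse_chrM.bam.reads.adam and writes a reference-position-sorted copy to mouse_chrM.bam.reads.sorted.adam. A minimal sketch of that step as a standalone command follows; the -sort_by_reference_position flag is an assumption (the invocation itself is not echoed in this excerpt), chosen because the "references ordered by name" log line above matches that option's documented lexicographic reference ordering:

# sketch: re-run the sort step by hand (flag name assumed, see note above)
./bin/adam-submit transformAlignments -sort_by_reference_position \
    mouse_chrM.bam.reads.adam mouse_chrM.bam.reads.sorted.adam
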
# convert the reads to fragments to re-pair the reads
echo "Converting read file to fragments"
+ echo 'Converting read file to fragments'
Converting read file to fragments
rm -rf ${FRAGMENTS}
+ rm -rf mouse_chrM.bam.fragments.adam
${ADAM} transformFragments -load_as_alignments ${READS} ${FRAGMENTS}
+ ./bin/adam-submit transformFragments -load_as_alignments mouse_chrM.bam.reads.adam mouse_chrM.bam.fragments.adam
Using ADAM_MAIN=org.bdgenomics.adam.cli.ADAMMain
Using spark-submit=/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/spark-2.4.6-bin-hadoop2.7/bin/spark-submit
20/08/18 09:31:51 WARN Utils: Your hostname, research-jenkins-worker-07 resolves to a loopback address: 127.0.1.1; using 192.168.10.27 instead (on interface eth0)
20/08/18 09:31:51 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address
20/08/18 09:31:52 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
log4j:WARN No appenders could be found for logger (org.bdgenomics.adam.cli.ADAMMain).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
20/08/18 09:31:52 INFO SparkContext: Running Spark version 2.4.6
20/08/18 09:31:52 INFO SparkContext: Submitted application: transformFragments
20/08/18 09:31:52 INFO SecurityManager: Changing view acls to: jenkins
20/08/18 09:31:52 INFO SecurityManager: Changing modify acls to: jenkins
20/08/18 09:31:52 INFO SecurityManager: Changing view acls groups to: 
20/08/18 09:31:52 INFO SecurityManager: Changing modify acls groups to: 
20/08/18 09:31:52 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users  with view permissions: Set(jenkins); groups with view permissions: Set(); users  with modify permissions: Set(jenkins); groups with modify permissions: Set()
20/08/18 09:31:52 INFO Utils: Successfully started service 'sparkDriver' on port 38943.
20/08/18 09:31:52 INFO SparkEnv: Registering MapOutputTracker
20/08/18 09:31:52 INFO SparkEnv: Registering BlockManagerMaster
20/08/18 09:31:52 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
20/08/18 09:31:52 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up
20/08/18 09:31:52 INFO DiskBlockManager: Created local directory at /tmp/blockmgr-736eda19-6852-4fc9-b544-4dec4a111dab
20/08/18 09:31:52 INFO MemoryStore: MemoryStore started with capacity 366.3 MB
20/08/18 09:31:53 INFO SparkEnv: Registering OutputCommitCoordinator
20/08/18 09:31:53 INFO Utils: Successfully started service 'SparkUI' on port 4040.
20/08/18 09:31:53 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://192.168.10.27:4040
20/08/18 09:31:53 INFO SparkContext: Added JAR file:/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-assembly/target/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar at spark://192.168.10.27:38943/jars/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar with timestamp 1597768313239
20/08/18 09:31:53 INFO Executor: Starting executor ID driver on host localhost
20/08/18 09:31:53 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 35499.
20/08/18 09:31:53 INFO NettyBlockTransferService: Server created on 192.168.10.27:35499
20/08/18 09:31:53 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
20/08/18 09:31:53 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, 192.168.10.27, 35499, None)
20/08/18 09:31:53 INFO BlockManagerMasterEndpoint: Registering block manager 192.168.10.27:35499 with 366.3 MB RAM, BlockManagerId(driver, 192.168.10.27, 35499, None)
20/08/18 09:31:53 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, 192.168.10.27, 35499, None)
20/08/18 09:31:53 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, 192.168.10.27, 35499, None)
20/08/18 09:31:53 INFO ADAMContext: Loading mouse_chrM.bam.reads.adam as Parquet of Alignments.
20/08/18 09:31:55 INFO ADAMContext: Reading the ADAM file at mouse_chrM.bam.reads.adam to create RDD
20/08/18 09:31:55 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 303.7 KB, free 366.0 MB)
20/08/18 09:31:55 INFO MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 27.4 KB, free 366.0 MB)
20/08/18 09:31:55 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on 192.168.10.27:35499 (size: 27.4 KB, free: 366.3 MB)
20/08/18 09:31:55 INFO SparkContext: Created broadcast 0 from newAPIHadoopFile at ADAMContext.scala:1792
20/08/18 09:31:55 INFO FileInputFormat: Total input paths to process : 1
20/08/18 09:31:55 INFO ParquetInputFormat: Total input paths to process : 1
20/08/18 09:31:56 INFO RDDBoundFragmentDataset: Saving data in ADAM format
20/08/18 09:31:56 INFO FileOutputCommitter: File Output Committer Algorithm version is 1
20/08/18 09:31:56 INFO SparkContext: Starting job: runJob at SparkHadoopWriter.scala:78
20/08/18 09:31:56 INFO DAGScheduler: Registering RDD 2 (groupBy at SingleReadBucket.scala:97) as input to shuffle 0
20/08/18 09:31:56 INFO DAGScheduler: Got job 0 (runJob at SparkHadoopWriter.scala:78) with 1 output partitions
20/08/18 09:31:56 INFO DAGScheduler: Final stage: ResultStage 1 (runJob at SparkHadoopWriter.scala:78)
20/08/18 09:31:56 INFO DAGScheduler: Parents of final stage: List(ShuffleMapStage 0)
20/08/18 09:31:56 INFO DAGScheduler: Missing parents: List(ShuffleMapStage 0)
20/08/18 09:31:56 INFO DAGScheduler: Submitting ShuffleMapStage 0 (MapPartitionsRDD[2] at groupBy at SingleReadBucket.scala:97), which has no missing parents
20/08/18 09:31:56 INFO MemoryStore: Block broadcast_1 stored as values in memory (estimated size 5.5 KB, free 366.0 MB)
20/08/18 09:31:56 INFO MemoryStore: Block broadcast_1_piece0 stored as bytes in memory (estimated size 3.1 KB, free 366.0 MB)
20/08/18 09:31:56 INFO BlockManagerInfo: Added broadcast_1_piece0 in memory on 192.168.10.27:35499 (size: 3.1 KB, free: 366.3 MB)
20/08/18 09:31:56 INFO SparkContext: Created broadcast 1 from broadcast at DAGScheduler.scala:1163
20/08/18 09:31:56 INFO DAGScheduler: Submitting 1 missing tasks from ShuffleMapStage 0 (MapPartitionsRDD[2] at groupBy at SingleReadBucket.scala:97) (first 15 tasks are for partitions Vector(0))
20/08/18 09:31:56 INFO TaskSchedulerImpl: Adding task set 0.0 with 1 tasks
20/08/18 09:31:56 INFO TaskSetManager: Starting task 0.0 in stage 0.0 (TID 0, localhost, executor driver, partition 0, PROCESS_LOCAL, 8001 bytes)
20/08/18 09:31:56 INFO Executor: Running task 0.0 in stage 0.0 (TID 0)
20/08/18 09:31:56 INFO Executor: Fetching spark://192.168.10.27:38943/jars/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar with timestamp 1597768313239
20/08/18 09:31:56 INFO TransportClientFactory: Successfully created connection to /192.168.10.27:38943 after 34 ms (0 ms spent in bootstraps)
20/08/18 09:31:56 INFO Utils: Fetching spark://192.168.10.27:38943/jars/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar to /tmp/spark-d9b55d70-5e21-4ed4-a658-d39108493f2e/userFiles-ce569103-53ee-4bd1-8713-5b0799fd842e/fetchFileTemp808293513186709801.tmp
20/08/18 09:31:56 INFO Executor: Adding file:/tmp/spark-d9b55d70-5e21-4ed4-a658-d39108493f2e/userFiles-ce569103-53ee-4bd1-8713-5b0799fd842e/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar to class loader
20/08/18 09:31:56 INFO NewHadoopRDD: Input split: file:/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/mouse_chrM.bam.reads.adam/part-r-00000.gz.parquet:0+10132211
20/08/18 09:31:56 INFO InternalParquetRecordReader: RecordReader initialized will read a total of 163064 records.
20/08/18 09:31:56 INFO InternalParquetRecordReader: at row 0. reading next block
20/08/18 09:31:56 INFO CodecPool: Got brand-new decompressor [.gz]
20/08/18 09:31:56 INFO InternalParquetRecordReader: block read in memory in 44 ms. row count = 163064
20/08/18 09:31:59 INFO Executor: Finished task 0.0 in stage 0.0 (TID 0). 956 bytes result sent to driver
20/08/18 09:31:59 INFO TaskSetManager: Finished task 0.0 in stage 0.0 (TID 0) in 3438 ms on localhost (executor driver) (1/1)
20/08/18 09:31:59 INFO TaskSchedulerImpl: Removed TaskSet 0.0, whose tasks have all completed, from pool 
20/08/18 09:31:59 INFO DAGScheduler: ShuffleMapStage 0 (groupBy at SingleReadBucket.scala:97) finished in 3.542 s
20/08/18 09:31:59 INFO DAGScheduler: looking for newly runnable stages
20/08/18 09:31:59 INFO DAGScheduler: running: Set()
20/08/18 09:31:59 INFO DAGScheduler: waiting: Set(ResultStage 1)
20/08/18 09:31:59 INFO DAGScheduler: failed: Set()
20/08/18 09:31:59 INFO DAGScheduler: Submitting ResultStage 1 (MapPartitionsRDD[6] at map at GenomicDataset.scala:3805), which has no missing parents
20/08/18 09:31:59 INFO MemoryStore: Block broadcast_2 stored as values in memory (estimated size 86.4 KB, free 365.9 MB)
20/08/18 09:31:59 INFO MemoryStore: Block broadcast_2_piece0 stored as bytes in memory (estimated size 32.3 KB, free 365.9 MB)
20/08/18 09:31:59 INFO BlockManagerInfo: Added broadcast_2_piece0 in memory on 192.168.10.27:35499 (size: 32.3 KB, free: 366.2 MB)
20/08/18 09:31:59 INFO SparkContext: Created broadcast 2 from broadcast at DAGScheduler.scala:1163
20/08/18 09:31:59 INFO DAGScheduler: Submitting 1 missing tasks from ResultStage 1 (MapPartitionsRDD[6] at map at GenomicDataset.scala:3805) (first 15 tasks are for partitions Vector(0))
20/08/18 09:31:59 INFO TaskSchedulerImpl: Adding task set 1.0 with 1 tasks
20/08/18 09:31:59 INFO TaskSetManager: Starting task 0.0 in stage 1.0 (TID 1, localhost, executor driver, partition 0, ANY, 7662 bytes)
20/08/18 09:31:59 INFO Executor: Running task 0.0 in stage 1.0 (TID 1)
20/08/18 09:31:59 INFO ShuffleBlockFetcherIterator: Getting 1 non-empty blocks including 1 local blocks and 0 remote blocks
20/08/18 09:31:59 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 8 ms
20/08/18 09:32:01 INFO BlockManagerInfo: Removed broadcast_1_piece0 on 192.168.10.27:35499 in memory (size: 3.1 KB, free: 366.2 MB)
20/08/18 09:32:01 INFO FileOutputCommitter: File Output Committer Algorithm version is 1
20/08/18 09:32:01 INFO CodecConfig: Compression: GZIP
20/08/18 09:32:01 INFO FileOutputCommitter: File Output Committer Algorithm version is 1
20/08/18 09:32:01 INFO ParquetOutputFormat: Parquet block size to 134217728
20/08/18 09:32:01 INFO ParquetOutputFormat: Parquet page size to 1048576
20/08/18 09:32:01 INFO ParquetOutputFormat: Parquet dictionary page size to 1048576
20/08/18 09:32:01 INFO ParquetOutputFormat: Dictionary is on
20/08/18 09:32:01 INFO ParquetOutputFormat: Validation is off
20/08/18 09:32:01 INFO ParquetOutputFormat: Writer version is: PARQUET_1_0
20/08/18 09:32:01 INFO ParquetOutputFormat: Maximum row group padding size is 8388608 bytes
20/08/18 09:32:01 INFO ParquetOutputFormat: Page size checking is: estimated
20/08/18 09:32:01 INFO ParquetOutputFormat: Min row count for page size check is: 100
20/08/18 09:32:01 INFO ParquetOutputFormat: Max row count for page size check is: 10000
20/08/18 09:32:01 INFO CodecPool: Got brand-new compressor [.gz]
20/08/18 09:32:08 INFO InternalParquetRecordWriter: Flushing mem columnStore to file. allocated memory: 21417928
20/08/18 09:32:08 INFO FileOutputCommitter: Saved output of task 'attempt_20200818093156_0006_r_000000_0' to file:/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/mouse_chrM.bam.fragments.adam/_temporary/0/task_20200818093156_0006_r_000000
20/08/18 09:32:08 INFO SparkHadoopMapRedUtil: attempt_20200818093156_0006_r_000000_0: Committed
20/08/18 09:32:08 INFO Executor: Finished task 0.0 in stage 1.0 (TID 1). 1280 bytes result sent to driver
20/08/18 09:32:08 INFO TaskSetManager: Finished task 0.0 in stage 1.0 (TID 1) in 8899 ms on localhost (executor driver) (1/1)
20/08/18 09:32:08 INFO TaskSchedulerImpl: Removed TaskSet 1.0, whose tasks have all completed, from pool 
20/08/18 09:32:08 INFO DAGScheduler: ResultStage 1 (runJob at SparkHadoopWriter.scala:78) finished in 8.942 s
20/08/18 09:32:08 INFO DAGScheduler: Job 0 finished: runJob at SparkHadoopWriter.scala:78, took 12.568094 s
20/08/18 09:32:08 INFO ParquetFileReader: Initiating action with parallelism: 5
20/08/18 09:32:08 INFO SparkHadoopWriter: Job job_20200818093156_0006 committed.
20/08/18 09:32:08 INFO SparkContext: Invoking stop() from shutdown hook
20/08/18 09:32:08 INFO SparkUI: Stopped Spark web UI at http://192.168.10.27:4040
20/08/18 09:32:08 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
20/08/18 09:32:08 INFO MemoryStore: MemoryStore cleared
20/08/18 09:32:08 INFO BlockManager: BlockManager stopped
20/08/18 09:32:08 INFO BlockManagerMaster: BlockManagerMaster stopped
20/08/18 09:32:08 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
20/08/18 09:32:08 INFO SparkContext: Successfully stopped SparkContext
20/08/18 09:32:08 INFO ShutdownHookManager: Shutdown hook called
20/08/18 09:32:08 INFO ShutdownHookManager: Deleting directory /tmp/spark-d9b55d70-5e21-4ed4-a658-d39108493f2e
20/08/18 09:32:08 INFO ShutdownHookManager: Deleting directory /tmp/spark-edd33464-f3c9-4efb-8fd0-65818cb82e40

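The "groupBy at SingleReadBucket.scala" stages above are the actual re-pairing: alignments that share a read name are bucketed back into a single fragment record before the fragment dataset is written out. The same command restated with its arguments annotated (all names taken from the trace):

# -load_as_alignments: the input Parquet directory holds alignments,
# not fragments, so mates are regrouped by read name on load
./bin/adam-submit transformFragments -load_as_alignments \
    mouse_chrM.bam.reads.adam \
    mouse_chrM.bam.fragments.adam
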
# test that printing works
echo "Printing reads and fragments"
+ echo 'Printing reads and fragments'
Printing reads and fragments
${ADAM} print ${READS} 1>/dev/null 2>/dev/null
+ ./bin/adam-submit print mouse_chrM.bam.reads.adam
${ADAM} print ${FRAGMENTS} 1>/dev/null 2>/dev/null
+ ./bin/adam-submit print mouse_chrM.bam.fragments.adam

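Both print invocations discard stdout and stderr; with the script's errexit behavior (note the explicit set +e / set -e toggles further down), this step only verifies that each command exits zero. To actually inspect records locally, one might drop the redirections and page the output; a sketch:

# sketch: look at the first few printed records instead of discarding them
# (Spark chatter goes to stderr, so only the records reach the pipe)
./bin/adam-submit print mouse_chrM.bam.reads.adam 2>/dev/null | head
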
# run flagstat to verify that flagstat runs OK
echo "Printing read statistics"
+ echo 'Printing read statistics'
Printing read statistics
${ADAM} flagstat ${READS}
+ ./bin/adam-submit flagstat mouse_chrM.bam.reads.adam
Using ADAM_MAIN=org.bdgenomics.adam.cli.ADAMMain
Using spark-submit=/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/spark-2.4.6-bin-hadoop2.7/bin/spark-submit
20/08/18 09:32:26 WARN Utils: Your hostname, research-jenkins-worker-07 resolves to a loopback address: 127.0.1.1; using 192.168.10.27 instead (on interface eth0)
20/08/18 09:32:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address
20/08/18 09:32:26 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
log4j:WARN No appenders could be found for logger (org.bdgenomics.adam.cli.ADAMMain).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
20/08/18 09:32:26 INFO SparkContext: Running Spark version 2.4.6
20/08/18 09:32:26 INFO SparkContext: Submitted application: flagstat
20/08/18 09:32:26 INFO SecurityManager: Changing view acls to: jenkins
20/08/18 09:32:26 INFO SecurityManager: Changing modify acls to: jenkins
20/08/18 09:32:26 INFO SecurityManager: Changing view acls groups to: 
20/08/18 09:32:26 INFO SecurityManager: Changing modify acls groups to: 
20/08/18 09:32:26 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users  with view permissions: Set(jenkins); groups with view permissions: Set(); users  with modify permissions: Set(jenkins); groups with modify permissions: Set()
20/08/18 09:32:27 INFO Utils: Successfully started service 'sparkDriver' on port 36973.
20/08/18 09:32:27 INFO SparkEnv: Registering MapOutputTracker
20/08/18 09:32:27 INFO SparkEnv: Registering BlockManagerMaster
20/08/18 09:32:27 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
20/08/18 09:32:27 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up
20/08/18 09:32:27 INFO DiskBlockManager: Created local directory at /tmp/blockmgr-25d16f27-ac05-40eb-8a3b-ed34afcfa409
20/08/18 09:32:27 INFO MemoryStore: MemoryStore started with capacity 366.3 MB
20/08/18 09:32:27 INFO SparkEnv: Registering OutputCommitCoordinator
20/08/18 09:32:27 INFO Utils: Successfully started service 'SparkUI' on port 4040.
20/08/18 09:32:27 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://192.168.10.27:4040
20/08/18 09:32:27 INFO SparkContext: Added JAR file:/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/adam-assembly/target/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar at spark://192.168.10.27:36973/jars/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar with timestamp 1597768347570
20/08/18 09:32:27 INFO Executor: Starting executor ID driver on host localhost
20/08/18 09:32:27 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 35651.
20/08/18 09:32:27 INFO NettyBlockTransferService: Server created on 192.168.10.27:35651
20/08/18 09:32:27 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
20/08/18 09:32:27 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, 192.168.10.27, 35651, None)
20/08/18 09:32:27 INFO BlockManagerMasterEndpoint: Registering block manager 192.168.10.27:35651 with 366.3 MB RAM, BlockManagerId(driver, 192.168.10.27, 35651, None)
20/08/18 09:32:27 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, 192.168.10.27, 35651, None)
20/08/18 09:32:27 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, 192.168.10.27, 35651, None)
20/08/18 09:32:28 INFO ADAMContext: Loading mouse_chrM.bam.reads.adam as Parquet of Alignments.
20/08/18 09:32:28 INFO ADAMContext: Reading the ADAM file at mouse_chrM.bam.reads.adam to create RDD
20/08/18 09:32:28 INFO ADAMContext: Using the specified projection schema
20/08/18 09:32:28 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 313.1 KB, free 366.0 MB)
20/08/18 09:32:29 INFO MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 28.4 KB, free 366.0 MB)
20/08/18 09:32:29 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on 192.168.10.27:35651 (size: 28.4 KB, free: 366.3 MB)
20/08/18 09:32:29 INFO SparkContext: Created broadcast 0 from newAPIHadoopFile at ADAMContext.scala:1792
20/08/18 09:32:30 INFO FileInputFormat: Total input paths to process : 1
20/08/18 09:32:30 INFO ParquetInputFormat: Total input paths to process : 1
20/08/18 09:32:30 INFO SparkContext: Starting job: aggregate at FlagStat.scala:115
20/08/18 09:32:30 INFO DAGScheduler: Got job 0 (aggregate at FlagStat.scala:115) with 1 output partitions
20/08/18 09:32:30 INFO DAGScheduler: Final stage: ResultStage 0 (aggregate at FlagStat.scala:115)
20/08/18 09:32:30 INFO DAGScheduler: Parents of final stage: List()
20/08/18 09:32:30 INFO DAGScheduler: Missing parents: List()
20/08/18 09:32:30 INFO DAGScheduler: Submitting ResultStage 0 (MapPartitionsRDD[2] at map at FlagStat.scala:96), which has no missing parents
20/08/18 09:32:30 INFO MemoryStore: Block broadcast_1 stored as values in memory (estimated size 4.3 KB, free 366.0 MB)
20/08/18 09:32:30 INFO MemoryStore: Block broadcast_1_piece0 stored as bytes in memory (estimated size 2.5 KB, free 366.0 MB)
20/08/18 09:32:30 INFO BlockManagerInfo: Added broadcast_1_piece0 in memory on 192.168.10.27:35651 (size: 2.5 KB, free: 366.3 MB)
20/08/18 09:32:30 INFO SparkContext: Created broadcast 1 from broadcast at DAGScheduler.scala:1163
20/08/18 09:32:30 INFO DAGScheduler: Submitting 1 missing tasks from ResultStage 0 (MapPartitionsRDD[2] at map at FlagStat.scala:96) (first 15 tasks are for partitions Vector(0))
20/08/18 09:32:30 INFO TaskSchedulerImpl: Adding task set 0.0 with 1 tasks
20/08/18 09:32:30 INFO TaskSetManager: Starting task 0.0 in stage 0.0 (TID 0, localhost, executor driver, partition 0, PROCESS_LOCAL, 8012 bytes)
20/08/18 09:32:30 INFO Executor: Running task 0.0 in stage 0.0 (TID 0)
20/08/18 09:32:30 INFO Executor: Fetching spark://192.168.10.27:36973/jars/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar with timestamp 1597768347570
20/08/18 09:32:30 INFO TransportClientFactory: Successfully created connection to /192.168.10.27:36973 after 34 ms (0 ms spent in bootstraps)
20/08/18 09:32:30 INFO Utils: Fetching spark://192.168.10.27:36973/jars/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar to /tmp/spark-c4dba1a3-7494-4aa5-bd94-8d33b8981399/userFiles-bb4ecf0a-f951-43c5-883c-0ccc9bb661ff/fetchFileTemp1698902613442371034.tmp
20/08/18 09:32:30 INFO Executor: Adding file:/tmp/spark-c4dba1a3-7494-4aa5-bd94-8d33b8981399/userFiles-bb4ecf0a-f951-43c5-883c-0ccc9bb661ff/adam-assembly-spark2_2.11-0.33.0-SNAPSHOT.jar to class loader
20/08/18 09:32:31 INFO NewHadoopRDD: Input split: file:/tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded/mouse_chrM.bam.reads.adam/part-r-00000.gz.parquet:0+10132211
20/08/18 09:32:31 INFO InternalParquetRecordReader: RecordReader initialized will read a total of 163064 records.
20/08/18 09:32:31 INFO InternalParquetRecordReader: at row 0. reading next block
20/08/18 09:32:31 INFO CodecPool: Got brand-new decompressor [.gz]
20/08/18 09:32:31 INFO InternalParquetRecordReader: block read in memory in 25 ms. row count = 163064
20/08/18 09:32:32 INFO Executor: Finished task 0.0 in stage 0.0 (TID 0). 859 bytes result sent to driver
20/08/18 09:32:32 INFO TaskSetManager: Finished task 0.0 in stage 0.0 (TID 0) in 1485 ms on localhost (executor driver) (1/1)
20/08/18 09:32:32 INFO TaskSchedulerImpl: Removed TaskSet 0.0, whose tasks have all completed, from pool 
20/08/18 09:32:32 INFO DAGScheduler: ResultStage 0 (aggregate at FlagStat.scala:115) finished in 1.569 s
20/08/18 09:32:32 INFO DAGScheduler: Job 0 finished: aggregate at FlagStat.scala:115, took 1.620318 s
163064 + 0 in total (QC-passed reads + QC-failed reads)
0 + 0 primary duplicates
0 + 0 primary duplicates - both read and mate mapped
0 + 0 primary duplicates - only read mapped
0 + 0 primary duplicates - cross chromosome
0 + 0 secondary duplicates
0 + 0 secondary duplicates - both read and mate mapped
0 + 0 secondary duplicates - only read mapped
0 + 0 secondary duplicates - cross chromosome
160512 + 0 mapped (98.43%:0.00%)
163064 + 0 paired in sequencing
81524 + 0 read1
81540 + 0 read2
154982 + 0 properly paired (95.04%:0.00%)
158044 + 0 with itself and mate mapped
2468 + 0 singletons (1.51%:0.00%)
418 + 0 with mate mapped to a different chr
120 + 0 with mate mapped to a different chr (mapQ>=5)
20/08/18 09:32:32 INFO SparkContext: Invoking stop() from shutdown hook
20/08/18 09:32:32 INFO SparkUI: Stopped Spark web UI at http://192.168.10.27:4040
20/08/18 09:32:32 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
20/08/18 09:32:32 INFO MemoryStore: MemoryStore cleared
20/08/18 09:32:32 INFO BlockManager: BlockManager stopped
20/08/18 09:32:32 INFO BlockManagerMaster: BlockManagerMaster stopped
20/08/18 09:32:32 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
20/08/18 09:32:32 INFO SparkContext: Successfully stopped SparkContext
20/08/18 09:32:32 INFO ShutdownHookManager: Shutdown hook called
20/08/18 09:32:32 INFO ShutdownHookManager: Deleting directory /tmp/spark-c4dba1a3-7494-4aa5-bd94-8d33b8981399
20/08/18 09:32:32 INFO ShutdownHookManager: Deleting directory /tmp/spark-5d6032e9-aed6-4d39-b274-a0ea8f3d571d
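
Each statistics line above uses the samtools flagstat convention of "QC-passed + QC-failed" counts: all 163064 reads here pass QC, 98.43% are mapped, and nothing is flagged as a duplicate. If the original BAM is still at hand, the two reports can be compared side by side; a sketch, assuming samtools is on the PATH:

# sketch: samtools flagstat prints a closely matching "passed + failed"
# layout, so its report on the source BAM can be checked against ADAM's
samtools flagstat mouse_chrM.bam
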
rm -rf ${ADAM_TMP_DIR}
+ rm -rf /tmp/adamTest3coIExW/deleteMePleaseThisIsNoLongerNeeded
popd
+ popd
~/workspace/ADAM-prb/HADOOP_VERSION/2.7.5/SCALA_VERSION/2.11/SPARK_VERSION/2.4.6/label/ubuntu

pushd ${PROJECT_ROOT}
+ pushd /home/jenkins/workspace/ADAM-prb/HADOOP_VERSION/2.7.5/SCALA_VERSION/2.11/SPARK_VERSION/2.4.6/label/ubuntu/scripts/..
~/workspace/ADAM-prb/HADOOP_VERSION/2.7.5/SCALA_VERSION/2.11/SPARK_VERSION/2.4.6/label/ubuntu ~/workspace/ADAM-prb/HADOOP_VERSION/2.7.5/SCALA_VERSION/2.11/SPARK_VERSION/2.4.6/label/ubuntu

# move back to Scala 2.12 as default
if [ ${SCALA_VERSION} == 2.11 ];
then
    set +e
    ./scripts/move_to_scala_2.12.sh
    set -e
fi
+ '[' 2.11 == 2.11 ']'
+ set +e
+ ./scripts/move_to_scala_2.12.sh
+ set -e
# move back to Spark 3.x as default
if [ ${SPARK_VERSION} == 2.4.6 ];
then
    set +e
    ./scripts/move_to_spark_3.sh
    set -e
fi
+ '[' 2.4.6 == 2.4.6 ']'
+ set +e
+ ./scripts/move_to_spark_3.sh
+ set -e

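Both move_to_* helper scripts rewrite the Maven build in place, swapping the Scala and Spark version strings (and the matching _2.11/_2.12 and -spark2/-spark3 artifact suffixes) across the pom.xml files; they sit inside set +e / set -e pairs so that a no-op rewrite cannot abort the script. A simplified sketch of the kind of substitution involved, assuming the real scripts follow this sed-over-poms pattern (they are more targeted than a blind replace):

# simplified sketch of the pom rewrite (pattern assumed; the actual
# scripts restrict the substitution to version-bearing lines)
find . -name pom.xml -exec sed -i 's/_2\.11/_2.12/g' {} +
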
# test that the source is formatted correctly
./scripts/format-source
+ ./scripts/format-source
+++ dirname ./scripts/format-source
++ cd ./scripts
++ pwd
+ DIR=/home/jenkins/workspace/ADAM-prb/HADOOP_VERSION/2.7.5/SCALA_VERSION/2.11/SPARK_VERSION/2.4.6/label/ubuntu/scripts
+ pushd /home/jenkins/workspace/ADAM-prb/HADOOP_VERSION/2.7.5/SCALA_VERSION/2.11/SPARK_VERSION/2.4.6/label/ubuntu/scripts/..
~/workspace/ADAM-prb/HADOOP_VERSION/2.7.5/SCALA_VERSION/2.11/SPARK_VERSION/2.4.6/label/ubuntu ~/workspace/ADAM-prb/HADOOP_VERSION/2.7.5/SCALA_VERSION/2.11/SPARK_VERSION/2.4.6/label/ubuntu
+ mvn org.scalariform:scalariform-maven-plugin:format license:format
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=1g; support was removed in 8.0
[INFO] Scanning for projects...
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Build Order:
[INFO] 
[INFO] ADAM_2.12                                                          [pom]
[INFO] ADAM_2.12: Shader workaround                                       [jar]
[INFO] ADAM_2.12: Avro-to-Dataset codegen utils                           [jar]
[INFO] ADAM_2.12: Core                                                    [jar]
[INFO] ADAM_2.12: APIs for Java, Python                                   [jar]
[INFO] ADAM_2.12: CLI                                                     [jar]
[INFO] ADAM_2.12: Assembly                                                [jar]
[INFO] 
[INFO] ------------< org.bdgenomics.adam:adam-parent-spark3_2.12 >-------------
[INFO] Building ADAM_2.12 0.33.0-SNAPSHOT                                 [1/7]
[INFO] --------------------------------[ pom ]---------------------------------
[INFO] 
[INFO] --- scalariform-maven-plugin:0.1.4:format (default-cli) @ adam-parent-spark3_2.12 ---
[INFO] Modified 2 of 244 .scala files
[INFO] 
[INFO] --- maven-license-plugin:1.10.b1:format (default-cli) @ adam-parent-spark3_2.12 ---
[INFO] Updating license headers...
[INFO] 
[INFO] -------------< org.bdgenomics.adam:adam-shade-spark3_2.12 >-------------
[INFO] Building ADAM_2.12: Shader workaround 0.33.0-SNAPSHOT              [2/7]
[INFO] --------------------------------[ jar ]---------------------------------
[INFO] 
[INFO] --- scalariform-maven-plugin:0.1.4:format (default-cli) @ adam-shade-spark3_2.12 ---
[INFO] Modified 0 of 0 .scala files
[INFO] 
[INFO] --- maven-license-plugin:1.10.b1:format (default-cli) @ adam-shade-spark3_2.12 ---
[INFO] Updating license headers...
[INFO] 
[INFO] ------------< org.bdgenomics.adam:adam-codegen-spark3_2.12 >------------
[INFO] Building ADAM_2.12: Avro-to-Dataset codegen utils 0.33.0-SNAPSHOT  [3/7]
[INFO] --------------------------------[ jar ]---------------------------------
[INFO] 
[INFO] --- scalariform-maven-plugin:0.1.4:format (default-cli) @ adam-codegen-spark3_2.12 ---
[INFO] Modified 0 of 4 .scala files
[INFO] 
[INFO] --- maven-license-plugin:1.10.b1:format (default-cli) @ adam-codegen-spark3_2.12 ---
[INFO] Updating license headers...
[INFO] 
[INFO] -------------< org.bdgenomics.adam:adam-core-spark3_2.12 >--------------
[INFO] Building ADAM_2.12: Core 0.33.0-SNAPSHOT                           [4/7]
[INFO] --------------------------------[ jar ]---------------------------------
[INFO] 
[INFO] --- scalariform-maven-plugin:0.1.4:format (default-cli) @ adam-core-spark3_2.12 ---
[INFO] Modified 0 of 204 .scala files
[INFO] 
[INFO] --- maven-license-plugin:1.10.b1:format (default-cli) @ adam-core-spark3_2.12 ---
[INFO] Updating license headers...
[INFO] 
[INFO] -------------< org.bdgenomics.adam:adam-apis-spark3_2.12 >--------------
[INFO] Building ADAM_2.12: APIs for Java, Python 0.33.0-SNAPSHOT          [5/7]
[INFO] --------------------------------[ jar ]---------------------------------
[INFO] 
[INFO] --- scalariform-maven-plugin:0.1.4:format (default-cli) @ adam-apis-spark3_2.12 ---
[INFO] Modified 0 of 5 .scala files
[INFO] 
[INFO] --- maven-license-plugin:1.10.b1:format (default-cli) @ adam-apis-spark3_2.12 ---
[INFO] Updating license headers...
[INFO] 
[INFO] --------------< org.bdgenomics.adam:adam-cli-spark3_2.12 >--------------
[INFO] Building ADAM_2.12: CLI 0.33.0-SNAPSHOT                            [6/7]
[INFO] --------------------------------[ jar ]---------------------------------
[INFO] 
[INFO] --- scalariform-maven-plugin:0.1.4:format (default-cli) @ adam-cli-spark3_2.12 ---
[INFO] Modified 0 of 29 .scala files
[INFO] 
[INFO] --- maven-license-plugin:1.10.b1:format (default-cli) @ adam-cli-spark3_2.12 ---
[INFO] Updating license headers...
[INFO] 
[INFO] -----------< org.bdgenomics.adam:adam-assembly-spark3_2.12 >------------
[INFO] Building ADAM_2.12: Assembly 0.33.0-SNAPSHOT                       [7/7]
[INFO] --------------------------------[ jar ]---------------------------------
[INFO] 
[INFO] --- scalariform-maven-plugin:0.1.4:format (default-cli) @ adam-assembly-spark3_2.12 ---
[INFO] Modified 0 of 1 .scala files
[INFO] 
[INFO] --- maven-license-plugin:1.10.b1:format (default-cli) @ adam-assembly-spark3_2.12 ---
[INFO] Updating license headers...
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary for ADAM_2.12 0.33.0-SNAPSHOT:
[INFO] 
[INFO] ADAM_2.12 .......................................... SUCCESS [  7.982 s]
[INFO] ADAM_2.12: Shader workaround ....................... SUCCESS [  0.043 s]
[INFO] ADAM_2.12: Avro-to-Dataset codegen utils ........... SUCCESS [  0.061 s]
[INFO] ADAM_2.12: Core .................................... SUCCESS [  4.503 s]
[INFO] ADAM_2.12: APIs for Java, Python ................... SUCCESS [  0.155 s]
[INFO] ADAM_2.12: CLI ..................................... SUCCESS [  0.251 s]
[INFO] ADAM_2.12: Assembly ................................ SUCCESS [  0.020 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD SUCCESS
[INFO] ------------------------------------------------------------------------
[INFO] Total time:  13.521 s
[INFO] Finished at: 2020-08-18T09:32:47-07:00
[INFO] ------------------------------------------------------------------------
+ popd
~/workspace/ADAM-prb/HADOOP_VERSION/2.7.5/SCALA_VERSION/2.11/SPARK_VERSION/2.4.6/label/ubuntu
if test -n "$(git status --porcelain)"
then
    echo "Please run './scripts/format-source'"
    exit 1
fi
git status --porcelain
++ git status --porcelain
+ test -n ''
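
This guard is the final check: ./scripts/format-source rewrites sources in place, so any non-empty git status --porcelain output would mean a change was committed unformatted, and the build would fail with a reminder to run the script. Here the tree is clean and the check passes. The same gate can be reproduced locally:

# sketch: run the same formatting gate the CI runs
./scripts/format-source
test -z "$(git status --porcelain)" || echo "Please run './scripts/format-source'"
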
popd    
+ popd
~/workspace/ADAM-prb/HADOOP_VERSION/2.7.5/SCALA_VERSION/2.11/SPARK_VERSION/2.4.6/label/ubuntu

echo
+ echo

echo "All the tests passed"
+ echo 'All the tests passed'
All the tests passed
echo
+ echo

Recording test results
Publishing Scoverage XML and HTML report...
Setting commit status on GitHub for https://github.com/bigdatagenomics/adam/commit/484368b617d462edcaf6454ac4643b1ec9ca8eb0
Finished: SUCCESS